code stringlengths 1 1.49M | vector listlengths 0 7.38k | snippet listlengths 0 7.38k |
|---|---|---|
import numpy as np, math
X,Y = np.meshgrid( np.arange(0.0,11.0), np.arange(0.0,7.0) )
xy = np.column_stack([ X.flatten(), Y.flatten() ])
cap = ''
for i in xy:
for j in [0.0, 90.0, 180.0, 270.0]:
# for j in [0.0, 180.0]:
cap += ' - [%6.1f, %6.1f, %6.1f]\n' % (i[0],i[1],j)
f = open( 'captures.yaml', 'w' )
f.write( cap )
f.close()
| [
[
1,
0,
0.0714,
0.0714,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
14,
0,
0.2143,
0.0714,
0,
0.66,
0.1429,
835,
3,
2,
0,
0,
500,
10,
3
],
[
14,
0,
0.2857,
0.0714,
0,
... | [
"import numpy as np, math",
"X,Y = np.meshgrid( np.arange(0.0,11.0), np.arange(0.0,7.0) )",
"xy = np.column_stack([ X.flatten(), Y.flatten() ])",
"cap = ''",
"for i in xy:\n for j in [0.0, 90.0, 180.0, 270.0]:\n # for j in [0.0, 180.0]:\n cap += ' - [%6.1f, %6.1f, %6.1f]\\n' % (i[0],i[1],j)... |
import numpy as np
from numpy import pi
import time
import transforms as tr
import pylab as pl
import functools as fct
import pickle as pkl
from scipy.special import erf
import prob as pb
import optparse
waveLen = 3e8 / 900e6 # Middle of UHF RFID band
class AntennaGain():
def __init__(self, RadiationPattern, Gmax = None, GmaxDB = None, front_back_ratio = None):
if Gmax is not None:
self.gain = Gmax
elif GmaxDB is not None:
self.gain = 10.0 ** (GmaxDB/10.0)
else:
self.gain = 1.0
# If the FBR is 8 dB => fbr = DBToWatts( -8 )
self.fbr = front_back_ratio
self.RadiationPattern = RadiationPattern
def G(self, theta, phi):
rv = self.RadiationPattern( standard_rad(theta), standard_rad(phi)) * self.gain
# Account for front-back ratio
if self.fbr:
rv = np.max([ self.fbr * self.gain, rv ])
return rv
def Gdb(self, theta, phi):
#return np.max([ -80.0, WattsToDB( self.G( theta, phi ))]) # Assume always > -80db
return WattsToDB( self.G( theta, phi ))
def CartToSphere(x, y, z):
r = np.power( np.power(x,2) + np.power(y,2) + np.power(z,2), 0.5)
theta = np.arctan2( np.power(np.power(x,2) + np.power(y,2),0.5) , z)
phi = np.arctan2( y, x )
return (r,theta,phi)
def CartToSphere2(x, y, z): # Equivalent
r = np.power( np.power(x,2) + np.power(y,2) + np.power(z,2), 0.5)
theta = np.arccos( z / r )
phi = np.arctan2( y, x )
return (r,theta,phi)
def mCartToSphere(v):
x = v[0]
y = v[1]
z = v[2]
r = np.power( np.power(x,2) + np.power(y,2) + np.power(z,2), 0.5)
theta = np.arctan2( np.power(np.power(x,2) + np.power(y,2),0.5) , z)
phi = np.arctan2( y, x )
return (r,theta,phi)
def SphereToCart(r, theta, phi):
x = r * np.sin(theta) * np.cos(phi)
y = r * np.sin(theta) * np.sin(phi)
z = r * np.cos(theta)
return (x,y,z)
def WattsToDBm(pwr):
return 10.0 * np.log10(pwr) + 30.0
def WattsToDB(pwr):
return 10.0 * np.log10(pwr)
def DBmToWatts(pwr):
return np.power(10.0, (pwr - 30.0) / 10.0)
def DBToWatts(pwr):
return np.power(10.0, (pwr) / 10.0)
def PL( radius ):
return np.power( waveLen/(4*pi*radius), 2.0 )
def standard_rad(t):
if t > 0:
return ((t + np.pi) % (np.pi * 2)) - np.pi
else:
return ((t - np.pi) % (np.pi * -2)) + np.pi
# Dipole Antenna
def rad_dipole(theta, phi):
return np.power( np.sin(theta), 2.0 )
dipole = AntennaGain(rad_dipole, Gmax=1.5, front_back_ratio = DBToWatts(-8) )
#dipole = AntennaGain(rad_dipole, Gmax=1.5 )
# Isotropic Antenna
isotropic = AntennaGain( lambda theta,phi: 1, Gmax=1.0 )
# Patch Antenna
alpha = 1.0
k0 = 2*pi / waveLen
# See balanis 3-27: k^2 = omega^2 * mu * epsilon
# Free space: mu * epsilon = 1/c^2
# So k0 = w/c = 2pi * freq / c = 2pi / lambda (since c = f*lambda)
width = waveLen / 2.0
Leff = 1.02*waveLen / 2.0 # This is probably waveLen / 2.0 + epsilon
def rad_patch(theta, phi):
t1 = np.sin(theta)
t2 = np.sin(k0*width/2.0 * np.cos(theta)) / np.cos(theta)
t3 = np.cos(k0*Leff/2.0 * np.sin(theta)*np.sin(phi))
#return alpha * np.power(t1*t2*t3,2)
if -pi/2 <= phi <= pi/2:
return alpha * np.power(t1*t2*t3,2)
else:
return alpha * np.power(t1*t2*t3,2) * 0.0001
# S9028?
#patch = AntennaGain( rad_patch, Gmax=5.623/rad_patch(pi/2,0.0)) #7.5dBi ==> 5.623 ==> 18.233 multiplier (since rad_patch_max = 0.30841937174)
# S9025P
#patch = AntennaGain( rad_patch, Gmax=3.548/rad_patch(pi/2,0.0)) # 5.5dBi boresight. => 3.548 [ 10**(5.5/10) ]
# note: we modify Gmax instead of alpha st. Gmax * rad_patch(pi/2) => 5.5dB
patch = AntennaGain( rad_patch,
Gmax=3.548/rad_patch(pi/2,0.0), # 5.5dBi boresight. => 3.548 [ 10**(5.5/10) ]
front_back_ratio = DBToWatts( -8 )) # 8dB front-back ratio
# Friis Forward
def Friis_Inc_Tag( Prdr, CL, waveLen, radius,
Grdr_func, theta_rdr, phi_rdr,
Gtag_func, theta_tag, phi_tag ):
return Prdr * CL * Grdr_func(theta_rdr, phi_rdr) * PL(radius) * Gtag_func(theta_tag, phi_tag)
pwr_inc_tag = fct.partial( Friis_Inc_Tag, 1.0, 0.5, waveLen )
def Friis_Inc_Rdr( Prdr, CL, waveLen, AlphaBeta, radius,
Grdr_func, theta_rdr, phi_rdr,
Gtag_func, theta_tag, phi_tag ):
inter = np.power( CL * Grdr_func(theta_rdr, phi_rdr) * PL(radius) * Gtag_func(theta_tag, phi_tag), 2.0 )
return Prdr * AlphaBeta * inter
pwr_inc_rdr = fct.partial( Friis_Inc_Rdr, 1.0, 0.5, waveLen, 1.0 )
def plot_gain_patterns( gain_func, sup_title, label_psi_plane, label_theta_plane ):
gf = gain_func
psi = np.linspace( -np.pi, np.pi, 50 )
theta_front = np.linspace( 0, np.pi, 50 )
theta_back = theta_front[::-1] * -1.0
fig = pl.figure()
fig.suptitle( sup_title )
ax1 = fig.add_subplot( 121, polar=True)
ax1.plot( psi, np.array([ gf( np.pi / 2.0, p ) for p in psi ]), 'bo-' )
ax1.set_title( label_psi_plane )
# Hacky to get rotated polar...
ax2 = fig.add_subplot( 122, polar=True )
ax2.hold( True )
ax2.plot( theta_front - np.pi / 2, np.array([ gf( t, 0.0 ) for t in theta_front ]), 'ro-' )
ax2.plot( theta_back - np.pi / 2, np.array([ gf( -1.0*t, -np.pi ) for t in theta_back ]), 'ro-' ) # the negative goes to psi
pl.thetagrids( [0, 45, 90, 135, 180, 225, 270, 315], [90, 45, 0, -45, -90, -135, 180, 135 ] )
ax2.set_title( label_theta_plane )
pl.show()
if __name__ == "__main__":
p = optparse.OptionParser()
p.add_option('-a', '--patch', action='store_true', dest='patch', help='Look at patch antenna.')
opt, args = p.parse_args()
if opt.patch:
# Patch (verify E/H plane designations?)
plot_gain_patterns( patch.G, 'Patch Antenna Gains (Absolute)', 'E-Plane', 'H-Plane' )
#plot_gain_patterns( patch.Gdb, 'Patch Antenna Gains', 'E-Plane', 'H-Plane' )
else:
# Dipole (verify E/H plane designations?)
plot_gain_patterns( dipole.G, 'Dipole Antenna Gains (Absolute)', 'H-Plane', 'E-Plane' )
#plot_gain_patterns( dipole.Gdb, 'Dipole Antenna Gains', 'H-Plane', 'E-Plane' )
| [
[
1,
0,
0.0053,
0.0053,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0106,
0.0053,
0,
0.66,
0.0278,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0159,
0.0053,
0,
... | [
"import numpy as np",
"from numpy import pi",
"import time",
"import transforms as tr",
"import pylab as pl",
"import functools as fct",
"import pickle as pkl",
"from scipy.special import erf",
"import prob as pb",
"import optparse",
"waveLen = 3e8 / 900e6 # Middle of UHF RFID band",
"class A... |
#! /usr/bin/python
import roslib
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('robotis')
roslib.load_manifest('geometry_msgs')
roslib.load_manifest('move_base_msgs')
roslib.load_manifest('std_msgs')
roslib.load_manifest('tf')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('robotis')
import rospy
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
from rfid_servoing.msg import ServoAction, ServoGoal
from robotis.srv import MoveAng, MoveAngRequest
from geometry_msgs.msg import PoseStamped, Quaternion
from move_base_msgs.msg import MoveBaseAction
from std_msgs.msg import String
from rfid_datacapture.srv import BagCapture, BagCaptureRequest
from rfid_behaviors.srv import FlapEarsSrv
import rfid_datacapture.utils as rdut
import numpy as np, math
def sm_rfid_servo_approach( yaml_fname ):
# Create a SMACH state machine
sm = smach.StateMachine( outcomes = ['succeeded','aborted','preempted'])
# Open the container
with sm:
smach.StateMachine.add(
'CAPTURE_POSITIONS',
rdut.YAMLprocPoses( yaml_fname ),
remapping = {'next_move_pose':'next_move_pose'}, # output
transitions = {'aborted':'succeeded',
'succeeded':'READY_MOVE'})
smach.StateMachine.add(
'READY_MOVE',
rdut.MoveNotify(),
transitions = {'succeeded':'MOVE_POSITION'})
smach.StateMachine.add(
'MOVE_POSITION',
SimpleActionState( '/move_base',
MoveBaseAction,
goal_slots = [ 'target_pose' ]),
remapping = { 'target_pose' : 'next_move_pose' }, # input
transitions = {'aborted':'MANUAL_SKIP',
'preempted':'aborted',
'succeeded':'CAPTURE_TAGS'})
smach.StateMachine.add(
'MANUAL_SKIP',
rdut.ManualSkip(),
transitions = {'succeeded':'CAPTURE_TAGS', # We already manually positioned the robot
'aborted':'CAPTURE_POSITIONS'}) # skip this position and go to next
smach.StateMachine.add(
'CAPTURE_TAGS',
rdut.YAMLprocMultitag( yaml_fname ),
remapping = {'bagfile_name':'bagfile_name', # output
'bagfile_topics':'bagfile_topics', # output
'panrate':'panrate',
'tagid':'tagid',
'tilt_left':'tilt_left',
'tilt_right':'tilt_right',
'tilt_rate':'tilt_rate',
'tilt_block':'tilt_block'}, # output
transitions = {'aborted':'CAPTURE_POSITIONS', # move to next location
'succeeded':'TILT_LEFT'}) # capture bag
smach.StateMachine.add(
'TILT_LEFT',
ServiceState( '/robotis/servo_left_tilt_moveangle',
MoveAng,
request_slots = ['angle','angvel','blocking']),
transitions = {'succeeded':'TILT_RIGHT'},
remapping = {'angle':'tilt_left',
'angvel':'tilt_rate',
'blocking':'tilt_block'})
smach.StateMachine.add(
'TILT_RIGHT',
ServiceState( '/robotis/servo_right_tilt_moveangle',
MoveAng,
request_slots = ['angle','angvel','blocking']),
transitions = {'succeeded':'START_BAG_CAPTURE'},
remapping = {'angle':'tilt_right',
'angvel':'tilt_rate',
'blocking':'tilt_block'})
smach.StateMachine.add(
'START_BAG_CAPTURE',
ServiceState( '/bag_cap/capture',
BagCapture,
request_slots = ['topics','dest'] ),
remapping = {'topics':'bagfile_topics',
'dest':'bagfile_name'},
transitions = {'succeeded':'FLAP'})
# Servoing is a basic state machine. Success means servoing finished @ obs.
smach.StateMachine.add(
'FLAP',
ServiceState( '/rfid_orient/flap',
FlapEarsSrv,
request_slots = ['data','panrate']),
transitions = { 'succeeded': 'STOP_BAG_CAPTURE' },
remapping = {'data':'tagid', # input
'panrate':'panrate'}) # input
smach.StateMachine.add(
'STOP_BAG_CAPTURE',
ServiceState( '/bag_cap/capture',
BagCapture,
request = BagCaptureRequest('','') ),
transitions = {'succeeded':'TUCK_LEFT'})
# Tuck Left (non-blocking)
smach.StateMachine.add(
'TUCK_LEFT',
ServiceState( 'robotis/servo_left_pan_moveangle',
MoveAng,
request = MoveAngRequest( 1.250, 0.2, 0 )), # ang (float), angvel (float), blocking (bool)
transitions = {'succeeded':'TUCK_RIGHT'})
# Tuck Right (non-blocking)
smach.StateMachine.add(
'TUCK_RIGHT',
ServiceState( 'robotis/servo_right_pan_moveangle',
MoveAng,
request = MoveAngRequest( -1.250, 0.2, 0 )), # ang (float), angvel (float), blocking (bool)
transitions = {'succeeded':'CAPTURE_TAGS'})
return sm
if __name__ == '__main__':
import optparse
p = optparse.OptionParser()
p.add_option('--yaml', action='store', type='string', dest='yaml',
help='Capture description yaml file', default='')
opt, args = p.parse_args()
if opt.yaml == '':
print 'ERROR: Must specify YAML file.'
exit()
rospy.init_node('rfid_servo_capture')
sm = sm_rfid_servo_approach( opt.yaml )
sis = IntrospectionServer('RFID_servo_approach', sm, '/SM_RFID_SERVO_APPROACH')
sis.start()
outcome = sm.execute()
sis.stop()
# python sm_servo_capture_simple.py --yaml datacap_vert.yaml
| [
[
1,
0,
0.0118,
0.0059,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0176,
0.0059,
0,
0.66,
0.0435,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0235,
0.0059,
0,
0.... | [
"import roslib",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('robotis')",
"roslib.load_manifest('geometry_msgs')",
"roslib.load_manifest('move_base_msgs')",
"roslib.load_manifest('std_msgs')",
"roslib.load_manifest('tf')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_... |
#!/usr/bin/python
# Basically a giant script.
import roslib
roslib.load_manifest( 'geometry_msgs' ) # the pickle files containe Point and Pose Stamped.
import rospy
from geometry_msgs.msg import PointStamped, PoseStamped
import sys
import glob
import yaml
import time
import optparse
import cPickle as pkl
import numpy as np, math
import pylab as pl
import friis
PLOT = False
# glob_files: '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/datacap/*.pkl'
# filters:
# antennas:
# PR2_Head: '/head_rfid'
# tags:
# 'datacap ':
if __name__ == '__main__':
p = optparse.OptionParser()
p.add_option('--yaml', action='store', type='string', dest='yaml', default='',
help='yaml file that describes this run.')
p.add_option('--plot', action='store_true', dest='plot',
help='Pop-up the resulting plot')
opt, args = p.parse_args()
yaml_fname = opt.yaml
PLOT = opt.plot
else:
yaml_fname = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/rad_plot_combined.yaml'
# SCRIPT:
if not yaml_fname:
print 'YAML file required!'
exit()
else:
f = open( yaml_fname )
yaml_config = yaml.load( f )
f.close()
# XBINS = yaml_config['rad_histbins']['xbins']
# YBINS = yaml_config['rad_histbins']['ybins']
XBINS = 50
YBINS = 50
XMIN = yaml_config['rad_histbins']['xmin']
XMAX = yaml_config['rad_histbins']['xmax']
YMIN = yaml_config['rad_histbins']['ymin']
YMAX = yaml_config['rad_histbins']['ymax']
def add_files( d, arg ):
fname, fcount = arg
print 'Loading (%d of %d): %s' % (fcount, len(fnames), fname)
f = open( fname, 'r' )
d_new = pkl.load( f )
f.close()
for k in d_new.keys():
if not d.has_key( k ):
d[k] = []
d[k] += d_new[k]
return d
def planar_xy( reading ):
# Model estimate of P^inc_tag.
# r_rdr, theta_rdr, phi_rdr, r_tag, theta_tag, phi_tag, rssi, antname, tagid = reading
d_rdr = reading[0]
d_tag = reading[1]
read = reading[2]
d_rot = reading[3]
ps = reading[4]
r_rdr, theta_rdr, phi_rdr = d_rdr
r_tag, theta_tag, phi_tag = d_tag
x,y,z = friis.SphereToCart( r_rdr, theta_rdr, phi_rdr )
return [x,y]
fnames = reduce( lambda x,y: x+y, [ glob.glob(i) for i in yaml_config['glob_files'] ], [] )
if len(glob.glob(yaml_config['use_combined'])) > 0:
print 'Loading pickle: %s' % (yaml_config['use_combined'])
f = open( yaml_config['use_combined'], 'r' )
data = pkl.load( f )
f.close()
print 'Done.'
else:
f = open( yaml_config['use_combined'], 'w' )
d = reduce( add_files, zip(fnames,range(len(fnames))), {} )
# Apply Filters:
# Start with just the desired tagids
all_reads = reduce( lambda x,y: x+y,
[ d[k] for k in yaml_config['filters']['tags'] if d.has_key(k) ],
[] )
print '*** File \'%s\' had a total of %d reads ***' % ( yaml_fname, len( all_reads ))
# Filter based on antennas
# return [ [r_rdr, theta_rdr, phi_rdr], # all floats
# [r_tag, theta_tag, phi_tag], # all floats
# [rr.rssi, rr.antenna_name, rr.tagID ], # int, string, string
# [theta_rot_map, theta_tag_map], # floats (radians)
# [ tag_map, rdr_map, rot_map ] ] # geometry_msgs/PoseStamped
ant_dict = dict.fromkeys( yaml_config['filters']['antennas'] )
filt_reads = [ r for r in all_reads if ant_dict.has_key( r[2][1] ) ]
reads = filt_reads
data = np.array([ planar_xy(r) + [ r[2][0] ] for r in reads ]).T # 3xN: x,y,rssi
print 'Dumping data into combined pickle file: %s ' % (yaml_config['use_combined'])
pkl.dump( data, f, -1 )
f.close()
print 'Done. Re-run.'
exit()
# data will be 3xN => x, y, RSSI
# Calculate Useful Values
xy = data[0:2,:]
rssi = data[2]
pos_mask = rssi != -1
neg_mask = rssi == -1
# *******************
# Reads per location
# *******************
H,xedges,yedges = np.histogram2d( xy[0], xy[1],
bins=(XBINS,YBINS),
range=[[XMIN,XMAX],
[YMIN,YMAX]])
XS,YS = np.meshgrid( xedges, yedges )
bins_ind_x = np.sum( xy[0][:,np.newaxis] > xedges[:-1], axis = 1 ) - 1 # Tells the index for each of the reads
bins_ind_y = np.sum( xy[1][:,np.newaxis] > yedges[:-1], axis = 1 ) - 1
d = np.copy( H.T ) # The H matrices are actually transposed from how we display
d[ np.where( d > 100 ) ] = 100 # I just want to see which locations have few / no reads.
dma = np.ma.array( d, mask=(d<yaml_config['points_per_loc']) )
f = pl.figure( figsize=(10,6) )
pl.hold(True)
pl.pcolor( XS, YS, dma, cmap=pl.cm.jet ) # or hot?
pl.clim( 0.0, 100.0 )
pl.colorbar()
pl.xlabel( 'X-Coordinate (meters)' )
pl.ylabel( 'Y-Coordinate (meters)' )
pl.title( 'Number of reads attempts at each location' )
pl.savefig( yaml_config['outimage'] + '_datapoints_masked.png' )
f = pl.figure( figsize=(10,6) )
pl.hold(True)
pl.pcolor( XS, YS, d, cmap=pl.cm.jet ) # or hot?
pl.clim( 0.0, 100.0 )
pl.colorbar()
pl.xlabel( 'X-Coordinate (meters)' )
pl.ylabel( 'Y-Coordinate (meters)' )
pl.title( 'Number of reads attempts at each location' )
pl.savefig( yaml_config['outimage'] + '_datapoints_notmasked.png' )
# *******************
# Tag detection probability
# *******************
# Note... I'm still using H from above!
# Need to rebuild dma to not be capped at 100 reads.
dma = np.ma.array( np.copy(H.T), mask=(H.T<yaml_config['points_per_loc']) )
H_pos,xedges,yedges = np.histogram2d( xy[0][pos_mask], xy[1][pos_mask],
bins=(XBINS,YBINS),
range=[[XMIN,XMAX],
[YMIN,YMAX]])
# Where was it actually detected.
dma_det = np.ma.array( np.copy(H_pos.T), mask=(H.T<yaml_config['points_per_loc']) )
# Compute the probability...
dma_det[ np.where(dma.mask==False) ] = (1.0*dma_det[ np.where(dma_det.mask==False) ]) / (1.0*dma[ np.where(dma_det.mask==False) ])
f = pl.figure( figsize=(10,6) )
pl.hold(True)
pl.pcolor( XS, YS, dma_det, cmap=pl.cm.jet ) # or hot?
pl.clim( 0.0, 1.0 )
pl.colorbar()
pl.xlabel( 'X-Coordinate (meters)' )
pl.ylabel( 'Y-Coordinate (meters)' )
pl.title( 'Probability of Tag Detection' )
pl.savefig( yaml_config['outimage'] + '_tag_detect_prob.png' )
# *******************
# Mean RSSI
# *******************
# Note... I'm still using H and H_pos from above!
def get_mean( xi, yi ):
# Which indices (into 3xN data) correspond to this x-bin and y-bin?
data_ind = np.intersect1d( np.where( bins_ind_x == yi )[0], np.where( bins_ind_y == xi )[0] )
rm = np.mean([ r for r in rssi[ data_ind ] if r != -1 ])
return rm
def get_std( xi, yi ):
# Which indices (into 3xN data) correspond to this x-bin and y-bin?
data_ind = np.intersect1d( np.where( bins_ind_x == yi )[0], np.where( bins_ind_y == xi )[0] )
rm = np.std([ r for r in rssi[ data_ind ] if r != -1 ])
return rm
# To calculate the rssi mean...
dma_rm = np.ma.array( np.copy(H_pos.T), mask=(H_pos.T<yaml_config['rssi_points_per_loc']) )
means = np.ma.copy( dma_rm )
for i, (xi,yi) in enumerate( zip( *np.where( dma_rm.mask == False ))):
# Note: the bin indices are relative to H-matrices, which are transposed. So we switch xi/yi
means[xi,yi] = get_mean( xi, yi )
stddev = np.ma.copy( dma_rm )
for i, (xi,yi) in enumerate( zip( *np.where( dma_rm.mask == False ))):
# Note: the bin indices are relative to H-matrices, which are transposed. So we switch xi/yi
stddev[xi,yi] = get_std( xi, yi )
f = pl.figure( figsize=(10,6) )
pl.hold(True)
pl.pcolor( XS, YS, means, cmap=pl.cm.jet ) # or hot?
pl.clim( 72,96 )
pl.colorbar()
pl.xlabel( 'X-Coordinate (meters)' )
pl.ylabel( 'Y-Coordinate (meters)' )
pl.title( 'Mean RSSI' )
pl.savefig( yaml_config['outimage'] + '_tag_rssi_mean.png' )
f = pl.figure( figsize=(10,6) )
pl.hold(True)
pl.pcolor( XS, YS, stddev, cmap=pl.cm.jet ) # or hot?
pl.colorbar()
pl.xlabel( 'X-Coordinate (meters)' )
pl.ylabel( 'Y-Coordinate (meters)' )
pl.title( 'Standard Deviation of RSSI' )
pl.savefig( yaml_config['outimage'] + '_tag_rssi_std.png' )
if PLOT:
pl.show()
# SAVE SENSOR MODEL
fname = yaml_config['use_combined'].replace('.pkl','_MODEL.pkl')
MODEL = { 'detect_model': np.ma.copy( dma_det ),
'rssi_model': np.ma.copy( means ),
'stddev_model': np.ma.copy( stddev ),
'xedges': xedges, # These are necessary to define the boundaries of the bins.
'yedges': yedges }
f = open( fname, 'w' )
pkl.dump( MODEL, f, -1 )
f.close()
| [
[
1,
0,
0.0176,
0.0035,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0211,
0.0035,
0,
0.66,
0.0103,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0246,
0.0035,
0,
0.... | [
"import roslib",
"roslib.load_manifest( 'geometry_msgs' ) # the pickle files containe Point and Pose Stamped.",
"import rospy",
"from geometry_msgs.msg import PointStamped, PoseStamped",
"import sys",
"import glob",
"import yaml",
"import time",
"import optparse",
"import cPickle as pkl",
"impo... |
import numpy as np, math
import numpy
import math
import sys
##
# Bound the value of a number to be above lower, and lower than upper
# @return a number
def bound(value, lower, upper):
raise RuntimeError('math_util.bound moved to hrl_lib.util')
#sys.exit()
# if lower >= upper:
# t = lower
# lower = upper
# upper = t
# #print 'bound', value, 'lower', lower, 'upper', upper
# #return min(max(value, lower), upper)
# return min(max(value, lower), upper)
def bound_mat(m, lower, upper):
if lower >= upper:
t = lower
lower = upper
upper = t
m = m.copy()
m[np.where(m > upper)] = upper
m[np.where(m < lower)] = lower
return m
def approx_equal(a, b, epsilon=.001):
return (b < (a+epsilon)) and ((a-epsilon) < b)
def approx_equalv(a, b, epsilon=.001):
return np.all(np.abs(a - b) < epsilon)
def radians(mat):
return mat * (np.pi/180.0)
def degrees(mat):
return mat * (180.0 / np.pi)
##################################################################
# Angles related functions
##################################################################
def vec_of_ang(a):
return numpy.matrix([numpy.cos(a), numpy.sin(a)]).T
def ang_of_vec(a):
a = a / numpy.linalg.norm(a)
return math.atan2(a[1,0], a[0,0])
def avg_ang(wa, a, wb, b):
"""
Calculates the average between two angles
wa weight of first angle
a first angle
wb weight of second angle
b second angle
"""
return ang_of_vec(wa * vec_of_ang(a) + wb * vec_of_ang(b))
def blend_ang(alpha, a, b):
return avg_ang(alpha, a, 1-alpha,b)
def standard_rad(t):
if t > 0:
return ((t + numpy.pi) % (numpy.pi * 2)) - numpy.pi
else:
return ((t - numpy.pi) % (numpy.pi * -2)) + numpy.pi
def best_turn_dir(reference, new_angle):
""" positive is left, negative is right! """
return standard_rad(reference - new_angle)
def cart_of_pol(p):
""" Finds cartesian coordinates of polar points [r, t]' """
r = p[0,:]
t = p[1,:]
x = numpy.multiply(numpy.cos(t), r)
y = numpy.multiply(numpy.sin(t), r)
return numpy.vstack((x,y))
def pol_of_cart(p):
""" Find polar coordinates of cartesian points [x, y]' """
norm = numpy.linalg.norm(p)
ang = math.atan2(p[1,0], p[0,0])
return numpy.matrix([norm, ang]).T
##################################################################
# NUMPY HELPER FUNCTIONS
##################################################################
##
# Find the maximal position in a 2D array
# @return (r,c)
def argmax2d(mat):
max_1d = np.argmax(mat)
row_length = float(mat.shape[1])
row_idx = np.floor(max_1d / row_length)
col_idx = max_1d % row_length
return row_idx, col_idx
##
# Changes the range of an numpy array to betwen [0, 255] from it's [min_value, max_value]
def renormalize(npimage_gray):
min_img = np.min(npimage_gray)
max_img = np.max(npimage_gray)
ret = np.matrix(np.round(((npimage_gray - min_img) / (max_img - min_img) * 255.0)), 'uint8')
return ret
def list_mat_to_mat(list_mat, axis=0):
return np.concatenate(tuple(list_mat), axis=axis)
def list_of_mat(mat):
for i in range(mat.shape[1]):
yield mat[:,i]
def nearest(mat, target):
'''
Return a sorted list of the nearest (euclidean dist) element
of a matrix to a target value and their indeices.
@param mat mxn
@param target mx1
'''
#mat = mat.T
#target = target.T
#import util as ut
#import pdb
#ut.save_pickle(mat, 'mat.pkl')
#ut.save_pickle(target, 'target.pkl')
diff_vec = mat - target
pwr = np.ones_like(mat[0])*2
dist = np.power(np.sum(np.power(diff_vec, pwr),axis=0),0.5)
indices = dist.argsort(axis=1)
#want indices from sort order
#indices = np.concatenate((np.matrix(range(sort_order.shape[1])), sort_order), 0)[:, sort_order.A1]
#print sort_order
#print indices
#pdb.set_trace()
return mat[:, indices.A1], indices
if __name__ == '__main__':
import hrl_lib.util as ut
import pdb
mat = ut.load_pickle('mat.pkl')
target = ut.load_pickle('target.pkl')
diff_vec = mat - target
pwr = np.ones_like(mat[0])*2
dist = np.power(np.sum(np.power(diff_vec, pwr),axis=0),0.5)
sort_order = dist.argsort(axis=1)
#want indices from sort order
indices = np.concatenate((np.matrix(range(sort_order.shape[1])), sort_order), 0)[:, sort_order.A1]
print sort_order
print indices
pdb.set_trace()
#def nearest(mat, target):
# '''
# Return a sorted list of the nearest (euclidean dist) element
# of a matrix to a target value and their indeices.
# '''
# mat = mat.T
# target = target.T
# diff_vec = mat - target
# pwr = np.ones_like(mat[0])*2
# dist = np.power(np.sum(np.power(diff_vec ,pwr),axis=1),0.5)
# indices = dist.argsort(axis=0)
# return mat[indices.A1], indices
##################################################################
# MISCELLANEOUS MATH HELPER FUNCTIONS
##################################################################
def blend(alpha, a, b):
return (alpha * a) + ((1-alpha) * b)
def approx_equal(a, b, epsilon=.001):
return (b < (a+epsilon)) and ((a-epsilon) < b)
def norm(mat):
"""
Calculate L2 norm for column vectors in a matrix
"""
return np.power(np.sum(np.power(mat,2), axis=0), 0.5)
def rnd(v):
return int(round(v))
def point_to_homo(p):
""" Convert points into homogeneous coordinates """
o = numpy.matrix(numpy.ones((1, p.shape[1])))
return numpy.vstack((p,o))
""" Convert points back from homogeneous coordinates """
def homo_to_point(p):
return p[0:p.shape[0]-1,:]
| [
[
1,
0,
0.0049,
0.0049,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.0098,
0.0049,
0,
0.66,
0.0345,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0147,
0.0049,
0,
... | [
"import numpy as np, math",
"import numpy",
"import math",
"import sys",
"def bound(value, lower, upper):\n raise RuntimeError('math_util.bound moved to hrl_lib.util')",
"def bound_mat(m, lower, upper):\n if lower >= upper:\n t = lower\n lower = upper\n upper = t\n\n m = m.co... |
#!/usr/bin/python
import roslib
roslib.load_manifest('geometry_msgs')
roslib.load_manifest('tf')
import rospy
import tf
from geometry_msgs.msg import PointStamped
import time
rospy.init_node('ground_truth_tag_pose')
listener = tf.TransformListener()
# listener.waitForTransform('/l_gripper_tool_frame', '/ear_antenna_left',
# rospy.Time(0), timeout = rospy.Duration(10) )
listener.waitForTransform('/l_gripper_tool_frame', '/map',
rospy.Time(0), timeout = rospy.Duration(10) )
rate = rospy.Rate(2)
while not rospy.is_shutdown():
p = PointStamped()
p.header.stamp = rospy.Time(0)
p.header.frame_id = '/l_gripper_tool_frame'
p.point.x = 0.015 # Frame is actually in middle of fingers. Move it out to tips
#p_earl = listener.transformPoint('/ear_antenna_left', p)
p_earl = listener.transformPoint('/map', p)
print '<x,y,z> = <%2.3f, %2.3f, %2.3f> ' % ( p_earl.point.x, p_earl.point.y, p_earl.point.z )
rate.sleep()
| [
[
1,
0,
0.0968,
0.0323,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.129,
0.0323,
0,
0.66,
0.0909,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.1613,
0.0323,
0,
0.6... | [
"import roslib",
"roslib.load_manifest('geometry_msgs')",
"roslib.load_manifest('tf')",
"import rospy",
"import tf",
"from geometry_msgs.msg import PointStamped",
"import time",
"rospy.init_node('ground_truth_tag_pose')",
"listener = tf.TransformListener()",
"listener.waitForTransform('/l_gripper_... |
#!/usr/bin/python
# Basically a giant script.
import roslib
roslib.load_manifest( 'geometry_msgs' ) # the pickle files containe Point and Pose Stamped.
import rospy
from geometry_msgs.msg import PointStamped, PoseStamped
import sys
import glob
import yaml
import time
import optparse
import cPickle as pkl
import numpy as np, math
import pylab as pl
import string
from scipy.spatial import KDTree
import matplotlib as mpl
import friis
import math_util as mu
PLOT = False
if __name__ == '__main__':
p = optparse.OptionParser()
p.add_option('--yaml', action='store', type='string', dest='yaml', default='',
help='yaml file that describes this run.')
p.add_option('--plot', action='store_true', dest='plot',
help='Pop-up the resulting plot')
opt, args = p.parse_args()
yaml_fname = opt.yaml
PLOT = opt.plot
else:
yaml_fname = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/dir_est_head_datacap.yaml'
# yaml_fname = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/dir_est_shoulder_datacap.yaml'
if not yaml_fname:
print 'YAML file required!'
exit()
else:
f = open( yaml_fname )
yaml_config = yaml.load( f )
f.close()
def desired_ant( r ):
if dict.fromkeys(yaml_config['filters']['antennas']).has_key( r[2][1] ):
return True
else:
return False
# def xy( reading ):
# # Model estimate of P^inc_tag.
# # r_rdr, theta_rdr, phi_rdr, r_tag, theta_tag, phi_tag, rssi, antname, tagid = reading
# d_rdr = reading[0]
# d_tag = reading[1]
# read = reading[2]
# d_rot = reading[3]
# ps = reading[4]
# tag_map, rdr_map, rot_map, base_map = ps
# return [ base_map.pose.position.x, base_map.pose.position.y ]
class Scan():
    """One capture file's reads for one tag, plus two direction estimates.

    d is { tagid: [processed_read, ...] }; each read is (inferred from use)
    [ d_rdr, d_tag, [rssi, antname, tagid], [theta_rot_map, theta_tag_map],
      [tag_map, rdr_map, rot_map, base_map] ].  Computes an ARGMAX
    (histogram-max RSSI) estimate and a STATIC (midpoint) estimate of the
    tag bearing, and a stats row used by the plotting code below.
    """
    def __init__( self, d, tagid, fname ):
        self.d = d
        self.tagid = tagid
        self.fname = fname
        if not self.d.has_key( self.tagid ):
            print 'ERROR: %s does not have tagid \'%s\'' % (self.fname, self.tagid)
            exit()
        # Per-read rotation-frame angle and groundtruth tag angle (radians),
        # restricted to the configured antennas.
        self.theta_rot_map = [ r[3][0] for r in self.d[ self.tagid ] if desired_ant( r )]
        self.theta_tag_map = [ r[3][1] for r in self.d[ self.tagid ] if desired_ant( r )]
        # NOTE(review): the extra 'if r[2][1]' filter is redundant as long as
        # every read carries a nonempty antenna name; if one were empty the
        # three lists would disagree in length and row_stack would fail.
        self.rssi = [ r[2][0] for r in self.d[ self.tagid ] if r[2][1] if desired_ant( r )]
        self.dat = np.row_stack([ np.array( self.theta_rot_map ),
                                  np.array( self.theta_tag_map ),
                                  np.array( self.rssi ) ])
        self.theta_tag_gt = np.mean( self.dat[1] ) # The actual estimates for gt will fluctuate per read due to tf
        # We want to know if the tag was inview and if there were positive reads
        self.h, self.bins = np.histogram( self.dat[0], 36, (-np.pi, np.pi )) # 10-deg width bins
        self.bin_centers = (self.bins[:-1] + self.bins[1:]) / 2.0
        self.ind_all = np.sum( self.dat[0][:,np.newaxis] > self.bins[:-1], axis = 1) - 1 # gives indices for each datapoint into corresponding bin
        self.ind_tag = np.sum( np.array([ self.theta_tag_gt ])[:,np.newaxis] > self.bins[:-1], axis = 1) - 1
        self.inview = np.sum( self.ind_all == self.ind_tag ) > 0 # was there at least one reading in the ground-truth bin?
        # NOTE(review): this tests the whole 3xN array, not just the rssi row
        # self.dat[2]; the angle rows are almost never exactly -1, so this is
        # nearly always True -- presumably self.dat[2] != -1 was intended.
        self.hasposreads = np.sum( self.dat != -1 ) > 0
        # Histmax estimate only uses positive readings
        self.m_rssi = [ np.mean( self.dat[2,np.intersect1d( np.where( self.ind_all == i )[0],
                                                            np.where( self.dat[2] != -1 )[0] )])
                        for i in xrange(len(self.h))]
        self.m_rssi = np.nan_to_num( np.array( self.m_rssi )) # convert all the places with nan (mean of zero readings) to zero
        self.ind_max = np.argmax( self.m_rssi )
        self.max_m_rssi = self.m_rssi[ self.ind_max ]
        self.histmax_est = self.bin_centers[ self.ind_max ]
        self.histmax_err = mu.standard_rad( self.histmax_est - self.theta_tag_gt )
        # Static estimation: pick the midpoint angle.
        # We assume theta_rot_map will be contiguous between [-pi,pi] --> choose a good rotation frame.
        self.static_est = np.min(self.dat[0]) + ( np.max(self.dat[0]) - np.min(self.dat[0]) ) / 2.0
        self.static_err = mu.standard_rad( self.static_est - self.theta_tag_gt )
        # Robot and tag map-frame positions from the first read, for the
        # distance-vs-error stats below.
        first_read = self.d[ self.tagid ][0]
        pose_robot = first_read[4][3]
        prx = pose_robot.pose.position.x
        pry = pose_robot.pose.position.y
        pose_tag = first_read[4][0]
        ptx = pose_tag.pose.position.x
        pty = pose_tag.pose.position.y
        dist = np.sqrt( (prx-ptx)**2.0 + (pry-pty)**2.0 )
        self.stats = [ prx,
                       pry,
                       dist,
                       self.max_m_rssi,
                       self.histmax_est,
                       self.histmax_err,
                       self.static_est,
                       self.static_err ]
        return
fnames = reduce( lambda x,y: x+y, [ glob.glob(i) for i in yaml_config['glob_files'] ], [] )
# Load all pkl files into one huge dictionary.
# d = { 'tagid1': [ PROC_READ1, PROC_READ2, ... ],
#       ...
#     }
scans = []
for i,fname in enumerate( fnames ):
    print 'Loading (%d of %d): %s' % (i+1, len(fnames), fname)
    f = open( fname, 'r' )
    d_new = pkl.load( f )
    f.close()
    # One Scan per (file, configured tag) pair.
    for k in d_new.keys():
        if dict.fromkeys(yaml_config['filters']['tags']).has_key( k ):
            scans += [ Scan( d_new, k, fname ) ]
skipped = 0
stats = []
# Keep only scans where the tag was in view and had positive reads;
# optionally save a per-scan RSSI-vs-rotation plot.
for s in scans:
    if not s.inview or not s.hasposreads:
        skipped += 1
    else:
        if PLOT:
            f = pl.figure( figsize=(10,6) )
            pl.axes([0.1,0.1,0.65,0.8])
            pl.plot( s.dat[0] * 180.0 / np.pi, np.clip(s.dat[2],69,110), 'rx' )
            pl.hold( True )
            pl.plot( [ s.theta_tag_gt * 180.0 / np.pi ], [ np.max(s.dat[2])+2 ], 'bo' ) # tag groundtruth
            pl.plot( [ s.histmax_est * 180.0 / np.pi ], [ np.max(s.dat[2])+1 ], 'ko' ) # histmax estimate
            pl.ylim( (68,105) )
            pl.xlim( (-180,180) )
            pl.xlabel( 'Rotation Angle (degrees)')
            pl.ylabel( 'RSSI')
            pl.title( 'RSSI versus rotation angle' )
            pl.legend(['RFID reads', 'Groundtruth', 'ARGMAX Est'], loc=(1.03,0.2))
            # Derive the output image name from the pkl basename.
            f = s.fname
            sr = string.rfind( f, '/' )
            pl.savefig( yaml_config['outimage'] + f[sr+1:sr+string.find(f[sr:],'.')] + '_pincrdr.png' )
            pl.close()
        stats.append( s.stats )
# Columns of npstats (one row per kept scan, transposed to 8xN):
# 0 robot x, 1 robot y, 2 distance-to-tag, 3 max mean rssi,
# 4 histmax est, 5 histmax err, 6 static est, 7 static err.
npstats = np.array( stats ).T
print 'Skipped (not in view or no positive reads): ', skipped
print 'ARGMAX Stats:'
herr = npstats[5]
magherr = np.abs( herr )
print '\tmean err: ', math.degrees(np.mean( herr ))
print '\tstd err: ', math.degrees(np.std( herr ))
print '\tRMS err (should be same as stderr): ', math.degrees( np.sqrt(np.mean(np.power(herr, 2.0))) )
print '\tmean magerr: ', math.degrees(np.mean( magherr ))
print '\tstddev magerr: ', math.degrees(np.std( magherr ))
print 'STATIC Stats:'
serr = npstats[7]
magserr = np.abs( serr )
print '\tmean err: ', math.degrees(np.mean( serr ))
print '\tstd err: ', math.degrees(np.std( serr ))
print '\tRMS err (should be same as stderr): ', math.degrees( np.sqrt(np.mean(np.power(serr, 2.0))) )
print '\tmean magerr: ', math.degrees(np.mean( magserr ))
print '\tstddev magerr: ', math.degrees(np.std( magserr ))
# Setup for plots below
dist = npstats[2]
h_d,bins_d = np.histogram( dist, 8, range=(0.0,8.0)) # we want to consider distances out to 0m-8m
ind_d = np.sum( dist[:,np.newaxis] > bins_d[:-1], axis=1) - 1
num_d = np.array([ len( np.where( ind_d == i )[0] ) for i in xrange(len(h_d)) ])
rssi = npstats[3]
h_r,bins_r = np.histogram( rssi, 8, range=(71,101)) # RSSI bins over 71-101
ind_r = np.sum( rssi[:,np.newaxis] > bins_r[:-1], axis=1) - 1
num_r = np.array([ len( np.where( ind_r == i )[0] ) for i in xrange(len(h_r)) ])
# Means (md) and StandardDev (sd) at Distance (in Degrees)
magherr_md = np.array([ np.mean( magherr[ np.where( ind_d == i )[0] ]) for i in xrange(len(h_d)) ]) * 180.0 / np.pi
magherr_sd = np.array([ np.std( magherr[ np.where( ind_d == i )[0] ]) for i in xrange(len(h_d)) ]) * 180.0 / np.pi
magherr_sd[ np.where( num_d < 3 )[0] ] = 0 # Only put errorbars where we have sufficient data!
magserr_md = np.array([ np.mean( magserr[ np.where( ind_d == i )[0] ]) for i in xrange(len(h_d)) ]) * 180.0 / np.pi
magserr_sd = np.array([ np.std( magserr[ np.where( ind_d == i )[0] ]) for i in xrange(len(h_d)) ]) * 180.0 / np.pi
magserr_sd[ np.where( num_d < 3 )[0] ] = 0 # Only put errorbars where we have sufficient data!
# Means (mr) and StandardDev (sr) at MaxRSSI (in Degrees)
magherr_mr = np.array([ np.mean( magherr[ np.where( ind_r == i )[0] ]) for i in xrange(len(h_r)) ]) * 180.0 / np.pi
magherr_sr = np.array([ np.std( magherr[ np.where( ind_r == i )[0] ]) for i in xrange(len(h_r)) ]) * 180.0 / np.pi
magherr_sr[ np.where( num_r < 3 )[0] ] = 0 # Only put errorbars where we have sufficient data!
magserr_mr = np.array([ np.mean( magserr[ np.where( ind_r == i )[0] ]) for i in xrange(len(h_r)) ]) * 180.0 / np.pi
magserr_sr = np.array([ np.std( magserr[ np.where( ind_r == i )[0] ]) for i in xrange(len(h_r)) ]) * 180.0 / np.pi
magserr_sr[ np.where( num_r < 3 )[0] ] = 0 # Only put errorbars where we have sufficient data!
# | Err | vs. distance (Bar Plot)
f = pl.figure( figsize=(10,6) )
pl.axes([0.1,0.1,0.65,0.8])
pl.bar( bins_d[:-1], magherr_md, 1.0, color='g', yerr=magherr_sd )
pl.xlim( (0.0,8.0) )
pl.ylim( (-20.0,180) )
pl.xlabel( 'Distance From Tag (m)')
pl.ylabel( '|Angular Error| (degrees)')
# pl.title( 'ARGMAX' )
pl.savefig( yaml_config['outimage'] + '_magherr_vs_dist.png' )
pl.close()
f = pl.figure( figsize=(10,6) )
pl.axes([0.1,0.1,0.65,0.8])
pl.bar( bins_d[:-1], magserr_md, 1.0, color='r', yerr=magserr_sd )
pl.xlim( (0.0,8.0) )
pl.ylim( (-20.0,180) )
pl.xlabel( 'Distance From Tag (m)')
pl.ylabel( '|Angular Error| (degrees)')
# pl.title( 'STATIC' )
pl.savefig( yaml_config['outimage'] + '_magserr_vs_dist.png' )
# NOTE(review): unlike the other figures there is no pl.close() here, so
# this figure stays open when the next one is created -- confirm intended.
# | Err | vs. MaxRSSI (Bar Plot)
f = pl.figure( figsize=(10,6) )
pl.axes([0.1,0.1,0.65,0.8])
pl.bar( bins_r[:-1], magherr_mr, bins_r[1] - bins_r[0], color='g', yerr=magherr_sr )
pl.xlim( (69,105) )
print 'Max RSSI (argmax):'
print '\t', bins_r[:-1]
print '\t', magherr_mr
print '\t', magherr_sr
pl.ylim( (-20.0,180) )
pl.xlabel( r'Max $\mathsf{E}[P(RSSI|\tau)]$')
pl.ylabel( '|Angular Error| (degrees)')
# pl.title( 'ARGMAX' )
pl.savefig( yaml_config['outimage'] + '_magherr_vs_mrssi.png' )
pl.close()
f = pl.figure( figsize=(10,6) )
pl.axes([0.1,0.1,0.65,0.8])
pl.bar( bins_r[:-1], magserr_mr, bins_r[1] - bins_r[0], color='r', yerr=magserr_sr )
print 'Max RSSI (static):'
print '\t', bins_r[:-1]
print '\t', magserr_mr
print '\t', magserr_sr
pl.xlim( (69,105) )
pl.ylim( (-20.0,180) )
pl.xlabel( r'Max $\mathsf{E}[P(RSSI|\tau)]$')
pl.ylabel( '|Angular Error| (degrees)')
# NOTE(review): the static plot above prints magherr_mr/magherr_sr under the
# '(static)' banner -- looks like magserr_mr/magserr_sr were intended.
pl.savefig( yaml_config['outimage'] + '_magserr_vs_mrssi.png' )
pl.close()
# | Err | vs. location (Surface Plot)
XBINS = 9
YBINS = 5
XMIN = 0.0
XMAX = 11.0
YMIN = 0.0
YMAX = 7.0
xy = np.row_stack([ npstats[0],
                    npstats[1] ]) # xy as 2xN
H,xedges,yedges = np.histogram2d( xy[0], xy[1],
                                  bins=(XBINS,YBINS),
                                  range=[[XMIN,XMAX],
                                         [YMIN,YMAX]])
XS,YS = np.meshgrid( xedges, yedges )
bins_ind_x = np.sum( xy[0][:,np.newaxis] > xedges[:-1], axis = 1 ) - 1 # Tells the index for each of the reads
bins_ind_y = np.sum( xy[1][:,np.newaxis] > yedges[:-1], axis = 1 ) - 1
def get_mean( xi, yi, darr ):
    # Which indices (into 3xN data) correspond to this x-bin and y-bin?
    # Returns the mean of darr over those samples, converted to degrees.
    # NOTE(review): xi is matched against bins_ind_y and yi against
    # bins_ind_x -- consistent with the H.T transpose below, but confirm the
    # swap is intentional before reusing this helper elsewhere.
    data_ind = np.intersect1d( np.where( bins_ind_x == yi )[0], np.where( bins_ind_y == xi )[0] )
    return np.mean( darr[ data_ind ]) * 180 / np.pi
# Masked copies of the transposed 2D histogram: cells with no samples stay
# masked and render blank in pcolor.
magherr_Z = np.ma.array( np.copy(H.T), mask=(H.T < 1) )
magserr_Z = np.ma.array( np.copy(H.T), mask=(H.T < 1) )
for i, (xi,yi) in enumerate( zip( *np.where( magherr_Z.mask == False ))):
    magherr_Z[xi,yi] = get_mean( xi, yi, magherr )
    magserr_Z[xi,yi] = get_mean( xi, yi, magserr )
f = pl.figure( figsize=(10,6) )
pl.hold(True)
pl.pcolor( XS, YS, magherr_Z, cmap=pl.cm.jet ) # or hot?
pl.xlim( (-1,12) )
pl.ylim( (-1,8) )
pl.axis( 'equal' )
pl.clim( 0.0, 180.0 )
pl.colorbar()
pl.xlabel( 'X-Coordinate (meters)' )
pl.ylabel( 'Y-Coordinate (meters)' )
pl.savefig( yaml_config['outimage'] + '_magherr_contour.png' )
pl.close()
f = pl.figure( figsize=(10,6) )
pl.hold(True)
pl.pcolor( XS, YS, magserr_Z, cmap=pl.cm.jet ) # or hot?
pl.xlim( (-1,12) )
pl.ylim( (-1,8) )
pl.axis( 'equal' )
pl.clim( 0.0, 180.0 )
pl.colorbar()
pl.xlabel( 'X-Coordinate (meters)' )
pl.ylabel( 'Y-Coordinate (meters)' )
pl.savefig( yaml_config['outimage'] + '_magserr_contour.png' )
pl.close()
| [
[
1,
0,
0.0139,
0.0028,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0167,
0.0028,
0,
0.66,
0.0068,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0194,
0.0028,
0,
0.... | [
"import roslib",
"roslib.load_manifest( 'geometry_msgs' ) # the pickle files containe Point and Pose Stamped.",
"import rospy",
"from geometry_msgs.msg import PointStamped, PoseStamped",
"import sys",
"import glob",
"import yaml",
"import time",
"import optparse",
"import cPickle as pkl",
"impo... |
import numpy as np, math
import numpy
import math
import sys
##
# Bound the value of a number to be above lower, and lower than upper
# @return a number
def bound(value, lower, upper):
    """Deprecated stub: bound() now lives in hrl_lib.util.

    Always raises RuntimeError to direct callers to the new location.
    """
    raise RuntimeError('math_util.bound moved to hrl_lib.util')
def bound_mat(m, lower, upper):
    """Return a copy of array m with every entry clipped to [lower, upper].

    The two bounds may be supplied in either order; they are swapped if
    needed.  The input array is never modified.
    """
    if lower >= upper:
        lower, upper = upper, lower
    clipped = m.copy()
    clipped[clipped > upper] = upper
    clipped[clipped < lower] = lower
    return clipped
def approx_equal(a, b, epsilon=.001):
    """True when scalars a and b differ by less than epsilon."""
    return (a - epsilon) < b < (a + epsilon)
def approx_equalv(a, b, epsilon=.001):
    """Vector form of approx_equal: True when every element of a is within
    epsilon of the corresponding element of b."""
    diff = np.abs(a - b)
    return np.all(diff < epsilon)
def radians(mat):
    """Convert degrees to radians (elementwise for arrays)."""
    return (np.pi / 180.0) * mat
def degrees(mat):
    """Convert radians to degrees (elementwise for arrays)."""
    return (180.0 / np.pi) * mat
##################################################################
# Angles related functions
##################################################################
def vec_of_ang(a):
    """Return the 2x1 unit column vector [cos(a); sin(a)] for angle a."""
    return numpy.matrix([[numpy.cos(a)],
                         [numpy.sin(a)]])
def ang_of_vec(a):
    """Return the angle (radians) of the 2D column vector a."""
    unit = a / numpy.linalg.norm(a)
    return math.atan2(unit[1, 0], unit[0, 0])
def avg_ang(wa, a, wb, b):
"""
Calculates the average between two angles
wa weight of first angle
a first angle
wb weight of second angle
b second angle
"""
return ang_of_vec(wa * vec_of_ang(a) + wb * vec_of_ang(b))
def blend_ang(alpha, a, b):
    """Interpolate between angles a and b: alpha of a, (1 - alpha) of b."""
    return avg_ang(alpha, a, 1 - alpha, b)
def standard_rad(t):
    """Wrap angle t (radians) to the equivalent angle with magnitude <= pi."""
    two_pi = 2 * numpy.pi
    if t > 0:
        return ((t + numpy.pi) % two_pi) - numpy.pi
    return ((t - numpy.pi) % -two_pi) + numpy.pi
def best_turn_dir(reference, new_angle):
    """Signed shortest rotation from new_angle to reference.

    Positive means turn left, negative means turn right.
    """
    return standard_rad(reference - new_angle)
def cart_of_pol(p):
    """Convert polar column points [r; t] (2xN) to cartesian [x; y]."""
    radii = p[0, :]
    angles = p[1, :]
    xs = numpy.multiply(numpy.cos(angles), radii)
    ys = numpy.multiply(numpy.sin(angles), radii)
    return numpy.vstack((xs, ys))
def pol_of_cart(p):
    """Convert a cartesian column vector [x; y] to polar [r; theta] (2x1)."""
    r = numpy.linalg.norm(p)
    theta = math.atan2(p[1, 0], p[0, 0])
    return numpy.matrix([[r], [theta]])
##################################################################
# NUMPY HELPER FUNCTIONS
##################################################################
##
# Find the maximal position in a 2D array
# @return (r,c)
def argmax2d(mat):
    """Return (row, col) of the maximal entry in a 2D array.

    Note: both indices come back as floats (floor / modulo of the flat
    argmax index), matching the historical behavior of this helper.
    """
    flat_idx = np.argmax(mat)
    ncols = float(mat.shape[1])
    return np.floor(flat_idx / ncols), flat_idx % ncols
##
# Changes the range of an numpy array to betwen [0, 255] from it's [min_value, max_value]
def renormalize(npimage_gray):
    """Linearly rescale a grayscale array from [min, max] to [0, 255].

    Returns a uint8 numpy matrix.
    """
    lo = np.min(npimage_gray)
    hi = np.max(npimage_gray)
    scaled = np.round((npimage_gray - lo) / (hi - lo) * 255.0)
    return np.matrix(scaled, 'uint8')
def list_mat_to_mat(list_mat, axis=0):
    """Stack a list of arrays/matrices into one along the given axis."""
    pieces = tuple(list_mat)
    return np.concatenate(pieces, axis=axis)
def list_of_mat(mat):
    """Yield each column of mat, one slice at a time."""
    n_cols = mat.shape[1]
    for col in range(n_cols):
        yield mat[:, col]
def nearest(mat, target):
    '''
    Sort the columns of a matrix by euclidean distance to a target column.

    @param mat mxn numpy matrix of candidate points (one per column)
    @param target mx1 numpy matrix
    @return (columns of mat reordered nearest-first, 1xn matrix of indices)
    '''
    diff = mat - target
    exponents = np.ones_like(mat[0]) * 2
    dists = np.power(np.sum(np.power(diff, exponents), axis=0), 0.5)
    order = dists.argsort(axis=1)
    return mat[:, order.A1], order
if __name__ == '__main__':
    # Ad-hoc debugging harness: replays the pickled inputs to nearest()
    # and prints the index orderings for manual inspection under pdb.
    import hrl_lib.util as ut
    import pdb
    mat = ut.load_pickle('mat.pkl')
    target = ut.load_pickle('target.pkl')
    diff_vec = mat - target
    pwr = np.ones_like(mat[0])*2
    dist = np.power(np.sum(np.power(diff_vec, pwr),axis=0),0.5)
    sort_order = dist.argsort(axis=1)
    #want indices from sort order
    indices = np.concatenate((np.matrix(range(sort_order.shape[1])), sort_order), 0)[:, sort_order.A1]
    print sort_order
    print indices
    pdb.set_trace()
#def nearest(mat, target):
# '''
# Return a sorted list of the nearest (euclidean dist) element
# of a matrix to a target value and their indeices.
# '''
# mat = mat.T
# target = target.T
# diff_vec = mat - target
# pwr = np.ones_like(mat[0])*2
# dist = np.power(np.sum(np.power(diff_vec ,pwr),axis=1),0.5)
# indices = dist.argsort(axis=0)
# return mat[indices.A1], indices
##################################################################
# MISCELLANEOUS MATH HELPER FUNCTIONS
##################################################################
def blend(alpha, a, b):
    """Linear interpolation: alpha of a plus (1 - alpha) of b."""
    return alpha * a + (1 - alpha) * b
def approx_equal(a, b, epsilon=.001):
    """True when scalars a and b differ by less than epsilon.

    NOTE: this redefines the identical approx_equal earlier in the module;
    kept so the module namespace is unchanged.
    """
    return (a - epsilon) < b < (a + epsilon)
def norm(mat):
    """
    Calculate the L2 norm of each column of mat; returns a 1xN result.
    """
    squared_sum = np.sum(np.power(mat, 2), axis=0)
    return np.power(squared_sum, 0.5)
def rnd(v):
    """Round v to the nearest integer and return it as an int."""
    rounded = round(v)
    return int(rounded)
def point_to_homo(p):
    """Convert column points to homogeneous coordinates by appending a row
    of ones."""
    ones_row = numpy.matrix(numpy.ones((1, p.shape[1])))
    return numpy.vstack((p, ones_row))
""" Convert points back from homogeneous coordinates """
def homo_to_point(p):
return p[0:p.shape[0]-1,:]
| [
[
1,
0,
0.0049,
0.0049,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.0098,
0.0049,
0,
0.66,
0.0345,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0147,
0.0049,
0,
... | [
"import numpy as np, math",
"import numpy",
"import math",
"import sys",
"def bound(value, lower, upper):\n raise RuntimeError('math_util.bound moved to hrl_lib.util')",
"def bound_mat(m, lower, upper):\n if lower >= upper:\n t = lower\n lower = upper\n upper = t\n\n m = m.co... |
#! /usr/bin/python
import roslib
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('move_base_msgs')
roslib.load_manifest('pr2_controllers_msgs')
roslib.load_manifest('rfid_hardware')
import rospy
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
from move_base_msgs.msg import MoveBaseAction
from pr2_controllers_msgs.msg import PointHeadAction, PointHeadGoal
from rfid_datacapture.srv import BagCapture, BagCaptureRequest
from hrl_rfid.srv import RfidSrv
import rfid_datacapture.utils as rdut
import numpy as np, math
class RfidStart(smach.State):
    """SMACH state that puts the RFID reader into 'track' mode for the tag
    named by userdata.tagid, via the RfidSrv service at srv_path."""
    def __init__(self, srv_path):
        smach.State.__init__(self,
                             outcomes=['succeeded', 'aborted'],
                             input_keys=['tagid'])
        self.srv_path = srv_path
        self.init = False

    def execute(self, userdata):
        # Connect lazily on the first execution so construction is cheap.
        if not self.init:
            rospy.wait_for_service(self.srv_path)
            self.srv = rospy.ServiceProxy(self.srv_path, RfidSrv)
            self.init = True
        if self.srv(['track', userdata.tagid]):
            return 'succeeded'
        return 'aborted'
class RfidStop(smach.State):
    """SMACH state that stops the RFID reader via the RfidSrv service at
    srv_path."""
    def __init__(self, srv_path):
        smach.State.__init__(self, outcomes=['succeeded', 'aborted'])
        self.srv_path = srv_path
        self.init = False

    def execute(self, userdata):
        # Connect lazily on the first execution so construction is cheap.
        if not self.init:
            rospy.wait_for_service(self.srv_path)
            self.srv = rospy.ServiceProxy(self.srv_path, RfidSrv)
            self.init = True
        if self.srv(['stop']):
            return 'succeeded'
        return 'aborted'
def sm_cap_360( yaml_fname ):
    """Build the SMACH state machine for a 360-degree RFID capture run.

    The machine iterates over capture poses from the YAML file, drives the
    base to each one (with a manual-skip fallback), and for each configured
    tag: starts a rosbag capture, pans the head left while the reader
    tracks the tag, stops the reader and the capture, and returns the head.
    Returns the (unexecuted) StateMachine.
    """
    # Create a SMACH state machine
    sm = smach.StateMachine( outcomes = ['succeeded','aborted','preempted'],
                             input_keys = ['track_mode'])
    # Open the container
    with sm:
        # Lots of head moving -- abstract into function
        def PointAdd( x, y, z, dur, state, res ):
            # Register a point_head_action state named 'state' that looks at
            # (x,y,z) in /torso_lift_link over 'dur' seconds, then
            # transitions to 'res'.
            pgoal = PointHeadGoal()
            pgoal.target.header.frame_id = '/torso_lift_link'
            pgoal.target.point.x = x
            pgoal.target.point.y = y
            pgoal.target.point.z = z
            pgoal.min_duration = rospy.Duration( dur )
            pgoal.max_velocity = 1.0
            smach.StateMachine.add(
                state,
                SimpleActionState( '/head_traj_controller/point_head_action',
                                   PointHeadAction,
                                   goal = pgoal ),
                transitions = { 'succeeded' : res })
            return
        PointAdd( 1.0, 0.0, 0.35, 5.0, 'INIT_HEAD', 'CAP_START' ) # Go to safe initial conditions
        PointAdd( -1.0, -0.25, 0.35, 7.0, 'CAP_START', 'CAPTURE_POSITIONS' ) # Prepare for lots of "neck craning"
        # Pull the next capture pose from the YAML; 'aborted' here means the
        # pose list is exhausted, which ends the machine successfully.
        smach.StateMachine.add(
            'CAPTURE_POSITIONS',
            rdut.YAMLprocPoses( yaml_fname ),
            remapping = {'next_move_pose':'next_move_pose'}, # output
            transitions = {'aborted':'succeeded',
                           'succeeded':'READY_MOVE'})
        smach.StateMachine.add(
            'READY_MOVE',
            rdut.MoveNotify(),
            transitions = {'succeeded':'MOVE_POSITION'})
        smach.StateMachine.add(
            'MOVE_POSITION',
            SimpleActionState( '/move_base',
                               MoveBaseAction,
                               goal_slots = [ 'target_pose' ]),
            remapping = { 'target_pose' : 'next_move_pose' }, # input
            transitions = {'aborted':'MANUAL_SKIP',
                           'preempted':'aborted',
                           'succeeded':'CAPTURE_TAGS'})
        smach.StateMachine.add(
            'MANUAL_SKIP',
            rdut.ManualSkip(),
            transitions = {'succeeded':'CAPTURE_TAGS', # We already manually positioned the robot
                           'aborted':'CAPTURE_POSITIONS'}) # skip this position and go to next
        # This isn't realy necessary, but it provides a nice way to reuse code.
        smach.StateMachine.add(
            'CAPTURE_TAGS',
            rdut.YAMLprocMultitag( yaml_fname ),
            remapping = {'bagfile_name':'bagfile_name', # output
                         'bagfile_topics':'bagfile_topics', # output
                         'panrate':'panrate',
                         'tagid':'tagid',
                         'tilt_left':'tilt_left',
                         'tilt_right':'tilt_right',
                         'tilt_rate':'tilt_rate',
                         'tilt_block':'tilt_block'}, # output
            transitions = {'aborted':'CAPTURE_POSITIONS', # move to next location
                           'succeeded':'START_BAG_CAPTURE'}) # capture bag
        smach.StateMachine.add(
            'START_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request_slots = ['topics','dest'] ),
            remapping = {'topics':'bagfile_topics',
                         'dest':'bagfile_name'},
            transitions = {'succeeded':'RFID_START'})
        # Initialize RFID reader!
        #   rosservice call /rfid/head_mode -- ['track','OrangeMedBot']
        # Am having issues with service states with request_cb, so just making my own....
        smach.StateMachine.add(
            'RFID_START',
            RfidStart( '/rfid/head_mode' ),
            remapping = {'tagid':'tagid'},
            transitions = {'succeeded':'LOOK_LEFT'})
        # The 27-second pan is the actual capture window.
        PointAdd( -1.0, 0.25, 0.35, 27.0, 'LOOK_LEFT', 'RFID_STOP' )
        smach.StateMachine.add(
            'RFID_STOP',
            RfidStop( '/rfid/head_mode' ),
            transitions = {'succeeded':'STOP_BAG_CAPTURE'})
        # Empty topics/dest request tells the capture service to stop.
        smach.StateMachine.add(
            'STOP_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request = BagCaptureRequest('','') ),
            transitions = {'succeeded':'LOOK_RIGHT'})
        PointAdd( -1.0, -0.25, 0.35, 8.0, 'LOOK_RIGHT', 'CAPTURE_TAGS' )
    return sm
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('--yaml', action='store', type='string', dest='yaml',
                 help='Capture description yaml file', default='')
    opt, args = p.parse_args()
    if opt.yaml == '':
        print 'ERROR: Must specify YAML file.'
        exit()
    rospy.init_node('rfid_head_capture')
    # Build and run the capture state machine; 'track_mode' is consumed by
    # the machine's input_keys.
    sm = sm_cap_360( opt.yaml )
    sm.userdata.track_mode = 'track'
    outcome = sm.execute()
| [
[
1,
0,
0.0105,
0.0052,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0157,
0.0052,
0,
0.66,
0.0556,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0209,
0.0052,
0,
0.... | [
"import roslib",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('move_base_msgs')",
"roslib.load_manifest('pr2_controllers_msgs')",
"roslib.load_manifest('rfid_hardware')",
"import rospy",
"import smach",
"from smach_ros import SimpleActionState, ServiceState, IntrospectionServer",
... |
#!/usr/bin/python
# Basically a giant script.
import roslib
roslib.load_manifest( 'geometry_msgs' ) # the pickle files containe Point and Pose Stamped.
import rospy
from geometry_msgs.msg import PointStamped, PoseStamped
import sys
import glob
import yaml
import time
import optparse
import cPickle as pkl
import numpy as np, math
import pylab as pl
import friis
PLOT = False
# glob_files: '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/datacap/*.pkl'
# filters:
# antennas:
# PR2_Head: '/head_rfid'
# tags:
# 'datacap ':
# --- Run configuration ------------------------------------------------------
# As a script: YAML path and plot flag from the command line; when imported,
# fall back to a hard-coded config path.
if __name__ == '__main__':
    p = optparse.OptionParser()
    p.add_option('--yaml', action='store', type='string', dest='yaml', default='',
                 help='yaml file that describes this run.')
    p.add_option('--plot', action='store_true', dest='plot',
                 help='Pop-up the resulting plot')
    opt, args = p.parse_args()
    yaml_fname = opt.yaml
    PLOT = opt.plot
else:
    yaml_fname = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/friis_plot_datacap.yaml'
# SCRIPT:
if not yaml_fname:
    print 'YAML file required!'
    exit()
else:
    # NOTE(review): yaml.load without an explicit Loader can execute
    # arbitrary tags -- acceptable only because the config is trusted.
    f = open( yaml_fname )
    yaml_config = yaml.load( f )
    f.close()
# Load all pkl files into one huge dictionary.
# d = { 'tagid1': [ PROC_READ1, PROC_READ2, ... ],
# ...
# }
def add_files( d, arg ):
    """Reduce-style accumulator: load pickle 'fname' and merge its per-tag
    read lists into dict d ( { tagid: [reads, ...] } ).

    arg is (fname, fcount).  NOTE(review): reads the module-level 'fnames'
    for the progress message, so it only works after fnames is defined.
    """
    fname, fcount = arg
    print 'Loading (%d of %d): %s' % (fcount, len(fnames), fname)
    f = open( fname, 'r' )
    d_new = pkl.load( f )
    f.close()
    for k in d_new.keys():
        if not d.has_key( k ):
            d[k] = []
        d[k] += d_new[k]
    return d
# Calculate Useful Values
def friis_pwr_tag( reading ):
    """Friis-model estimate of the power incident at the tag, in dBm.

    reading is a processed RFID read:
      [ [r_rdr, theta_rdr, phi_rdr],        # reader-frame spherical coords
        [r_tag, theta_tag, phi_tag],        # tag-frame spherical coords
        [rssi, antenna_name, tagID],
        [theta_rot_map, theta_tag_map],
        [tag_map, rdr_map, rot_map] ]
    Only the two spherical-coordinate entries are used here; the dead
    unpacking of the other fields (read, d_rot, ps, r_tag) was removed.
    """
    r_rdr, theta_rdr, phi_rdr = reading[0]
    theta_tag, phi_tag = reading[1][1], reading[1][2]
    watts = friis.pwr_inc_tag( r_rdr,
                               friis.patch.G, theta_rdr, phi_rdr,
                               friis.dipole.G, theta_tag, phi_tag )
    return friis.WattsToDBm( watts )
def friis_pwr_rdr( reading ):
    """Friis-model estimate of the power incident back at the reader, in dBm.

    Same reading layout as friis_pwr_tag; only the two spherical-coordinate
    entries are used.  The dead unpacking of the other fields (read, d_rot,
    ps, r_tag) was removed.
    """
    r_rdr, theta_rdr, phi_rdr = reading[0]
    theta_tag, phi_tag = reading[1][1], reading[1][2]
    watts = friis.pwr_inc_rdr( r_rdr,
                               friis.patch.G, theta_rdr, phi_rdr,
                               friis.dipole.G, theta_tag, phi_tag )
    return friis.WattsToDBm( watts )
fnames = reduce( lambda x,y: x+y, [ glob.glob(i) for i in yaml_config['glob_files'] ], [] )
# Use the cached combined pickle when it exists; otherwise build it from the
# per-capture pickles, filter by tag and antenna, write it out, and exit
# (the script must then be re-run to use the cache).
if len(glob.glob(yaml_config['use_combined'])) > 0:
    print 'Loading pickle: %s' % (yaml_config['use_combined'])
    f = open( yaml_config['use_combined'], 'r' )
    data = pkl.load( f )
    f.close()
    print 'Done.'
else:
    f = open( yaml_config['use_combined'], 'w' )
    d = reduce( add_files, zip(fnames,range(len(fnames))), {} )
    # Apply Filters:
    # Start with just the desired tagids
    all_reads = reduce( lambda x,y: x+y,
                        [ d[k] for k in yaml_config['filters']['tags'] if d.has_key(k) ],
                        [] )
    print '*** File \'%s\' had a total of %d reads ***' % ( yaml_fname, len( all_reads ))
    # Filter based on antennas
    # Each read r has the layout:
    #   [ [r_rdr, theta_rdr, phi_rdr],       # all floats
    #     [r_tag, theta_tag, phi_tag],       # all floats
    #     [rr.rssi, rr.antenna_name, rr.tagID ], # int, string, string
    #     [theta_rot_map, theta_tag_map],    # floats (radians)
    #     [ tag_map, rdr_map, rot_map ] ]    # geometry_msgs/PoseStamped
    ant_dict = dict.fromkeys( yaml_config['filters']['antennas'] )
    filt_reads = [ r for r in all_reads if ant_dict.has_key( r[2][1] ) ]
    reads = filt_reads
    p_inc_tag = np.array([ friis_pwr_tag( r ) for r in reads ]) # in dBm!
    p_inc_rdr = np.array([ friis_pwr_rdr( r ) for r in reads ]) # in dBm!
    rssi = np.array([ r[2][0] for r in reads ])
    data = np.row_stack([ p_inc_tag,
                          p_inc_rdr,
                          rssi ])
    print 'Dumping data into combined pickle file: %s ' % (yaml_config['use_combined'])
    pkl.dump( data, f, -1 )
    f.close()
    print 'Done. Re-run.'
    exit()
p_inc_tag = data[0]
p_inc_rdr = data[1]
rssi = data[2]
pos_mask = rssi != -1
neg_mask = rssi == -1
if len(pos_mask) == 0:
print '### File \'%s\' had no positive reads -- exiting ###' % ( yaml_fname )
exit()
if len(neg_mask) == 0:
print '### File \'%s\' had no positive reads -- exiting ###' % ( yaml_fname )
exit()
# P^inc_rdr vs. RSSI.
def plot_pincrdr( f = None ):
    """Scatter-plot measured RSSI against the Friis-predicted power at the
    reader (positive reads only), optionally overlaying the fitted
    slope-0.75 Friis line.  Returns the matplotlib figure."""
    if not f:
        f = pl.figure( figsize=(10,6) )
    pl.axes([0.1,0.1,0.65,0.8])
    pl.plot( p_inc_rdr[pos_mask], rssi[pos_mask], 'bx', alpha = 0.5 )
    pl.xlabel( '$P^{inc}_{rdr}$ (dBm)')
    pl.ylabel( 'RSSI')
    pl.title( 'Measured RSSI vs Predicted Power at Reader' )
    xval = None
    yval = None
    if yaml_config.has_key( 'rdr_calcfriis' ):
        # Old way: take max Pincrdr
        #   ind = p_inc_rdr >= np.max( p_inc_rdr ) - yaml_config['rdr_calcfriis']['within_max']
        # New way: take region we know to be linear in Friis. Note, this also elimates negative reads!
        ind = np.all( np.row_stack([
            rssi >= yaml_config['rdr_calcfriis']['rssi_min'],
            rssi <= yaml_config['rdr_calcfriis']['rssi_max'] ]), axis = 0)
        xval = np.mean( p_inc_rdr[ ind ] ) # Friis line runs throught this point
        yval = np.mean( rssi[ ind ] )
        # CURIOUS: Both methods are the same.  I could probably show that, but I'm tired!
        # We know the slope is 0.75.  y = 0.75 * x + c ==> c = mean( y - 0.75 x )
        #   c = np.mean( rssi[ind] - 0.75 * p_inc_rdr[ind] )
        #   xval = -19.19 # arbitrary, but convenient
        #   yval = 0.75 * xval + c
        print 'Calculated Friis Line:\n\txval: %3.2f\n\tyval: %3.2f' % (xval,yval)
    if yaml_config.has_key( 'rdr_drawfriis' ):
        # Explicit line anchor from the config overrides the calculated one.
        xval = yaml_config[ 'rdr_drawfriis' ][ 'xval' ]
        yval = yaml_config[ 'rdr_drawfriis' ][ 'yval' ]
    if xval and yval:
        pl.hold( True )
        # Slope of line (from Matt's measurements) should be 0.75
        xs = np.linspace( yaml_config['rdr_axis'][0], yaml_config['rdr_axis'][1], 100 )
        ys = 0.75 * ( xs - xval ) + yval # pt-slope form
        pl.plot( xs, ys, 'g-', linewidth = 2.0 )
        pl.legend(['Positive Reads','Friis Model Fit'], loc=(1.03,0.2))
    else:
        pl.legend(['Positive Reads'], loc=(1.03,0.2))
    pl.axis( yaml_config['rdr_axis'] )
    return f
# Render and save the RSSI-vs-predicted-reader-power scatter plot.
plot_pincrdr()
pl.savefig( yaml_config['outimage'] + '_pincrdr.png' )
# P^inc_rdr vs. P(read)
def plot_pincrdr_probs( f = None ):
    """Stacked bar plot of P(read) / P(no read) versus Friis-predicted power
    at the reader, binned per the YAML rdr_histbins config.  Returns the
    matplotlib figure."""
    hist, bins = np.histogram( p_inc_rdr,
                               bins = yaml_config['rdr_histbins']['bins'],
                               range = (yaml_config['rdr_histbins']['min'],
                                        yaml_config['rdr_histbins']['max']) )
    bin_width = bins[1] - bins[0]
    # Find out which bin each read belongs to.
    # Sanity check: print [ sum( bins_ind == i ) for i in xrange(len(hist)) ] => equals hist
    bins_ind = np.sum( p_inc_rdr[:,np.newaxis] > bins[:-1], axis = 1 ) - 1
    prob_read = [ sum(rssi[bins_ind == i] != -1)*1.0 / hist[i] # positive reads / total reads for bin i
                  for i in xrange(len(hist)) # same as len(bins)-1
                  if hist[i] != 0 ] # only where we have data! (also prevents div by 0)
    if not f:
        f = pl.figure( figsize=(10,6) )
    pl.axes([0.1,0.1,0.65,0.8])
    pos_bars = pl.bar([ bins[i] for i in xrange(len(hist)) if hist[i] != 0 ], # Only plot the bars for places we have data!
                      prob_read, # This is only defined for ones that have data!
                      width = bin_width,
                      color = 'b',
                      alpha = 0.7 )
    pl.hold( True )
    neg_bars = pl.bar([ bins[i] for i in xrange(len(hist)) if hist[i] != 0 ], # Only plot the bars for places we have data!
                      [ 1.0 - p for p in prob_read ], # This is only defined for ones that have data!
                      width = bin_width,
                      bottom = prob_read,
                      color = 'r',
                      alpha = 0.7 )
    pl.axis([ yaml_config['rdr_axis'][0], yaml_config['rdr_axis'][1], 0.0, 1.0 ])
    pl.xlabel( '$P^{inc}_{rdr}$ (dBm)')
    pl.ylabel( 'Probability of Tag Read / No-Read')
    pl.title( 'Probability of Tag Read / No-Read vs Predicted Power at Reader ' )
    pl.legend((pos_bars[0], neg_bars[0]), ('P( read )', 'P( no read )'), loc=(1.03,0.2))
    return f
# Render and save the read-probability-vs-reader-power bar plot.
plot_pincrdr_probs()
pl.savefig( yaml_config['outimage'] + '_pincrdr_probs.png' )
# P^inc_tag vs. P(read)
def plot_pinctag_probs( f = None ):
    """Tag-side counterpart of the reader plots: saves an RSSI-vs-predicted
    power-at-tag scatter plot, then a P(read)/P(no read) bar plot, both
    binned per the YAML tag_histbins config.

    NOTE(review): the first pl.figure() below is not assigned to f, so when
    f is None a *second* figure is created at the 'if not f' check -- the
    scatter and bar plots end up on different figures.  Confirm intended.
    """
    pl.figure( figsize=(10,6) )
    pl.axes([0.1,0.1,0.65,0.8])
    pl.plot( p_inc_tag[pos_mask], rssi[pos_mask], 'bx', alpha = 0.5 )
    pl.xlabel( '$P^{inc}_{tag}$ (dBm)')
    pl.ylabel( 'RSSI')
    pl.title( 'Measured RSSI vs Predicted Power at Tag' )
    pl.legend(['Positive Reads'], loc=(1.03,0.2))
    pl.axis( yaml_config['tag_axis'] )
    pl.savefig( yaml_config['outimage'] + '_pinctag.png' )
    hist, bins = np.histogram( p_inc_tag,
                               bins = yaml_config['tag_histbins']['bins'],
                               range = (yaml_config['tag_histbins']['min'],
                                        yaml_config['tag_histbins']['max']) )
    bin_width = bins[1] - bins[0]
    # Find out which bin each read belongs to.
    # Sanity check: print [ sum( bins_ind == i ) for i in xrange(len(hist)) ] => equals hist
    bins_ind = np.sum( p_inc_tag[:,np.newaxis] > bins[:-1], axis = 1 ) - 1
    prob_read = [ sum(rssi[bins_ind == i] != -1)*1.0 / hist[i] # positive reads / total reads for bin i
                  for i in xrange(len(hist)) # same as len(bins)-1
                  if hist[i] != 0 ] # only where we have data! (also prevents div by 0)
    if not f:
        f = pl.figure( figsize=(10,6) )
    pl.axes([0.1,0.1,0.65,0.8])
    pos_bars = pl.bar([ bins[i] for i in xrange(len(hist)) if hist[i] != 0 ], # Only plot the bars for places we have data!
                      prob_read, # This is only defined for ones that have data!
                      width = bin_width,
                      color = 'b',
                      alpha = 0.7 )
    pl.hold( True )
    neg_bars = pl.bar([ bins[i] for i in xrange(len(hist)) if hist[i] != 0 ], # Only plot the bars for places we have data!
                      [ 1.0 - p for p in prob_read ], # This is only defined for ones that have data!
                      width = bin_width,
                      bottom = prob_read,
                      color = 'r',
                      alpha = 0.7)
    pl.axis([ yaml_config['tag_axis'][0], yaml_config['tag_axis'][1], 0.0, 1.0 ])
    pl.xlabel( '$P^{inc}_{tag}$ (dBm)')
    pl.ylabel( 'Probability of Tag Read / No-Read')
    pl.title( 'Probability of Tag Read / No-Read vs Predicted Power at Tag' )
    pl.legend((pos_bars[0], neg_bars[0]), ('P( read )', 'P( no read )'), loc=(1.03,0.2))
    # pl.legend(['P( read )'], loc=(1.03,0.2))
    pl.savefig( yaml_config['outimage'] + '_pinctag_probs.png' )
    # return f
# return f
# Render the tag-side plots (they save themselves); pop up windows if asked.
plot_pinctag_probs()
if PLOT:
    pl.show()
| [
[
1,
0,
0.0152,
0.003,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0183,
0.003,
0,
0.66,
0.0278,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0213,
0.003,
0,
0.66,... | [
"import roslib",
"roslib.load_manifest( 'geometry_msgs' ) # the pickle files containe Point and Pose Stamped.",
"import rospy",
"from geometry_msgs.msg import PointStamped, PoseStamped",
"import sys",
"import glob",
"import yaml",
"import time",
"import optparse",
"import cPickle as pkl",
"impo... |
#!/usr/bin/python
import roslib
roslib.load_manifest( 'tf' )
roslib.load_manifest( 'hrl_lib' )
import rospy
import tf
from hrl_lib.cmd_process import CmdProcess
from threading import Thread
import time
def bagplay( fname ):
    # Build a (not yet started) 'rosbag play' subprocess for the given bag.
    #
    # Typical usage:
    #   bp = bagplay( my_file_name )
    #   bp.run()                      # starts the execution
    #   while not bp.is_finished():
    #       rospy.sleep( 0.5 )
    #   bp.kill()                     # not necessary
    command = 'rosbag play --clock %s -r 2.0 -q' % fname
    rospy.logout( 'Launching bag file: %s' % fname )
    return CmdProcess( command.split() )
def sim_safe_sleep( dur, real_time_sleep = 0.05 ):
    # Sleep for `dur` seconds of ROS (possibly simulated / bag-driven) time,
    # polling in `real_time_sleep` wall-clock increments.  Exits immediately
    # if ROS time stops advancing (the bag driving /clock is not playing).
    start = rospy.Time.now().to_sec()
    last = rospy.Time.now().to_sec()
    while last - start < dur:
        time.sleep( real_time_sleep )
        now = rospy.Time.now().to_sec()
        if now == last:  # rostime frozen => bag not playing; bail out now.
            return
        last = now
    return
class TFthread( Thread ):
    # Background thread that repeatedly broadcasts one static TF transform
    # (parent_frame -> child_frame) described by a PoseStamped-like dictionary.
    # For example: (YAML syntax)
    #   child_frame: '/datacap'
    #   parent_frame: '/map'
    #   x_pos: 6.480
    #   y_pos: 2.865
    #   z_pos: 1.291
    #   x_orient: 0.0
    #   y_orient: 0.0
    #   z_orient: 0.0
    #   w_orient: 1.0
    def __init__( self, d ):
        # d: dict with the keys shown in the class comment.
        self.d = d
        Thread.__init__( self )
        # cf = self.d[ 'child_frame' ]
        # rospy.init_node( 'tf_thread_' + cf.strip('/') )
        self.bc = tf.TransformBroadcaster()
        self.should_run = True
        self.start()  # broadcasting begins as soon as the object is built
    def run( self ):
        rospy.logout( 'TFthread: Starting %s ' % self.d[ 'child_frame' ] )
        while self.should_run and not rospy.is_shutdown():
            self.bc.sendTransform( ( self.d[ 'x_pos' ],
                                     self.d[ 'y_pos' ],
                                     self.d[ 'z_pos' ] ),
                                   ( self.d[ 'x_orient' ],
                                     self.d[ 'y_orient' ],
                                     self.d[ 'z_orient' ],
                                     self.d[ 'w_orient' ] ),
                                   rospy.Time.now(),
                                   self.d[ 'child_frame' ],
                                   self.d[ 'parent_frame' ] )
            try:
                sim_safe_sleep( 0.10 ) # 10 Hz
            except:
                pass # ROS exception (eg. Ctrl-C).
        rospy.logout( 'TFthread: Stopping %s ' % self.d[ 'child_frame' ] )
    def stop( self ):
        # Kill off the poller thread.
        self.should_run = False
        self.join(5)
        if (self.isAlive()):  # NOTE(review): isAlive() is the Python-2 Thread API
            raise RuntimeError('TFthread: Unable to stop thread %s ' % self.d[ 'child_frame' ] )
| [
[
1,
0,
0.0323,
0.0108,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.043,
0.0108,
0,
0.66,
0.1,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0538,
0.0108,
0,
0.66,
... | [
"import roslib",
"roslib.load_manifest( 'tf' )",
"roslib.load_manifest( 'hrl_lib' )",
"import rospy",
"import tf",
"from hrl_lib.cmd_process import CmdProcess",
"from threading import Thread",
"import time",
"def bagplay( fname ):\n # to use:\n # bp = bagplay( my_file_name )\n # bp.run(... |
#! /usr/bin/python
import roslib
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('geometry_msgs')
roslib.load_manifest('move_base_msgs')
roslib.load_manifest('tf')
roslib.load_manifest('sound_play')
roslib.load_manifest('hrl_lib')
import rospy
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
import tf
import tf.transformations as tft
from geometry_msgs.msg import PoseStamped, Quaternion
from sound_play.msg import SoundRequest
from rfid_datacapture.srv import BagCapture
from sound_play.msg import SoundRequest
from hrl_lib.cmd_process import CmdProcess
import yaml
import numpy as np, math
import os
import time
class BagCap():
    # ROS service wrapper around a 'rosbag record' subprocess.  A request with
    # a non-empty topic string starts a recording to req.dest; a request with
    # an empty topic string kills the current recording.
    def __init__( self, topic_name = 'capture' ):
        # Service is exposed at /bag_cap/<topic_name>.
        self._srv = rospy.Service( 'bag_cap/'+topic_name, BagCapture, self.process_srv )
    def process_srv( self, req ):
        # -O names the output bag file; the topic list follows.
        cmd = 'rosbag record -O %s ' % req.dest
        cmd += req.topics
        if req.topics != '':
            self.bag_cap = CmdProcess( cmd.split() )
            self.bag_cap.run()
        else:
            # NOTE(review): assumes a capture was started earlier; otherwise
            # self.bag_cap is unset and this raises AttributeError.
            self.bag_cap.kill()
        return True
class YAMLproc(smach.State):
    # SMACH state that iterates through the 'servo_cap' captures listed in a
    # YAML file.  Each execute() emits one capture pose ([x, y, heading_deg]
    # in /map) plus the tagid / bagfile userdata; returns 'aborted' when the
    # list is exhausted.
    def __init__(self, fname):
        smach.State.__init__(self,
                             outcomes=['succeeded', 'aborted'],
                             output_keys = ['next_move_pose','bagfile_name',
                                            'bagfile_topics','tagid'])
        self.fname = fname
        self.d = None  # YAML contents; loaded lazily on first execute()
    def execute(self, userdata):
        if not self.d:
            f = open( self.fname, 'r' )
            self.d = yaml.load( f )
            self.size = len( self.d['servo_cap']['captures'] )
            self.ind = 0
            f.close()
        if self.ind < self.size:
            rospy.logout( 'YamlProc: issuing %d of %d' % (self.ind+1, self.size))
            ps = PoseStamped()
            ps.header.frame_id = '/map'
            ps.header.stamp = rospy.Time(0)
            ps.pose.position.x = self.d['servo_cap']['captures'][ self.ind ][0]
            ps.pose.position.y = self.d['servo_cap']['captures'][ self.ind ][1]
            ang = self.d['servo_cap']['captures'][ self.ind ][2]
            # Capture heading is given in degrees; convert to a quaternion.
            q = Quaternion( *tft.quaternion_from_euler( 0.0, 0.0, math.radians( ang )))
            ps.pose.orientation = q
            self.ind += 1
            userdata.next_move_pose = ps
            userdata.tagid = self.d['servo_cap']['tagid']
            # Bagfile name gets a wall-clock suffix so runs never collide.
            userdata.bagfile_name = self.d['servo_cap']['bagfile_path'] + str(int(rospy.Time.now().to_sec()))
            userdata.bagfile_topics = self.d['servo_cap']['bagfile_topics']
            return 'succeeded'
        else:
            return 'aborted' # done!
class YAMLprocPoses(smach.State):
    # SMACH state that iterates through the 'rad_cap' capture poses in a YAML
    # file, emitting one PoseStamped per execute(); 'aborted' when done.
    def __init__(self, fname):
        smach.State.__init__(self,
                             outcomes=['succeeded', 'aborted'],
                             output_keys = ['next_move_pose'])
        self.fname = fname
        self.d = None  # YAML contents; loaded lazily on first execute()
    def execute(self, userdata):
        if not self.d:
            f = open( self.fname, 'r' )
            self.d = yaml.load( f )
            self.size = len( self.d['rad_cap']['captures'] )
            self.ind = 0
            f.close()
            self.t0 = rospy.Time.now().to_sec()  # for per-capture timing
        if self.ind < self.size:
            rospy.logout( 'YamlProcPoses: issuing %d of %d' % (self.ind+1, self.size))
            rospy.logout( 'YamlProcPoses: Time to capture %2.2f' % ( rospy.Time.now().to_sec() - self.t0 ))
            self.t0 = rospy.Time.now().to_sec()
            ps = PoseStamped()
            ps.header.frame_id = '/map'
            ps.header.stamp = rospy.Time(0)
            ps.pose.position.x = self.d['rad_cap']['captures'][ self.ind ][0]
            ps.pose.position.y = self.d['rad_cap']['captures'][ self.ind ][1]
            ang = self.d['rad_cap']['captures'][ self.ind ][2]
            # Heading is in degrees in the YAML; convert to quaternion.
            q = Quaternion( *tft.quaternion_from_euler( 0.0, 0.0, math.radians( ang )))
            ps.pose.orientation = q
            self.ind += 1
            userdata.next_move_pose = ps
            return 'succeeded'
        else:
            return 'aborted' # done!
class YAMLprocMultitag(smach.State):
    # SMACH state that walks the cross-product of tag ids and tilt angles from
    # the 'rad_cap' section of a YAML file.  Each execute() emits the capture
    # parameters for one (tag, tilt) pair; returns 'aborted' (and resets) when
    # every pair has been issued.
    def __init__(self, fname):
        smach.State.__init__(self,
                             outcomes=['succeeded', 'aborted'],
                             output_keys = ['bagfile_name',
                                            'bagfile_topics',
                                            'tagid',
                                            'panrate',
                                            'tilt_left',
                                            'tilt_right',
                                            'tilt_rate',
                                            'tilt_block'])
        self.fname = fname
        self.d = None  # YAML contents; loaded lazily on first execute()
    def execute(self, userdata):
        if not self.d:
            f = open( self.fname, 'r' )
            self.d = yaml.load( f )
            self.tagids = self.d['rad_cap']['tagids'].keys()
            self.keys_list = list( self.tagids )   # tags still to process
            f.close()
            self.tilt_angs = []                    # tilts left for current tag
        if not self.keys_list and not self.tilt_angs: # Done! reset.
            self.keys_list = list( self.tagids )
            return 'aborted'
        if not self.tilt_angs: # get next tagid
            self.tid = self.keys_list.pop()
            self.tilt_angs = list( self.d['rad_cap']['tagids'][self.tid]['tilts'] )
            rospy.logout( 'setting tid: %s' % self.tid )
        print 'Tilt angs: ', self.tilt_angs
        ta = self.tilt_angs.pop()[0]  # tilt angle in radians
        rospy.logout( 'Using tilt angle: %2.2f' % ta )
        userdata.tilt_left = -1.0 * ta  # symmetric sweep about level
        userdata.tilt_right = ta
        userdata.tilt_rate = 0.4
        userdata.tilt_block = 1
        tad = math.degrees( ta )
        rospy.logout( 'YamlProcMultitag: issuing %d of %d for tag \'%s\'' %
                      (len(self.tagids) - len(self.keys_list), len(self.tagids), self.tid))
        userdata.tagid = self.tid
        tmp = self.tid
        path = self.d['rad_cap']['bagfile_path']
        tsec = str(int(rospy.Time.now().to_sec()))
        tads = str(int( tad ))
        # Bag name encodes wall-clock time, tilt (deg), and the tag id.
        userdata.bagfile_name = path + tsec + '_' + tads + '_' + tmp.replace(' ', '' )
        userdata.bagfile_topics = self.d['rad_cap']['bagfile_topics']
        userdata.panrate = 30.0
        return 'succeeded'
class ManualSkip(smach.State):
    # SMACH state that asks the operator (audibly and on the console) whether
    # the robot was positioned manually ('succeeded') or this capture location
    # should be skipped ('aborted').
    def __init__(self):
        smach.State.__init__(self,outcomes=['succeeded', 'aborted'])
        self.init = None  # publisher created lazily on first execute()
    def execute(self, userdata):
        if not self.init:
            self.init = True
            self.pub = rospy.Publisher( 'robotsound', SoundRequest )
            rospy.sleep( 1.0 )  # give the publisher time to connect
        self.pub.publish( SoundRequest( -3, 1, 'MANUAL or SKIP' ))
        ui = ''
        while not (ui == '0' or ui == '1'):
            print '\'0\' to Position Manually (position robot first)'
            print '\'1\' to skip this position'
            ui = raw_input()
        if ui == '0':
            return 'succeeded' # Robot manually positioned
        else:
            return 'aborted' # Forget this position
class MoveNotify(smach.State):
    # SMACH state that announces 'Ready to Move' and blocks until the operator
    # acknowledges on the console.  Always returns 'succeeded'.
    def __init__(self):
        smach.State.__init__(self,outcomes=['succeeded', 'aborted'])
        self.init = None  # publisher created lazily on first execute()
    def execute(self, userdata):
        if not self.init:
            self.init = True
            self.pub = rospy.Publisher( 'robotsound', SoundRequest )
            rospy.sleep( 1.0 )  # give the publisher time to connect
        self.pub.publish( SoundRequest( -3, 1, 'Ready to Move' ))
        ui = ''
        while not (ui == '0' or ui == '1'):
            print '\'0\' or \'1\' when Ready.'
            ui = raw_input()
        return 'succeeded'
if __name__ == '__main__':
    # Stand-alone entry point: expose the bag-capture start/stop service.
    rospy.init_node( 'bag_capture_services' )
    bc = BagCap()
    rospy.spin()
# Manual usage examples:
# rosservice call /bag_cap/capture '/tf' 'out'   # start recording /tf to 'out'
# rosservice call /bag_cap/capture '' ''         # stop the recording
| [
[
1,
0,
0.0087,
0.0043,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.013,
0.0043,
0,
0.66,
0.0357,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0174,
0.0043,
0,
0.6... | [
"import roslib",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('geometry_msgs')",
"roslib.load_manifest('move_base_msgs')",
"roslib.load_manifest('tf')",
"roslib.load_manifest('sound_play')",
"roslib.load_manifest('hrl_lib')",
"import rospy",
"import smach",
"from smach_ros imp... |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('tf')
roslib.load_manifest('rfid_pf')
import rospy
import rfid_datacapture.math_util as mu
import sm_aware_home_explore as ahe
import glob
import yaml
import tf
import tf.transformations as tft
import json
import numpy as np, math
import cPickle as pkl
import template
import rfid_pf.pf_stats as pfs
res = []
def process_trialobj( trial_num, obj_num, servo_yn ):
    # Compute localization-accuracy statistics for one (trial, object) pair.
    #
    #   trial_num - trial index (0-8)
    #   obj_num   - object/tag index (0-8); ground-truth location is
    #               (trial_num + obj_num) % 9 by experiment design
    #   servo_yn  - False: search-only results; True: search + servo results
    #
    # Returns (success_flag, results_dict, None).  success_flag is False when
    # the trial produced no positive reads for this tag.
    # NOTE(review): the local `res` shadows the module-level `res = []` list.
    obj_name = ahe.tdb[ obj_num ][0]
    tname = obj_name.replace( ' ', '' )
    loc = (trial_num + obj_num) % 9
    loc_name = ahe.pts[loc][0]
    loc_pos = np.array(ahe.pts[loc][1]) # Tag ground-truth location
    print 'Trial %d with Object %d (%s) at Position %d (%s)' % (trial_num, obj_num, obj_name, loc, loc_name)
    fname = 'search_aware_home/woot_150_'+str(trial_num)+'_reads.pkl'
    f = open( fname, 'r' )
    summary_search = pkl.load( f )
    f.close()
    # Positive reads = RSSI present and tag id matches the target object.
    pos_readings_search = sum([ True for p in summary_search if p.read.rssi != -1 and p.read.tagID == obj_name ])
    tot_readings_search = len( summary_search )
    search_positions_fname = 'search_aware_home/woot_150_' + str(trial_num) + '_tag_' + tname + '.yaml'
    servo_positions_fname = 'search_aware_home/woot_150_' + str(trial_num) + '_tag_' + tname + '_end.txt'
    servo_fname = 'search_aware_home/woot_150_' + str(trial_num) + '_tag_' + tname + '_servo.pkl'
    if pos_readings_search == 0:
        # No positive reads at all: emit a placeholder result record.
        print '\t No results for this instance.'
        res = { 'loc': loc,
                'obj_num': obj_num,
                'trial_num': trial_num,
                'pos_readings': pos_readings_search,
                'tot_readings':tot_readings_search,
                'best_pos': '--',
                'orient_est': '--',
                'dxy': '--',
                'dtheta': '--',
                'servo_yn': servo_yn,
                'other': { 'w_mass': 0.0, # specific to pf
                           'orient_fit': 0.0 }}
        return False, res, None
    # All fnames should be defined if we got here!
    f = open( search_positions_fname )
    y = yaml.load( f )
    f.close()
    # "Best" Location determined by search
    efq = tft.euler_from_quaternion
    search_theta = efq( [ y['pose']['orientation']['x'],
                          y['pose']['orientation']['y'],
                          y['pose']['orientation']['z'],
                          y['pose']['orientation']['w'] ])[-1]
    search_pos = np.array([ y['pose']['position']['x'],
                            y['pose']['position']['y'],
                            y['pose']['position']['z'] ])
    # Heading error = estimated yaw minus bearing from robot to true tag.
    search_true_theta = np.arctan2( loc_pos[1] - search_pos[1], # y / x
                                    loc_pos[0] - search_pos[0] )
    search_theta_diff = mu.standard_rad( search_theta - search_true_theta )
    # search_pos_diff = np.linalg.norm( search_pos - loc_pos )
    search_pos_diff = np.linalg.norm( search_pos[0:2] - loc_pos[0:2] )  # planar (xy) error only
    # Should we return the search only results?
    if not servo_yn:
        res = { 'loc': loc,
                'obj_num': obj_num,
                'trial_num': trial_num,
                'pos_readings': pos_readings_search,
                'tot_readings':tot_readings_search,
                'best_pos': search_pos,
                'orient_est': search_theta,
                'dxy': search_pos_diff,
                'dtheta': search_theta_diff,
                'servo_yn': servo_yn }
        return True, res, None
    # Else... compute the SEARCH PLUS SERVO results
    # update the stats for pos reads
    f = open( servo_fname, 'r' )
    summary_servo = pkl.load( f )
    f.close()
    pos_readings_servo = sum([ True for p in summary_servo if p.read.rssi != -1 and p.read.tagID == obj_name ])
    tot_readings_servo = len( summary_servo )
    # Location after Servoing -- parsed from the 'tf echo'-style text dump:
    # ['At time 1313069718.853\n',
    #  '- Translation: [2.811, 1.711, 0.051]\n',
    #  '- Rotation: in Quaternion [0.003, 0.001, -0.114, 0.993]\n',
    #  '            in RPY [0.005, 0.003, -0.229]\n',
    #  'At time 1313069719.853\n',
    #  '- Translation: [2.811, 1.711, 0.051]\n',
    #  '- Rotation: in Quaternion [0.003, 0.001, -0.114, 0.993]\n',
    #  '            in RPY [0.005, 0.002, -0.229]\n']
    f = open( servo_positions_fname )
    r = f.readlines()
    f.close()
    rpy = r[-1].find('RPY')+3          # yaw from the last RPY line
    servo_theta = json.loads( r[-1][rpy:] )[-1]
    tion = r[-3].find('tion:')+5       # xyz from the last Translation line
    servo_pos = np.array(json.loads( r[-3][tion:] ))
    servo_true_theta = np.arctan2( loc_pos[1] - servo_pos[1], # y / x
                                   loc_pos[0] - servo_pos[0] )
    servo_theta_diff = mu.standard_rad( servo_theta - servo_true_theta )
    # servo_pos_diff = np.linalg.norm( servo_pos - loc_pos )
    servo_pos_diff = np.linalg.norm( servo_pos[0:2] - loc_pos[0:2] )  # planar (xy) error only
    # print '\t Post-Servo Stats:'
    # print '\t\t Tag-Robot distance err (m): %2.3f' % (servo_pos_diff)
    # print '\t\t Tag-Robot orient err (deg): %2.3f' % (math.degrees(servo_theta_diff))
    # print '\t\t Debug Stats', math.degrees(servo_theta), math.degrees(servo_true_theta), servo_pos, loc_pos
    res = { 'loc': loc,
            'obj_num': obj_num,
            'trial_num': trial_num,
            'pos_readings': pos_readings_search + pos_readings_servo,
            'tot_readings':tot_readings_search + tot_readings_servo,
            'best_pos': servo_pos,
            'orient_est': servo_theta,
            'dxy': servo_pos_diff,
            'dtheta': servo_theta_diff,
            'servo_yn': servo_yn }
    return True, res, None
    return  # NOTE(review): unreachable -- every branch above already returned
if __name__ == '__main__':
    # Entry point: compute and pickle relative-results for one trial/object
    # (when --trial < 9) or for the full 9x9x{search,servo} sweep otherwise.
    import optparse
    p = optparse.OptionParser()
    p.add_option('--trial', action='store', type='int', dest='trial',
                 help='trial number (0-8)')
    p.add_option('--obj', action='store', type='int', dest='obj',
                 help='object number (0-8)')
    p.add_option('--servo', action='store_true', dest='servo',
                 help='Use combined search and servo?', default = False)
    opt, args = p.parse_args()
    obj_num = opt.obj
    trial_num = opt.trial
    servo_yn = opt.servo
    rospy.init_node( 'goober' )
    # NOTE(review): trial_num is None when --trial is omitted; in Python 2
    # None < 9 is True, so the single-trial branch runs with None indices.
    if trial_num < 9:
        print 'Publishing.'
        was_reads, res, p_set = process_trialobj( trial_num, obj_num, servo_yn )
        print 'RESULTS'
        pfs.pprint( res )
        f = open( 'Obj%d_Trial%d_Servo%d_rel_results.pkl' % (obj_num, trial_num, int( servo_yn )), 'w')
        pkl.dump( res, f )
        f.close()
    else:
        # Full sweep over every trial / object / servo combination.
        # for i in range( 0, 1 ): # trial
        #     for j in range( 4, 5 ): # obj
        for i in range( 0, 9 ): # trial
            for j in range( 0, 9 ): # obj
                for s in [False, True]: # servo_yn
                    was_reads, res, p_set = process_trialobj( trial_num = i, obj_num = j, servo_yn = s )
                    print 'RESULTS'
                    pfs.pprint( res )
                    f = open( 'Obj%d_Trial%d_Servo%d_rel_results.pkl' % ( j, i, int( s )), 'w')
                    pkl.dump( res, f )
                    f.close()
| [
[
1,
0,
0.0096,
0.0048,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0144,
0.0048,
0,
0.66,
0.0435,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0191,
0.0048,
0,
0.... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('tf')",
"roslib.load_manifes... |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
import rospy
import smach
import actionlib
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
from rfid_demos import sm_rfid_explore
from rfid_behaviors import recorder
from hrl_lib import util
from rfid_datacapture.srv import BagCapture, BagCaptureRequest
class DummyClass():
    """Minimal stand-in exposing only the .tagid attribute that
    recorder.bestvantage() expects on its argument."""
    def __init__(self, tagid):
        self.tagid = tagid
if __name__ == '__main__':
    # Entry point: run one RFID search-exploration capture.  Records a bag,
    # runs the SMACH search state machine, then saves the read summary and a
    # best-vantage pose YAML per known tag.
    import optparse
    p = optparse.OptionParser()
    p.add_option('--fname', action='store', type='string', dest='fname',
                 help='File name. Should be without extension. [eg. \'trial\']', default='')
    p.add_option('--radius', action='store', type='float', dest='radius',
                 help='Exploration radius in meters.', default=4.0)
    opt, args = p.parse_args()
    if opt.fname == '':
        print 'Fname required'
        exit()
    fname_base = '/u/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/search_cap/search_aware_home/'
    fname = fname_base + opt.fname
    # State machine: start bag capture -> explore/search -> stop bag capture.
    sm = smach.StateMachine(outcomes=['succeeded','aborted','preempted'],
                            input_keys = [ 'bagfile_name',
                                           'bagfile_topics',
                                           'tagid',
                                           'explore_radius' ])
    with sm:
        smach.StateMachine.add(
            'START_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request_slots = ['topics','dest'] ),
            remapping = {'topics':'bagfile_topics',
                         'dest':'bagfile_name'},
            transitions = {'succeeded':'SEARCH'})
        sm_search = sm_rfid_explore.sm_search()
        smach.StateMachine.add(
            'SEARCH',
            sm_search,
            transitions = {'succeeded':'STOP_BAG_CAPTURE'},
            remapping = {'tagid':'tagid',
                         'explore_radius':'explore_radius'})
        smach.StateMachine.add(
            'STOP_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request = BagCaptureRequest('','') ),
            transitions = {'succeeded':'succeeded'})
    rospy.init_node('smach_datacap_rfid_explore')
    rec = recorder.Recorder( serv_name = 'temp_recorder', node_name = 'temp_recorder_py' )
    rec.process_service( None ) # start recording
    sm.userdata.tagid = ''  # empty tagid => query all tags
    sm.userdata.explore_radius = opt.radius
    sm.userdata.bagfile_name = fname
    sm.userdata.bagfile_topics = '/tf /visarr /rfid/ears_reader /rfid/ears_reader_arr /map /robot_pose_ekf/odom_combined'
    outcome = sm.execute()
    rec.process_service( None ) # stop recording
    print 'Saving recorder pickle data.'
    util.save_pickle( rec.recorder_data, fname + '_reads.pkl' )
    print 'Saving best read locations.'
    tagids = ['OrangeMedBot','TravisTVremo','RedBottle ',
              'OnMetalKeys ','WhiteMedsBot','BlueMedsBox ',
              'TeddyBearToy','CordlessPhon','BlueHairBrus']
    for t in tagids:
        print '\tTagid: \'%s\'' % t,
        tname = t.replace( ' ', '' )
        try:
            pos = rec.bestvantage( DummyClass( t ))
            pos.header.stamp = rospy.Time(0)
            dat = pos.__str__() + '\n'
            f = open( fname + '_tag_' + tname + '.yaml', 'w' )
            f.write( dat )
            f.close()
            print ' Done.'
        except:
            # bestvantage raises when the tag was never read; best-effort skip.
            print ' NO READS. Done.'
            pass
| [
[
1,
0,
0.0175,
0.0088,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0263,
0.0088,
0,
0.66,
0.0625,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0351,
0.0088,
0,
0.... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"import rospy",
"import smach",
"import actionli... |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('tf')
roslib.load_manifest('sensor_msgs')
roslib.load_manifest('visualization_msgs')
import rospy
import cPickle as pkl
import hrl_lib.rutils as ru
import hrl_lib.viz as viz
import sensor_msgs.msg as sm
import numpy as np, math
import sm_aware_home_explore as ahe
import visualization_msgs.msg as vm
import glob
import json
import yaml
import time
import os
def publish_readings( trial, obj, pub_time = 30e3, screen_cap = False ):
loc = ( trial + obj ) % 9
fname = 'search_aware_home/woot_150_'+str(trial)+'_reads.pkl'
f = open( fname, 'r' )
r = pkl.load(f)
f.close()
rospy.init_node( 'starter_woot' )
vsm = viz.single_marker
# RFID readings
pos = [ vsm( np.matrix([ p.ps_ant_map.pose.position.x,
p.ps_ant_map.pose.position.y,
p.ps_ant_map.pose.position.z ]).T,
np.matrix([ p.ps_ant_map.pose.orientation.x,
p.ps_ant_map.pose.orientation.y,
p.ps_ant_map.pose.orientation.z,
p.ps_ant_map.pose.orientation.w ]).T,
'arrow', '/map',
color = [1.0, 0.0, 0.0, 0.8], # rgba,
duration = 10.0,
m_id = i )
for i,p in enumerate( r )
if p.read.rssi != -1 and p.read.tagID == ahe.tdb[obj][0] ]
neg = [ vsm( np.matrix([ p.ps_ant_map.pose.position.x,
p.ps_ant_map.pose.position.y,
p.ps_ant_map.pose.position.z ]).T,
np.matrix([ p.ps_ant_map.pose.orientation.x,
p.ps_ant_map.pose.orientation.y,
p.ps_ant_map.pose.orientation.z,
p.ps_ant_map.pose.orientation.w ]).T,
'arrow', '/map',
color = [0.2, 0.2, 0.2, 0.2], # rgba
duration = 10.0,
m_id = i + len(r) )
for i,p in enumerate( r )
if p.read.tagID != ahe.tdb[obj] ] # for no-read or other tag reads
print 'Pos: ', len(pos), '\nNeg: ', len(neg)
# Robot Trajectory
tm = [ vsm( np.matrix([ ahe.pts[loc][1][0],
ahe.pts[loc][1][1],
ahe.pts[loc][1][2] ]).T,
np.matrix([ [0.0], [0.0], [0.0], [1.0] ]),
'sphere', '/map',
color = [0.0, 1.0, 0.0, 1.0], # rgba
duration = 10.0,
m_id = 2*len(r) + 1 )]
xyz = np.array([ [p.ps_base_map.pose.position.x,
p.ps_base_map.pose.position.y,
p.ps_base_map.pose.position.z ] for p in r ]).T
pts = ru.np_to_pointcloud( xyz, '/map' )
pub_pts = rospy.Publisher( '/robot_traj', sm.PointCloud )
pub_mark = rospy.Publisher( '/tag_poses', vm.Marker )
# Search and Servo Positions
obj_name = ahe.tdb[obj][0] # tagID
tname = obj_name.replace( ' ', '' )
# "Best" Location determined by search
search_fname = 'search_aware_home/woot_150_' + str(trial) + '_tag_' + tname + '.yaml'
try:
f = open( search_fname )
except:
return
y = yaml.load( f )
f.close()
search = [ vsm( np.matrix([ y['pose']['position']['x'],
y['pose']['position']['y'],
y['pose']['position']['z'] ]).T,
np.matrix([ y['pose']['orientation']['x'],
y['pose']['orientation']['y'],
y['pose']['orientation']['z'],
y['pose']['orientation']['w'] ]).T,
'arrow', '/map',
scale = [0.5, 1.0, 1.0],
color = [255./255, 123./255, 1./255, 1.0], # rgba
duration = 10.0,
m_id = 2 * len(r) + 2 )]
# Location after Servoing
servo_fname = 'search_aware_home/woot_150_' + str(trial) + '_tag_' + tname + '_end.txt'
try:
f = open( servo_fname )
except:
return
y = f.readlines()
f.close()
# ['At time 1313069718.853\n',
# '- Translation: [2.811, 1.711, 0.051]\n',
# '- Rotation: in Quaternion [0.003, 0.001, -0.114, 0.993]\n',
# ' in RPY [0.005, 0.003, -0.229]\n',
# 'At time 1313069719.853\n',
# '- Translation: [2.811, 1.711, 0.051]\n',
# '- Rotation: in Quaternion [0.003, 0.001, -0.114, 0.993]\n',
# ' in RPY [0.005, 0.002, -0.229]\n']
quat = y[-2].find('Quaternion')+10
quat_list = json.loads( y[-2][quat:] )
sloc = y[-3].find('tion:')+5
sloc_list = json.loads( y[-3][sloc:] )
servo = [ vsm( np.matrix([ sloc_list ]).T,
np.matrix([ quat_list ]).T,
'arrow', '/map',
scale = [0.5, 1.0, 1.0],
color = [0./255, 205./255, 255./255, 1.0], # rgba
duration = 10.0,
m_id = 2 * len(r) + 3 )]
marks = neg + pos + tm + search + servo
t0 = time.time()
while time.time() - t0 < pub_time and not rospy.is_shutdown():
pts.header.stamp = rospy.Time.now()
pub_pts.publish( pts )
[ pub_mark.publish( x ) for x in marks ]
rospy.sleep( 1.0 )
if screen_cap:
os.system( 'scrot -d 2 -u Obj%d_Trial%d.png' % ( obj, trial ))
print 'Closing down... letting markers expire'
rospy.sleep( 15 )
if __name__ == '__main__':
    # Entry point: visualize one trial/object (--trial < 9), or batch over the
    # hard-coded trial/object lists with screenshots otherwise.
    import optparse
    p = optparse.OptionParser()
    # p.add_option('--fname', action='store', type='string', dest='fname',
    #              help='File name. Should be woot_150_x_reads.pkl', default='')
    p.add_option('--trial', action='store', type='int', dest='trial',
                 help='trial number (0-8)')
    p.add_option('--obj', action='store', type='int', dest='obj',
                 help='object number (0-8)')
    # p.add_option('--loc', action='store', type='int', dest='loc',
    #              help='location number (0-8)')
    opt, args = p.parse_args()
    if opt.trial < 9:
        publish_readings( opt.trial, opt.obj )
    else:
        print 'Click on RVIZ!'
        time.sleep( 3 )  # give the operator time to focus the RViz window
        #for trial in xrange(9):
        for trial in [1]:
            #for obj in xrange(9):
            for obj in [6]:
                print 'Change screen to RVIZ. Starting %d, %d' % (trial, obj)
                publish_readings( trial, obj, 15, screen_cap = True )
| [
[
1,
0,
0.0099,
0.005,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0149,
0.005,
0,
0.66,
0.0417,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0198,
0.005,
0,
0.66,... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('tf')",
"roslib.load_manifes... |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('tf')
roslib.load_manifest('sensor_msgs')
roslib.load_manifest('visualization_msgs')
import rospy
import cPickle as pkl
import hrl_lib.rutils as ru
import hrl_lib.viz as viz
import sensor_msgs.msg as sm
import numpy as np, math
import sm_aware_home_explore as ahe
import visualization_msgs.msg as vm
import tf
import tf.transformations as tft
import glob
import json
import yaml
import time
import os
# Script setup: find every per-object/per-trial servo result pickle so the
# estimated poses can be drawn against the ground-truth tag locations.
rospy.init_node( 'derp' )
vsm = viz.single_marker
m_id = 0  # running marker id; incremented for every marker created below
# Tags
# tag_pts = np.array([ ahe.pts[k][1] for k in ahe.pts.keys() ]).T
# tm = [ viz.single_marker( tag_pts[:,i].reshape([3,1]),
#                           np.matrix([ [0.0], [0.0], [0.0], [1.0] ]),
#                           'sphere', '/map',
#                           color=[0.0, 1.0, 0.0, 1.0],
#                           m_id=i ) for i in xrange( tag_pts.shape[1] )]
# Results pickles look like:
#   'Obj5_Trial8_Servo1_rel_results.pkl'
# {'best_pos': array([-2.165, 0.979, 0.055]),
#  'dtheta': 0.041245071031409619,
#  'dxy': 1.1365249667297239,
#  'loc': 7,
#  'obj_num': 5,
#  'orient_est': 2.8607549289685905,
#  'pos_readings': 717,
#  'servo_yn': True,
#  'tot_readings': 3580,
#  'trial_num': 2}
fnames = glob.glob('Obj[0-9]_Trial[0-9]_Servo1_rel_results.pkl')
def load( fn ):
    # Read and return the single pickled object stored in file `fn`.
    fobj = open( fn )
    try:
        return pkl.load( fobj )
    finally:
        fobj.close()
# Load every result record, keeping only instances with at least one
# positive tag read (the others carry placeholder '--' fields).
d = [ load( f ) for f in fnames ]
d = [ di for di in d if di['pos_readings'] > 0 ]
def arrow( m_id, res_dict, color ):
    # Build an RViz arrow marker at a result record's estimated pose:
    # 'best_pos' gives the position, 'orient_est' the yaw (radians).
    position = np.matrix([ res_dict['best_pos'] ]).T
    orientation = np.matrix([ tft.quaternion_from_euler( 0.0, 0.0,
                                                         res_dict['orient_est']) ]).T
    return vsm( position, orientation,
                'arrow', '/map',
                scale = [0.5, 1.0, 1.0],
                color = color, # rgba
                duration = 10.0,
                m_id = m_id )
# Marker publisher plus the two marker collections built below.
pub_mark = rospy.Publisher( '/tag_poses', vm.Marker )
tag_spheres = []    # ground-truth tag locations (spheres)
servo_arrows = []   # estimated post-servo poses (arrows)
def hextorgba( h ):
    # Map an 8-bit color channel value (0-255) onto the unit interval.
    return h / 255.0
# Three-color palette, cycled by location index so neighboring locations
# get visually distinct markers.
colors = [ [ hextorgba( i ) for i in [ 0x00, 0x99, 0x00, 0xFF ]], # green
           [ hextorgba( i ) for i in [ 0xEE, 0x00, 0x00, 0xFF ]], # red
           [ hextorgba( i ) for i in [ 0x00, 0xCC, 0xFF, 0xFF ]]] # teal
for loc in range( 0, 9 ):
    m_id += 1
    color = colors[ loc % len(colors) ]
    # Ground-truth sphere for this location...
    tag_spheres.append( vsm( np.matrix([ ahe.pts[loc][1][0],
                                         ahe.pts[loc][1][1],
                                         ahe.pts[loc][1][2] ]).T,
                             np.matrix([ [0.0], [0.0], [0.0], [1.0] ]),
                             'sphere', '/map',
                             color = color, # rgba
                             duration = 10.0,
                             m_id = m_id ))
    # ...and a same-colored arrow for every servo estimate made there.
    for di in d:
        if di['loc'] == loc:
            m_id += 1
            servo_arrows.append( arrow( m_id, di, color ))
marks = tag_spheres + servo_arrows
# Republish every second (markers expire after 10 s) until shutdown.
while not rospy.is_shutdown():
    [ pub_mark.publish( x ) for x in marks ]
    rospy.sleep( 1.0 )
print 'WOOT'
| [
[
1,
0,
0.0174,
0.0087,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0261,
0.0087,
0,
0.66,
0.025,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0348,
0.0087,
0,
0.6... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('tf')",
"roslib.load_manifes... |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('tf')
roslib.load_manifest('sensor_msgs')
roslib.load_manifest('visualization_msgs')
import rospy
import cPickle as pkl
import hrl_lib.rutils as ru
import hrl_lib.viz as viz
import sensor_msgs.msg as sm
import numpy as np, math
import sm_aware_home_explore as ahe
import visualization_msgs.msg as vm
if __name__ == '__main__':
    # Entry point: publish one trial's robot trajectory (point cloud) and all
    # nine ground-truth tag locations (green spheres) for RViz.
    import optparse
    p = optparse.OptionParser()
    p.add_option('--trial', action='store', type='int', dest='trial',
                 help='trial number (0-8)')
    # p.add_option('--fname', action='store', type='string', dest='fname',
    #              help='File name. Should be woot_150_x_reads.pkl', default='')
    opt, args = p.parse_args()
    fname = 'search_aware_home/woot_150_'+str(opt.trial)+'_reads.pkl'
    f = open( fname, 'r' )
    r = pkl.load(f)
    f.close()
    # Base poses of every reading -> 3xN array for the point cloud.
    xyz = np.array([ [p.ps_base_map.pose.position.x,
                      p.ps_base_map.pose.position.y,
                      p.ps_base_map.pose.position.z ] for p in r ]).T
    rospy.init_node( 'pub_traj' )
    pts = ru.np_to_pointcloud( xyz, '/map' )
    pub_pts = rospy.Publisher( '/robot_traj', sm.PointCloud )
    pub_mark = rospy.Publisher( '/tag_poses', vm.Marker )
    rospy.sleep( 0.5 )  # give the publishers time to connect
    tag_pts = np.array([ ahe.pts[k][1] for k in ahe.pts.keys() ]).T
    tm = [ viz.single_marker( tag_pts[:,i].reshape([3,1]),
                              np.matrix([ [0.0], [0.0], [0.0], [1.0] ]),
                              'sphere', '/map',
                              color=[0.0, 1.0, 0.0, 1.0],
                              m_id=i ) for i in xrange( tag_pts.shape[1] )]
    # Republish once a second until shutdown.
    while not rospy.is_shutdown():
        pts.header.stamp = rospy.Time.now()
        pub_pts.publish( pts )
        [ pub_mark.publish( x ) for x in tm ]
        rospy.sleep( 1.0 )
| [
[
1,
0,
0.0323,
0.0161,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0484,
0.0161,
0,
0.66,
0.0556,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0645,
0.0161,
0,
0.... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('tf')",
"roslib.load_manifes... |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
import rospy
# Experiment lookup tables.
# tdb: object index -> [ 12-char RFID tag id, (unused list) ]
tdb = { 0: ['OrangeMedBot',[]],
        1: ['TravisTVremo',[]],
        2: ['RedBottle ',[]],
        3: ['OnMetalKeys ',[]],
        4: ['WhiteMedsBot',[]],
        5: ['BlueMedsBox ',[]],
        6: ['TeddyBearToy',[]],
        7: ['CordlessPhon',[]],
        8: ['BlueHairBrus',[]]}
# pts: location index -> [ human-readable name, [x, y, z] in /map coords ]
pts = { 0: ['BehindTree',[3.757, 6.017, 0.036]],
        1: ['FireplaceMantle',[5.090, 4.238, 1.514]],
        2: ['CircleEndTable',[5.399, 2.857, 0.607]],
        3: ['Couch',[3.944, 1.425, 0.527]],
        4: ['RectEndTable',[3.302, 0.932, 0.534]],
        5: ['BehindKitchenTable',[-0.339, -2.393, 0.793]],
        6: ['NearDishwaser',[-1.926, -0.835, 0.946]],
        7: ['InCupboard',[-3.257, 1.294, 1.397]],
        8: ['OnFilingCabinet',[-0.083, 2.332, 0.670]]}
import smach
import actionlib
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
from rfid_demos import sm_rfid_explore
from rfid_behaviors import recorder
from hrl_lib import util
from rfid_datacapture.srv import BagCapture, BagCaptureRequest
class DummyClass():
    """Minimal stand-in carrying just the .tagid attribute consumed by
    recorder.bestvantage()."""
    def __init__(self, tagid):
        self.tagid = tagid
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('--fname', action='store', type='string', dest='fname',
                 help='File name. Should be without extension. [eg. \'trial\']', default='')
    p.add_option('--radius', action='store', type='float', dest='radius',
                 help='Exploration radius in meters.', default=4.0)
    opt, args = p.parse_args()
    if opt.fname == '':
        print 'Fname required'
        exit()
    fname_base = '/u/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/search_cap/search_bags/'
    fname = fname_base + opt.fname
    # State machine: start bag capture -> RFID explore search -> stop capture.
    sm = smach.StateMachine(outcomes=['succeeded','aborted','preempted'],
                            input_keys = [ 'bagfile_name',
                                           'bagfile_topics',
                                           'tagid',
                                           'explore_radius' ])
    with sm:
        smach.StateMachine.add(
            'START_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request_slots = ['topics','dest'] ),
            remapping = {'topics':'bagfile_topics',
                         'dest':'bagfile_name'},
            transitions = {'succeeded':'SEARCH'})
        sm_search = sm_rfid_explore.sm_search()
        smach.StateMachine.add(
            'SEARCH',
            sm_search,
            transitions = {'succeeded':'STOP_BAG_CAPTURE'},
            remapping = {'tagid':'tagid',
                         'explore_radius':'explore_radius'})
        smach.StateMachine.add(
            'STOP_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request = BagCaptureRequest('','') ),
            transitions = {'succeeded':'succeeded'})
    rospy.init_node('smach_datacap_rfid_explore')
    # Side-channel recorder of all RFID reads during the search.
    rec = recorder.Recorder( serv_name = 'temp_recorder', node_name = 'temp_recorder_py' )
    rec.process_service( None ) # start recording
    sm.userdata.tagid = ''
    sm.userdata.explore_radius = opt.radius
    sm.userdata.bagfile_name = fname
    sm.userdata.bagfile_topics = '/tf /visarr /rfid/ears_reader /rfid/ears_reader_arr /map /robot_pose_ekf/odom_combined'
    outcome = sm.execute()
    rec.process_service( None ) # stop recording
    print 'Saving recorder pickle data.'
    util.save_pickle( rec.recorder_data, fname + '_reads.pkl' )
    print 'Saving best read locations.'
    # NOTE(review): several of these tag IDs ('person', 'SpectrMedBot',
    # 'Red Mug') are not in tdb above -- presumably intentional extras; verify.
    tagids = ['person      ','OrangeMedBot' ,'SpectrMedBot','OnMetalKeys ',
              'TravisTVremo','Red Mug     ']
    for t in tagids:
        print '\tTagid: \'%s\'' % t,
        tname = t.replace( ' ', '' )
        try:
            # Best vantage pose for this tag, written out as a yaml dump.
            pos = rec.bestvantage( DummyClass( t ))
            pos.header.stamp = rospy.Time(0)
            dat = pos.__str__() + '\n'
            f = open( fname + '_tag_' + tname + '.yaml', 'w' )
            f.write( dat )
            f.close()
            print ' Done.'
        except:
            # NOTE(review): bare except treats any failure as "no reads".
            print ' No reads.  Done.'
            pass
| [
[
1,
0,
0.0149,
0.0075,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0224,
0.0075,
0,
0.66,
0.0556,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0299,
0.0075,
0,
0.... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"import rospy",
"tdb = { 0: ['OrangeMedBot',[]],\n... |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('tf')
roslib.load_manifest('sensor_msgs')
roslib.load_manifest('rosbag')
roslib.load_manifest('visualization_msgs')
import rospy
import cPickle as pkl
import hrl_lib.rutils as ru
import hrl_lib.viz as viz
import sensor_msgs.msg as sm
import numpy as np, math
import sm_aware_home_explore as ahe
import rosbag
import visualization_msgs.msg as vm
from hrl_lib.cmd_process import CmdProcess
import glob
import json
import yaml
import time
import os
def publish_robotpose( trial, obj, pub_time = 30e3, screen_cap = False ):
    """Replay the headpost bag for (trial, obj) while publishing a flat
    purple marker at the robot base, optionally screenshotting RVIZ.

    NOTE(review): pub_time and loc are computed/accepted but never used.
    """
    print 'Change screen to RVIZ. Starting %d, %d' % (trial, obj)
    loc = ( trial + obj ) % 9
    obj_name = ahe.tdb[obj][0]
    tname = obj_name.replace( ' ', '' )
    # woot_150_6_tag_BlueHairBrus_headpost.bag
    fname = 'search_aware_home/woot_150_'+str(trial)+'_tag_'+tname+'_headpost.bag'
    # Start the new bagplay
    bp = bagplay( fname )
    bp.run()
    while not bp.is_finished():
        # NOTE(review): init_node / Publisher are re-invoked every iteration;
        # the try guards the repeat-init failure. Works, but wasteful.
        try:
            rospy.init_node( 'markpub' )
            pub_mark = rospy.Publisher( '/robot_pose', vm.Marker )
        except:
            print 'Init Failure.'
        # Publish the robot marker
        vsm = viz.single_marker
        mark = [ vsm( np.matrix([ 0.0, 0.0, 0.0 ]).T,
                      np.matrix([ 0.0, 0.0, 0.0, 1.0 ]).T,
                      'cube', '/base_link',
                      scale = [0.65, 0.65, 0.001],
                      color = [158./255, 86./255, 192./255, 0.9], # rgba,
                      duration = 30.0,
                      m_id = 20000 )]
        [ pub_mark.publish( x ) for x in mark ]
        sim_safe_sleep( 1.0 ) # Cannot use rostime, since it will stall when bag stops
    # Screenshot!
    if screen_cap:
        os.system( 'scrot -d 2 -u Obj%d_Trial%d_RobotView.png' % ( obj, trial ))
    # Let markers expire
    print 'Waiting for markers and points to expire'
    t0 = time.time()
    t_sleep = 60.0
    while time.time() - t0 < t_sleep and not rospy.is_shutdown():
        if int(time.time() - t0) % 5 == 0:
            print 'Time left: %d' % (t_sleep - int(time.time() - t0))
        time.sleep( 1.0 )
    return
def sim_safe_sleep( dur, real_time_sleep = 0.05 ):
    """Sleep `dur` seconds of ROS (sim) time, polling every real_time_sleep
    wall seconds; return early if rostime freezes (bag stopped publishing
    /clock), which would otherwise hang a plain rospy.sleep."""
    t0 = rospy.Time.now().to_sec()
    ct = rospy.Time.now().to_sec()
    while True:
        if ct - t0 >= dur:
            break
        time.sleep( real_time_sleep )
        nt = rospy.Time.now().to_sec()
        if nt == ct: # rostime will stop when bag not playing -- exit immediately.
            break
        ct = nt
    return
def bagplay( fname ):
    """Launch `rosbag play` on fname as a background CmdProcess.

    Plays with simulated clock (--clock), at 2x rate, quietly.
    Usage:
        bp = bagplay( my_file_name )
        bp.run()                    # starts the execution
        while not bp.is_finished():
            rospy.sleep( 0.5 )
        bp.kill()                   # not necessary
    """
    rospy.logout( 'Launching bag file: %s' % fname )
    command = 'rosbag play --clock %s -r 2.0 -q' % fname
    return CmdProcess( command.split() )
def order_by_rostime( dat ):
    """Sort [[trial, obj], ...] pairs by the wall-clock start time of each
    pair's headpost bag; pairs whose bag is missing on disk are dropped."""
    # dat is [[trial, obj], ... ]
    # I'm too lazy to figure out how to reset time and prevent "TF_OLD_DATA" errors / warnings.
    # Instead, we're just going to order the bag playback in wall-clock order.
    def build_fname( t,o ):
        # woot_150_6_tag_BlueHairBrus_headpost.bag
        fname = 'search_aware_home/woot_150_'+str(t)+'_tag_'+ahe.tdb[o][0].replace( ' ', '' )+'_headpost.bag'
        return fname
    dat = [ [t,o] + [ build_fname(t,o) ] for t,o in dat ]
    dat = [ d for d in dat if glob.glob( d[-1] ) != [] ]
    rospy.logout( 'Ordering the bagfiles in increasing order of start time.' )
    def gettime( fname ):
        print fname
        # returns the timestamp of the first message
        b = rosbag.Bag( fname )
        msg = b.read_messages().next()
        tt = msg[-1].to_sec()
        b.close()
        return tt
    start_times = [ gettime( d[-1] ) for d in dat ]
    rospy.logout( 'Done ordering.' )
    return [ [dat[ind][0],dat[ind][1]] for ind in np.argsort( start_times ) ]
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    # p.add_option('--fname', action='store', type='string', dest='fname',
    #              help='File name.  Should be woot_150_x_reads.pkl', default='')
    p.add_option('--trial', action='store', type='int', dest='trial',
                 help='trial number (0-8)')
    p.add_option('--obj', action='store', type='int', dest='obj',
                 help='object number (0-8)')
    p.add_option('--sc', action='store_true', dest='sc',
                 help='Take screenshot', default=False)
    # p.add_option('--loc', action='store', type='int', dest='loc',
    #              help='location number (0-8)')
    opt, args = p.parse_args()
    # trial < 9: replay one (trial, obj); otherwise batch all 81 combinations
    # in bag-start-time order, screenshotting each.
    if opt.trial < 9:
        publish_robotpose( opt.trial, opt.obj, screen_cap = opt.sc )
    else:
        print 'Click on RVIZ!'
        time.sleep( 3 )
        #X,Y = np.meshgrid( range(0,9), range(0,9) )
        X,Y = np.meshgrid( range(0,9), range(0,9) )
        trial_obj = zip( Y.flatten(), X.flatten() )
        [ publish_robotpose( trial, obj, 15, screen_cap = True )
          for trial, obj in order_by_rostime(trial_obj) ]
| [
[
1,
0,
0.0117,
0.0058,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0175,
0.0058,
0,
0.66,
0.0333,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0234,
0.0058,
0,
0.... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('tf')",
"roslib.load_manifes... |
# LaTeX table template for per-location RFID search results.
# Filled via `template.x % tuple(args)` with exactly 36 values:
#   12 x %s    reads / delta-xy / delta-theta rows, objects 1-4 (top table)
#   15 x %s    same three rows, objects 5-9 (bottom table)
#    1 x %d    location number heading
#    6 x %3.1f mean/std of reads, distance error, |angle| error
#    2 x %d    caption and label location numbers
# BUGFIX: the closing \end{table} / \end{landscape} used single backslashes,
# i.e. the invalid escape sequence '\e' (a SyntaxWarning in modern Python);
# now consistently escaped like the rest of the template. String value is
# unchanged since '\e' was passed through literally anyway.
x = '''
\\begin{landscape}
\\begin{table}[p]
  \\newcolumntype{x}[1]{>{\\centering\\hspace{0pt}}p{#1}}
  \\newcolumntype{y}[1]{>{\\raggedleft\\hspace{0pt}}p{#1}}
  \\centering
  \\begin{tabular}{ | y{2cm} | c | c | c | c | r }
    \\toprule
    \\textbf{\\Large Object:} & Orange Meds & TV Remote & Red Bottle & Keys & \\multirow{4}{1.0cm}{\\LARGE ...} \\\\
    \\midrule
    \\midrule
    \\textbf{Reads} & %s & %s & %s & %s & \\\\
    \\textbf{$\\Delta xy$} & %s & %s & %s & %s & \\\\
    \\textbf{$\\Delta \\Theta$} & %s & %s & %s & %s & \\\\
    \\bottomrule
  \\end{tabular}
  \\vskip 30pt
  \\begin{tabular}{ c | c | c | c | c | c | }
    \\toprule
    \\multirow{4}{0.5cm}{\\LARGE ...} & Med Bottle & Med Box & Teddy Bear & Cordless Phone & Hair Brush \\\\
    \\midrule
    \\midrule
    & %s & %s & %s & %s & %s \\\\
    & %s & %s & %s & %s & %s \\\\
    & %s & %s & %s & %s & %s \\\\
    \\bottomrule
  \\end{tabular}
  \\vskip 30pt
  \\begin{tabular}{r r@{ = }l r@{ = }l}
    \\toprule
    \\multicolumn{5}{c}{\\textbf{\\Large OVERALL FOR LOCATION \\#%d}}\\\\
    \\midrule
    \\midrule
    \\textbf{Reads:} & $\\mu$ & %3.1f & $\\sigma$ & %3.1f \\\\
    \\textbf{$\\Delta xy$} & $\\mu$ & %3.1f m & $\\sigma$ & %3.1f m \\\\
    \\textbf{$|\\Delta \\Theta|$} & $\\mu$ & %3.1f$^o$ & $\\sigma$ & %3.1f$^o$ \\\\
    \\bottomrule
  \\end{tabular}
  \\caption{ RFID Search Performance for Tagged Objects at Location \\#%d }
  \\label{tab:search-results-%d}
\\end{table}
\\end{landscape}
'''
| [
[
14,
0,
0.5104,
1,
0,
0.66,
0,
190,
1,
0,
0,
0,
0,
3,
0
]
] | [
"x = '''\n\\\\begin{landscape}\n\\\\begin{table}[p]\n \\\\newcolumntype{x}[1]{>{\\\\centering\\\\hspace{0pt}}p{#1}}\n \\\\newcolumntype{y}[1]{>{\\\\raggedleft\\\\hspace{0pt}}p{#1}}\n \\\\centering\n \\\\begin{tabular}{ | y{2cm} | c | c | c | c | r }\n \\\\toprule"
] |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
import rospy
import smach
import actionlib
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
from rfid_demos import sm_rfid_explore
from rfid_behaviors import recorder
from hrl_lib import util
from rfid_datacapture.srv import BagCapture, BagCaptureRequest
from rfid_demos import sm_rfid_servo_approach
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('--fname', action='store', type='string', dest='fname',
                 help='File name. Should be without extension. [eg. \'trial\']', default='')
    p.add_option('--tag', action='store', type='string', dest='tagid',
                 help='Tagid to approach', default='person      ')
    opt, args = p.parse_args()
    if opt.fname == '':
        print 'Fname required'
        exit()
    fname_base = '/u/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/search_cap/search_aware_home/'
    fname = fname_base + opt.fname
    print 'SERVO APPROACH to ID: \'%s\'' % (opt.tagid)
    rospy.init_node('smach_servo_datacapture')
    # State machine: start bag capture -> RFID servo approach -> stop capture.
    sm = smach.StateMachine(outcomes=['succeeded','aborted','preempted'],
                            input_keys = [ 'bagfile_name',
                                           'bagfile_topics',
                                           'tagid'])
    with sm:
        smach.StateMachine.add(
            'START_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request_slots = ['topics','dest'] ),
            remapping = {'topics':'bagfile_topics',
                         'dest':'bagfile_name'},
            transitions = {'succeeded':'SERVO'})
        sm_servo = sm_rfid_servo_approach.sm_rfid_servo_approach()
        smach.StateMachine.add(
            'SERVO',
            sm_servo,
            transitions = {'succeeded':'STOP_BAG_CAPTURE'},
            remapping = {'tagid':'tagid'})
        smach.StateMachine.add(
            'STOP_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request = BagCaptureRequest('','') ),
            transitions = {'succeeded':'succeeded'})
    sm.userdata.tagid = opt.tagid
    sm.userdata.bagfile_name = fname + '_servo'
    sm.userdata.bagfile_topics = '/tf /rfid/ears_reader /rfid/ears_reader_arr /map /robot_pose_ekf/odom_combined /navigation/cmd_vel'
    outcome = sm.execute()
[
1,
0,
0.0267,
0.0133,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.04,
0.0133,
0,
0.66,
0.0625,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0533,
0.0133,
0,
0.66... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"import rospy",
"import smach",
"import actionli... |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('tf')
import rospy
import rfid_datacapture.math_util as mu
import sm_aware_home_explore as ahe
import glob
import yaml
import tf
import tf.transformations as tft
import json
import numpy as np, math
import cPickle as pkl
import template
# (sic: "DEPRICATED" -- typo preserved in the runtime message below)
print 'THIS FILE DEPRICATED AND INACCURATE!!! USE REL_STATS!'
# Accumulate one row per (trial, object):
#   [ loc, obj_num, trial, pos_readings, tot_readings, dist_err_m, ang_err_deg ]
res = []
for i in xrange( 9 ): # trial
    for j in xrange( 9 ): # object
        skip = False # (hacky)
        obj_num = j
        obj_name = ahe.tdb[j][0]
        tname = obj_name.replace( ' ', '' )
        trial_num = i
        loc = (i + j) % 9
        loc_name = ahe.pts[loc][0]
        loc_pos = np.array(ahe.pts[loc][1]) # Tag ground-truth location
        print 'Trial %d with Object %d (%s) at Position %d (%s)' % (i, obj_num, obj_name, loc, loc_name)
        fname = 'search_aware_home/woot_150_'+str(i)+'_reads.pkl'
        f = open( fname, 'r' )
        summary = pkl.load( f )
        f.close()
        # rssi == -1 marks a failed read; count only positive reads of this tag.
        pos_readings = sum([ True for p in summary if p.read.rssi != -1 and p.read.tagID == obj_name ])
        tot_readings = len( summary )
        print '\t Positive Reads: %d of %d (%2.2f)' % ( pos_readings,
                                                        tot_readings,
                                                        100.0 * pos_readings / tot_readings )
        search_fname = 'search_aware_home/woot_150_' + str(i) + '_tag_' + tname + '.yaml'
        glob_r = glob.glob( search_fname )
        if glob_r == []:
            print '\t No results for this instance.'
            skip = True
        if len(glob_r) > 1:
            print '\t Multiple results...?! Weirdness. Skipping.'
            skip = True
        servo_fname = 'search_aware_home/woot_150_' + str(i) + '_tag_' + tname + '_end.txt'
        glob_r = glob.glob( servo_fname )
        if glob_r == []:
            print '\t No results for this instance.'
            skip = True
        if len(glob_r) > 1:
            print '\t Multiple results...?! Weirdness. Skipping.'
            skip = True
        if not skip:
            f = open( search_fname )
            y = yaml.load( f )
            f.close()
            # "Best" Location determined by search
            efq = tft.euler_from_quaternion
            search_theta = efq( [ y['pose']['orientation']['x'],
                                  y['pose']['orientation']['y'],
                                  y['pose']['orientation']['z'],
                                  y['pose']['orientation']['w'] ])[-1]
            search_pos = np.array([ y['pose']['position']['x'],
                                    y['pose']['position']['y'],
                                    y['pose']['position']['z'] ])
            search_true_theta = np.arctan2( loc_pos[1] - search_pos[1], # y / x
                                            loc_pos[0] - search_pos[0] )
            search_theta_diff = mu.standard_rad( search_theta - search_true_theta )
            search_pos_diff = np.linalg.norm( search_pos - loc_pos )
            print '\t Post-Search Stats:'
            print '\t\t Tag-Robot distance err (m): %2.3f' % (search_pos_diff)
            print '\t\t Tag-Robot orient err (deg): %2.3f' % (math.degrees(search_theta_diff))
            print '\t\t Debug Stats', math.degrees(search_theta), math.degrees(search_true_theta), search_pos, loc_pos
            # Location after Servoing -- parsed out of `tf echo`-style text:
            # ['At time 1313069718.853\n',
            #  '- Translation: [2.811, 1.711, 0.051]\n',
            #  '- Rotation: in Quaternion [0.003, 0.001, -0.114, 0.993]\n',
            #  '            in RPY [0.005, 0.003, -0.229]\n',
            #  'At time 1313069719.853\n',
            #  '- Translation: [2.811, 1.711, 0.051]\n',
            #  '- Rotation: in Quaternion [0.003, 0.001, -0.114, 0.993]\n',
            #  '            in RPY [0.005, 0.002, -0.229]\n']
            f = open( servo_fname )
            r = f.readlines()
            f.close()
            rpy = r[-1].find('RPY')+3
            servo_theta = json.loads( r[-1][rpy:] )[-1]
            tion = r[-3].find('tion:')+5
            servo_pos = np.array(json.loads( r[-3][tion:] ))
            servo_true_theta = np.arctan2( loc_pos[1] - servo_pos[1], # y / x
                                           loc_pos[0] - servo_pos[0] )
            servo_theta_diff = mu.standard_rad( servo_theta - servo_true_theta )
            servo_pos_diff = np.linalg.norm( servo_pos - loc_pos )
            print '\t Post-Servo Stats:'
            print '\t\t Tag-Robot distance err (m): %2.3f' % (servo_pos_diff)
            print '\t\t Tag-Robot orient err (deg): %2.3f' % (math.degrees(servo_theta_diff))
            print '\t\t Debug Stats', math.degrees(servo_theta), math.degrees(servo_true_theta), servo_pos, loc_pos
            res.append( [ loc, obj_num, i, pos_readings, tot_readings, servo_pos_diff, math.degrees(servo_theta_diff) ] )
        else:
            res.append( [ loc, obj_num, i, pos_readings, tot_readings, '--', '--' ] )
        print '\t Done.\n\n'
res.sort()
print '\n\nRESULTS SORTED\n\n'
def pprint(r):
    """Pretty-print one result row [loc, obj, trial, pos, tot, dxy, dtheta];
    error fields are only printed when at least one positive read exists."""
    print 'Location %d, Object %d, Trial %d' % (r[0], r[1], r[2])
    if r[3] > 0:
        print '\tPos Reads: %d' % (r[3])
        print '\tTot Reads: %d' % (r[4])
        print '\tPercent Reads: %2.3f' % (r[3]*100.0/r[4])
        print '\tDist Err (m): %2.3f' % (r[5])
        print '\tAng Err (deg): %2.3f' % (r[6])
    else:
        # No positive reads: r[5]/r[6] are '--' placeholders, not numbers.
        print '\tPos Reads: %d' % (r[3])
        print '\tTot Reads: %d' % (r[4])
        print '\tPercent Reads: %2.3f' % (r[3]*100.0/r[4])
        print '\tDist Err (m): ----'
        print '\tAng Err (deg): ----'
# Dump every sorted row, then emit per-location LaTeX tables below.
[ pprint(r) for r in res ]
print '\n\n######### OUTPUTTING TEX TABLES from template.py #########\n\n'
def delta_xy( r ):
    """Format result row r's distance error for the LaTeX table.

    r[3] is the positive-read count; with no positive reads the error
    field r[5] is a placeholder, so emit a LaTeX dash instead.
    """
    return '%2.3f m' % (r[5]) if r[3] > 0 else '$--$'
def delta_theta( r ):
    """Format result row r's angular error (degrees) for the LaTeX table.

    Mirrors delta_xy: a dash placeholder when there were no positive reads.
    """
    return '%2.1f$^o$' % (r[6]) if r[3] > 0 else '$--$'
# Emit one LaTeX results table per location; template.x expects 36 args
# in exactly the order assembled here.
for i in xrange( 9 ):
    io = [ r for r in res if r[0] == i ]
    if len(io) != 9:
        print 'BIG PROBLEM. IO != 9'
        exit()
    args = []
    # Top Table
    args += [ '%d / %d (%2.1f\\%%)' % (r[3], r[4], r[3] * 100.0 / r[4]) for r in io ][0:4]
    args += [ delta_xy(r) for r in io ][0:4]
    args += [ delta_theta(r) for r in io ][0:4]
    # Bottom Table
    args += [ '%d / %d (%2.1f\\%%)' % (r[3], r[4], r[3] * 100.0 / r[4]) for r in io ][4:]
    args += [ delta_xy(r) for r in io ][4:]
    args += [ delta_theta(r) for r in io ][4:]
    # Overall Table
    args += [ io[0][0] ] # Title
    args += [ np.mean( [ r[3] for r in io ] ),
              np.std( [ r[3] for r in io ] ) ] # Reads
    args += [ np.mean( [ r[5] for r in io if r[3] > 0 ] ),
              np.std( [ r[5] for r in io if r[3] > 0 ] )] # Distances (Only meaningful for detected tags)
    args += [ np.mean( np.abs([ r[6] for r in io if r[3] > 0 ]) ),
              np.std( np.abs([ r[6] for r in io if r[3] > 0 ]) )] # |angle| (Only meaningful for detected tags)
    # Caption
    args += [ io[0][0], io[0][0] ]
    io_templated = template.x % tuple( args )
    f = open('rfid_search_loc%d_table.tex' % (i),'w')
    f.write( io_templated )
    f.close()
print 'THIS FILE DEPRICATED AND INACCURATE!!! USE REL_STATS!'
| [
[
1,
0,
0.0233,
0.0233,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
1,
0,
0.0465,
0.0233,
0,
0.66,
0.0714,
164,
0,
1,
0,
0,
164,
0,
0
],
[
1,
0,
0.093,
0.0233,
0,
0... | [
"import roslib",
"import rospy",
"import rfid_datacapture.math_util as mu",
"import sm_aware_home_explore as ahe",
"import glob",
"import yaml",
"import tf",
"import tf.transformations as tft",
"import json",
"import numpy as np, math",
"import cPickle as pkl",
"import template",
"def pprint... |
#! /usr/bin/python
import roslib
roslib.load_manifest('pr2_controllers_msgs')
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
import rospy
import smach
import actionlib
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
from pr2_controllers_msgs.msg import PointHeadAction, PointHeadGoal
from rfid_datacapture.srv import BagCapture, BagCaptureRequest
def head_capture( ):
    """Build a SMACH machine that sweeps the PR2 head through a fixed
    scan pattern (PH1..PH7) while a bag capture runs, then stops capture.

    Userdata inputs: bagfile_name, bagfile_topics.
    """
    # Create a SMACH state machine
    sm = smach.StateMachine(outcomes=['succeeded','aborted','preempted'],
                            input_keys = [ 'bagfile_name', 'bagfile_topics' ])
    with sm:
        def PointAdd( x, y, z, dur, state, res ):
            # Register one point_head_action state named `state` that aims the
            # head at (x,y,z) in /torso_lift_link over `dur` seconds, then
            # transitions to `res`.
            pgoal = PointHeadGoal()
            pgoal.target.header.frame_id = '/torso_lift_link'
            pgoal.target.point.x = x
            pgoal.target.point.y = y
            pgoal.target.point.z = z
            pgoal.min_duration = rospy.Duration( dur )
            pgoal.max_velocity = 1.0
            smach.StateMachine.add(
                state,
                SimpleActionState( '/head_traj_controller/point_head_action',
                                   PointHeadAction,
                                   goal = pgoal ),
                transitions = { 'succeeded' : res })
            return
        # Pre-position, then start capture, then the serpentine sweep.
        PointAdd(  0.00, -1.00, -0.60, 5.0, 'PH1', 'START_BAG_CAPTURE' )
        smach.StateMachine.add(
            'START_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request_slots = ['topics','dest'] ),
            remapping = {'topics':'bagfile_topics',
                         'dest':'bagfile_name'},
            transitions = {'succeeded':'PH2'})
        PointAdd(  0.00,  1.00, -0.60, 15.0, 'PH2', 'PH3' )
        PointAdd(  0.00,  1.00, -0.20,  3.0, 'PH3', 'PH4' )
        PointAdd(  0.00, -1.00, -0.20, 15.0, 'PH4', 'PH5' )
        PointAdd(  0.00, -1.00,  0.30,  3.0, 'PH5', 'PH6' )
        PointAdd(  0.00,  1.00,  0.30, 15.0, 'PH6', 'PH7' )
        PointAdd(  1.00,  0.00,  0.00,  7.5, 'PH7', 'STOP_BAG_CAPTURE' )
        smach.StateMachine.add(
            'STOP_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request = BagCaptureRequest('','') ),
            transitions = {'succeeded':'succeeded'})
    return sm
class DelayState( smach.State ):
    """SMACH state that simply sleeps for a fixed duration, then succeeds."""
    def __init__( self, delay = 3.0 ):
        # 'aborted' is declared but execute() only ever returns 'succeeded'.
        smach.State.__init__(self,outcomes=['succeeded', 'aborted'])
        self.delay = delay
    def execute( self, userdata ):
        # Block for the configured delay; the bag capture runs meanwhile.
        rospy.sleep( self.delay )
        return 'succeeded'
def cam_capture( ):
    """Build a SMACH machine that bags camera topics for a fixed delay:
    start capture -> DelayState (3 s) -> stop capture.

    Userdata inputs: bagfile_name, bagfile_topics.
    """
    # Create a SMACH state machine
    sm = smach.StateMachine(outcomes=['succeeded','aborted','preempted'],
                            input_keys = [ 'bagfile_name', 'bagfile_topics' ])
    with sm:
        smach.StateMachine.add(
            'START_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request_slots = ['topics','dest'] ),
            remapping = {'topics':'bagfile_topics',
                         'dest':'bagfile_name'},
            transitions = {'succeeded':'DELAY'})
        smach.StateMachine.add(
            'DELAY',
            DelayState(),
            transitions = {'succeeded':'STOP_BAG_CAPTURE'})
        smach.StateMachine.add(
            'STOP_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request = BagCaptureRequest('','') ),
            transitions = {'succeeded':'succeeded'})
    return sm
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('--fname', action='store', type='string', dest='fname',
                 help='File name. Should be without extension. [eg. \'trial\']', default='')
    opt, args = p.parse_args()
    if opt.fname == '':
        print 'Fname required'
        exit()
    rospy.init_node('smach_head_capture')
    # Run the head sweep capture, then a short camera-only capture.
    sm = head_capture()
    sis = IntrospectionServer('sm_head_capture', sm, '/SM_HEAD_CAPTURE')
    sis.start()
    fname_base = '/u/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/search_cap/search_aware_home/'
    fname = fname_base + opt.fname
    sm.userdata.bagfile_name = fname
    #sm.userdata.bagfile_topics = '/tf /kinect_head/rgb/points_throttled /kinect_head/rgb/image_color'
    sm.userdata.bagfile_topics = '/tf /kinect_head/rgb/points_throttled'
    outcome = sm.execute()
    #raw_input( 'Hit [ENTER] to begin capturing camera.' )
    sm_cam = cam_capture()
    sm_cam.userdata.bagfile_name = fname + '_cam'
    sm_cam.userdata.bagfile_topics = '/tf /kinect_head/rgb/image_color'
    sm_cam.execute()
    sis.stop()
[
1,
0,
0.0137,
0.0068,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0205,
0.0068,
0,
0.66,
0.0714,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0274,
0.0068,
0,
0.... | [
"import roslib",
"roslib.load_manifest('pr2_controllers_msgs')",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"import rospy",
"import smach",
"import actionlib",
"from smach_ros import SimpleActionState, ServiceState, Introspec... |
# Public submodules of this package.
# BUGFIX: the list was missing commas, so implicit string concatenation
# collapsed the three names into the single bogus entry
# 'utilsprocess_bags_friisprocess_bags_utils'.
__all__ = [
    'utils',
    'process_bags_friis',
    'process_bags_utils',
]
| [
[
14,
0,
0.5,
0.8333,
0,
0.66,
0,
272,
0,
0,
0,
0,
0,
5,
0
]
] | [
"__all__ = [\n'utils'\n'process_bags_friis'\n'process_bags_utils'\n]"
] |
#! /usr/bin/python
import roslib
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('rfid_demos')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('rosbag')
import rospy
import tf
from rfid_behaviors.srv import RecorderSrv, RecorderSrvResponse
from rfid_behaviors.srv import NextBestVantage
from rfid_behaviors.msg import RecorderReads
import hrl_rfid.ros_M5e_client as rmc
from geometry_msgs.msg import PoseStamped
from hrl_rfid.msg import RFIDread
from hrl_lib import util
import process_bags_utils as pbut
from rfid_behaviors import recorder
import rosbag
import glob
import numpy as np,math
# Glob pattern selecting every servo-phase bag to post-process.
SERVO_FNAMES = 'search_cap/search_aware_home/*_servo.bag'
# This is a modified version of rfid_behaviors.recorder.py
class TmpRecorder( ):
    """Subscribes to the ears RFID reader and logs each read together with
    the map-frame poses of the reading antenna and the robot base."""
    def __init__( self, serv_name = 'rfid_recorder', node_name = 'rfid_recorder_py' ):
        rospy.logout( 'rfid_recorder: initializing' )
        try:
            rospy.init_node(node_name)
        except: # presumably guards against double node init; bare except -- NOTE(review)
            pass
        self.name = 'ears'
        self.should_rec = False
        self.listener = tf.TransformListener()
        # rospy.logout( 'RFID Recorder: Waiting on transforms' )
        # self.listener.waitForTransform('/ear_antenna_left', '/map',
        #                                rospy.Time(0), timeout = rospy.Duration(100) )
        # self.listener.waitForTransform('/ear_antenna_right', '/map',
        #                                rospy.Time(0), timeout = rospy.Duration(100) )
        self.data = []
        self._sub = rospy.Subscriber( '/rfid/' + self.name + '_reader', RFIDread, self.add_datum)
        rospy.logout( 'rfid_recorder: ready' )
    def process_datum( self, datum ):
        """Pair one RFIDread with antenna and base poses in /map; returns a
        RecorderReads, or None when the TF lookup fails."""
        # Hooray for lexical scope (listener)!
        ant_lookup = { 'EleLeftEar': '/ear_antenna_left',
                       'EleRightEar': '/ear_antenna_right' }
        ps_ant = PoseStamped()
        ps_ant.header.stamp = rospy.Time( 0 )
        ps_ant.header.frame_id = ant_lookup[ datum.antenna_name ]
        ps_base = PoseStamped()
        ps_base.header.stamp = rospy.Time( 0 )
        ps_base.header.frame_id = '/base_link'
        try:
            ps_ant_map = self.listener.transformPose( '/map', ps_ant )
            ps_base_map = self.listener.transformPose( '/map', ps_base )
            rv = RecorderReads()
            rv.read = datum
            rv.ps_ant_map = ps_ant_map
            rv.ps_base_map = ps_base_map
        except:
            rospy.logout( 'RFID Recorder: TF failed. Ignoring read.' )
            rv = None
        return rv
    def add_datum( self, datum ):
        """Subscriber callback: append the processed read (if TF succeeded)."""
        # Hooray for lexical scope (data)!
        pd = self.process_datum( datum )
        if pd != None:
            self.data.append( pd )
def order_bagfiles( fnames ):
    """Return fnames sorted by the timestamp of each bag's first message."""
    # I'm too lazy to figure out how to reset time and prevent "TF_OLD_DATA" errors / warnings.
    # Instead, we're just going to order the bag playback in wall-clock order.
    rospy.logout( 'Ordering the bagfiles in increasing order of start time.' )
    def gettime( fname ):
        # returns the timestamp of the first message
        b = rosbag.Bag( fname )
        msg = b.read_messages().next()
        tt = msg[-1].to_sec()
        b.close()
        return tt
    start_times = [ gettime( f ) for f in fnames ]
    rospy.logout( 'Done ordering.' )
    return [ fnames[ind] for ind in np.argsort( start_times ) ]
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    # NOTE(review): --fname is parsed but never used below.
    p.add_option('--fname', action='store', dest='fname',
                 help='filename', default = '')
    opt, args = p.parse_args()
    rospy.init_node( 'process_servo_reads' )
    ordered_fnames = order_bagfiles( glob.glob(SERVO_FNAMES) )
    print 'starting recorder.'
    rec = TmpRecorder( serv_name = 'temp_recorder', node_name = 'temp_recorder_py' )
    print 'done starting'
    # Replay each servo bag and pickle the reads captured during playback.
    for i,fname in enumerate( ordered_fnames ):
        rospy.logout( 'Processing [ %d of %d ]: %s' % (i+1, len(ordered_fnames), fname) )
        rec.data = []
        # Start the new bagplay
        bp = pbut.bagplay( fname )
        bp.run()
        while not bp.is_finished():
            print 'Still waiting...'
            pbut.sim_safe_sleep( 1.0 ) # Cannot use rostime, since it will stall when bag stops
        print 'Done Waiting.'
        print 'Saving recorder pickle data.'
        util.save_pickle( rec.data, fname.replace('.bag','.pkl') )
| [
[
1,
0,
0.0145,
0.0072,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0217,
0.0072,
0,
0.66,
0.04,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.029,
0.0072,
0,
0.66,... | [
"import roslib",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('rfid_demos')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('rosbag')",
"import rospy",
... |
#!/usr/bin/python
# Basically a giant script.
import roslib
roslib.load_manifest( 'geometry_msgs' ) # the pickle files containe Point and Pose Stamped.
roslib.load_manifest( 'sensor_msgs' )
import rospy
from geometry_msgs.msg import PointStamped, PoseStamped
from sensor_msgs.msg import PointCloud
import sys
import glob
import yaml
import time
import optparse
import cPickle as pkl
import numpy as np, math
import pylab as pl
import friis
import point_cloud_utils as pcu
# Plot toggle; overwritten by --plot when run as a script.
PLOT = False
# Expected layout of the yaml config file:
# glob_files: '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/datacap/*.pkl'
# filters:
#   antennas:
#     PR2_Head: '/head_rfid'
#   tags:
#     'datacap     ':
if __name__ == '__main__':
    p = optparse.OptionParser()
    p.add_option('--yaml', action='store', type='string', dest='yaml', default='',
                 help='yaml file that describes this run.')
    p.add_option('--plot', action='store_true', dest='plot',
                 help='Pop-up the resulting plot')
    opt, args = p.parse_args()
    yaml_fname = opt.yaml
    PLOT = opt.plot
else:
    yaml_fname = ''
# SCRIPT:
if not yaml_fname:
    print 'YAML file required!'
    exit()
else:
    f = open( yaml_fname )
    # NOTE(review): yaml.load without SafeLoader executes arbitrary tags --
    # fine for trusted local configs, unsafe on untrusted input.
    yaml_config = yaml.load( f )
    f.close()
def add_files( d, arg ):
    """Merge the pickled dict stored in file arg[0] into accumulator d.

    arg is (fname, fcount); lists under duplicate keys are concatenated.
    NOTE(review): reads module-level `fnames` for progress output only.
    """
    fname, fcount = arg
    print 'Loading (%d of %d): %s' % (fcount, len(fnames), fname)
    f = open( fname, 'r' )
    d_new = pkl.load( f )
    f.close()
    for k in d_new.keys():
        if not d.has_key( k ):
            d[k] = []
        d[k] += d_new[k]
    return d
def base_map_xy( reading ):
    """Return the robot base [x, y] (map frame) at which this read occurred.

    reading is [d_rdr, d_tag, read, d_rot, ps]; only ps (element 4) is
    used here -- a 4-tuple (tag_map, rdr_map, rot_map, base_map) of
    PoseStamped-like objects.
    """
    tag_map, rdr_map, rot_map, base_map = reading[4]
    position = base_map.pose.position
    return [ position.x, position.y ]
# Expand every glob pattern from the yaml config into one flat filename list.
fnames = reduce( lambda x,y: x+y, [ glob.glob(i) for i in yaml_config['glob_files'] ], [] )
def pos_finish( reads_list ):
    """Return True iff the trajectory's last read places the robot base
    inside the +/- 2 m success box centred on (7, 3) in the /map frame.

    Only the final entry of reads_list is examined; its element 4 holds
    the (tag_map, rdr_map, rot_map, base_map) pose tuple.
    """
    base_map = reads_list[-1][4][3]
    bmx = base_map.pose.position.x
    bmy = base_map.pose.position.y
    # Strict inequalities: landing exactly on the box edge does not count.
    return ( 7 - 2 < bmx < 7 + 2 ) and ( 3 - 2 < bmy < 3 + 2 )
if len(glob.glob(yaml_config['use_combined'])) > 0:
print 'Loading pickle: %s' % (yaml_config['use_combined'])
f = open( yaml_config['use_combined'], 'r' )
data = pkl.load( f )
f.close()
print 'Done.'
else:
f = open( yaml_config['use_combined'], 'w' )
#d = reduce( add_files, zip(fnames,range(len(fnames))), {} )
data = []
# Apply Filters:
for i,fname in enumerate( fnames ):
print 'Loading (%d of %d): %s' % (i, len(fnames), fname)
f = open( fname, 'r' )
d_new = pkl.load( f )
f.close()
for k in d_new.keys():
if dict.fromkeys(yaml_config['filters']['tags']).has_key( k ):
data += [ base_map_xy(r) + [ r[2][0], pos_finish( d_new[k] )] for r in d_new[k] ]
data = np.array( data ).T # 4xN: x,y,rssi,positive finish
print 'Dumping data into combined pickle file: %s ' % (yaml_config['use_combined'])
f = open( yaml_config['use_combined'], 'w' )
pkl.dump( data, f, -1 )
f.close()
print 'Done. Re-run.'
exit()
# data will be 4xN => x, y, RSSI, positive finish (bool)
# Calculate Useful Values
xy = data[0:2,:]
xyz = np.row_stack([ xy, np.zeros( xy.shape[1] )])
rssi = data[2]
pos_read = data[3]
pp = pcu.np_points_to_ros( np.matrix(xyz[:,np.where(pos_read > 0.5)[0]]) )
pp.header.frame_id = '/map'
pn = pcu.np_points_to_ros( np.matrix(xyz[:,np.where(pos_read < 0.5)[0]]) )
pn.header.frame_id = '/map'
rospy.init_node( 'traj_pub_node' )
time.sleep( 0.3 )
print 'PUBLISHING'
pubp = rospy.Publisher( 'traj_pub_pos', PointCloud )
pubn = rospy.Publisher( 'traj_pub_neg', PointCloud )
while not rospy.is_shutdown():
pp.header.stamp = rospy.Time.now()
pn.header.stamp = rospy.Time.now()
pubp.publish( pp )
pubn.publish( pn )
rospy.sleep( 0.5 )
| [
[
1,
0,
0.0298,
0.006,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0357,
0.006,
0,
0.66,
0.027,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0417,
0.006,
0,
0.66,
... | [
"import roslib",
"roslib.load_manifest( 'geometry_msgs' ) # the pickle files containe Point and Pose Stamped.",
"roslib.load_manifest( 'sensor_msgs' )",
"import rospy",
"from geometry_msgs.msg import PointStamped, PoseStamped",
"from sensor_msgs.msg import PointCloud",
"import sys",
"import glob",
... |
import roslib
roslib.load_manifest('sensor_msgs')
roslib.load_manifest('geometry_msgs')
import rospy
from sensor_msgs.msg import PointCloud
from geometry_msgs.msg import Point32
from sensor_msgs.msg import ChannelFloat32
import numpy as np
import time
## PointCloud -> 3xN np matrix
# @param ros_pointcloud - robot_msgs/PointCloud
# @return 3xN np matrix
def ros_pointcloud_to_np(ros_pointcloud):
    ''' ros PointCloud.pts -> 3xN numpy matrix

    Thin wrapper: extracts the message's .points list and delegates to
    ros_pts_to_np().
    '''
    return ros_pts_to_np(ros_pointcloud.points)
## list of Point32 points -> 3xN np matrix
# @param ros_points - Point32[ ] (for e.g. from robot_msgs/PointCloud or Polygon3D)
# @return 3xN np matrix
def ros_pts_to_np(ros_pts):
    '''Stack a list of Point32-like objects (attrs x, y, z) into a 3xN numpy matrix.'''
    coords = [[pt.x, pt.y, pt.z] for pt in ros_pts]
    return np.matrix(coords).T
## 3xN np matrix -> ros PointCloud
# @param pts - 3xN np matrix
# @return PointCloud as defined in robot_msgs/msg/PointCloud.msg
def np_points_to_ros(pts):
    '''3xN numpy matrix -> sensor_msgs/PointCloud with one zeroed 't' channel.'''
    point_list = [Point32(col[0, 0], col[0, 1], col[0, 2]) for col in pts.T]
    zero_channel = ChannelFloat32('t', [0.] * len(point_list))
    cloud = PointCloud()
    cloud.points = point_list
    cloud.channels = [zero_channel]
    return cloud
| [
[
1,
0,
0.0204,
0.0204,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0408,
0.0204,
0,
0.66,
0.0909,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0612,
0.0204,
0,
0.... | [
"import roslib",
"roslib.load_manifest('sensor_msgs')",
"roslib.load_manifest('geometry_msgs')",
"import rospy",
"from sensor_msgs.msg import PointCloud",
"from geometry_msgs.msg import Point32",
"from sensor_msgs.msg import ChannelFloat32",
"import numpy as np",
"import time",
"def ros_pointcloud... |
#! /usr/bin/python
import roslib
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('robotis')
roslib.load_manifest('geometry_msgs')
roslib.load_manifest('move_base_msgs')
roslib.load_manifest('std_msgs')
roslib.load_manifest('tf')
import rospy
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
from rfid_servoing.msg import ServoAction, ServoGoal
from robotis.srv import MoveAng, MoveAngRequest
from geometry_msgs.msg import PoseStamped, Quaternion
from move_base_msgs.msg import MoveBaseAction
from std_msgs.msg import String
from rfid_datacapture.srv import BagCapture, BagCaptureRequest
import rfid_datacapture.utils as rdut
import numpy as np, math
def sm_rfid_servo_approach( yaml_fname ):
    '''Build the RFID servo-approach capture state machine.

    Loop per capture pose: read the next pose/bagfile/tag from the YAML
    monitor, drive there via move_base (with a manual-skip fallback),
    start a rosbag capture, RFID-servo toward the tag, stop the capture,
    then re-tuck both antenna pan servos and return to the monitor.

    yaml_fname: path to the capture-description YAML consumed by rdut.YAMLproc.
    Returns the (unexecuted) smach.StateMachine.
    '''
    # Create a SMACH state machine
    sm = smach.StateMachine( outcomes = ['succeeded','aborted','preempted'])
    # Open the container
    with sm:
        # 'aborted' from the monitor means the YAML list is exhausted,
        # which is mapped to overall success.
        smach.StateMachine.add(
            'CAPTURE_MONITOR',
            rdut.YAMLproc( yaml_fname ),
            remapping = {'next_move_pose':'next_move_pose', # output
                         'bagfile_name':'bagfile_name', # output
                         'bagfile_topics':'bagfile_topics', # output
                         'tagid':'tagid'}, # output
            transitions = {'aborted':'succeeded',
                           'succeeded':'MOVE_POSITION'})
        smach.StateMachine.add(
            'MOVE_POSITION',
            SimpleActionState( '/move_base',
                               MoveBaseAction,
                               goal_slots = [ 'target_pose' ]),
            remapping = { 'target_pose' : 'next_move_pose' }, # input
            transitions = {'aborted':'MANUAL_SKIP',
                           'preempted':'aborted',
                           'succeeded':'START_BAG_CAPTURE'})
        smach.StateMachine.add(
            'MANUAL_SKIP',
            rdut.ManualSkip(),
            transitions = {'succeeded':'START_BAG_CAPTURE', # We already manually positioned the robot
                           'aborted':'CAPTURE_MONITOR'}) # skip this position and go to next
        smach.StateMachine.add(
            'START_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request_slots = ['topics','dest'] ),
            remapping = {'topics':'bagfile_topics',
                         'dest':'bagfile_name'},
            transitions = {'succeeded':'SERVO'})
        # Servoing is a basic state machine. Success means servoing finished @ obs.
        smach.StateMachine.add(
            'SERVO',
            SimpleActionState( '/rfid_servo/servo_act',
                               ServoAction,
                               goal_slots = ['tagid']), #goal = ServoGoal( 'person      ' ),
            transitions = { 'succeeded': 'STOP_BAG_CAPTURE' },
            remapping = {'tagid':'tagid'}) # input
        # An empty BagCaptureRequest signals the capture node to close the bag.
        smach.StateMachine.add(
            'STOP_BAG_CAPTURE',
            ServiceState( '/bag_cap/capture',
                          BagCapture,
                          request = BagCaptureRequest('','') ),
            transitions = {'succeeded':'TUCK_LEFT'})
        # Tuck Left (non-blocking)
        smach.StateMachine.add(
            'TUCK_LEFT',
            ServiceState( 'robotis/servo_left_pan_moveangle',
                          MoveAng,
                          request = MoveAngRequest( 1.350, 0.2, 0 )), # ang (float), angvel (float), blocking (bool)
            transitions = {'succeeded':'TUCK_RIGHT'})
        # Tuck Right (non-blocking)
        smach.StateMachine.add(
            'TUCK_RIGHT',
            ServiceState( 'robotis/servo_right_pan_moveangle',
                          MoveAng,
                          request = MoveAngRequest( -1.350, 0.2, 0 )), # ang (float), angvel (float), blocking (bool)
            transitions = {'succeeded':'CAPTURE_MONITOR'})
    return sm
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('--yaml', action='store', type='string', dest='yaml',
                 help='Capture description yaml file', default='')
    opt, args = p.parse_args()
    if opt.yaml == '':
        print 'ERROR: Must specify YAML file.'
        exit()
    rospy.init_node('rfid_servo_capture')
    sm = sm_rfid_servo_approach( opt.yaml )
    # Introspection server lets smach_viewer visualize the running machine.
    sis = IntrospectionServer('RFID_servo_approach', sm, '/SM_RFID_SERVO_APPROACH')
    sis.start()
    outcome = sm.execute()
    sis.stop()
# python sm_servo_capture_simple.py --yaml datacap_vert.yaml
| [
[
1,
0,
0.0154,
0.0077,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0231,
0.0077,
0,
0.66,
0.05,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0308,
0.0077,
0,
0.66... | [
"import roslib",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('robotis')",
"roslib.load_manifest('geometry_msgs')",
"roslib.load_manifest('move_base_msgs')",
"roslib.load_manifest('std_msgs')",
"roslib.load_manifest('tf')",
"import rospy",
"import smach",
"from smach_ros impor... |
import numpy as np, math
import pylab as pl
# Attenuator setting (dB) vs. measured RSSI calibration data.
ar = [[ 0, 104 ],
      [ 1, 104 ],
      [ 2, 104 ],
      [ 3, 104 ],
      [ 4, 104 ],
      [ 8, 100 ],
      [ 9, 97 ],
      [ 10, 96 ],
      [ 11, 95 ],
      [ 12, 93 ],
      [ 13, 91 ],
      [ 14, 90 ],
      [ 15, 89 ],
      [ 16, 87 ],
      [ 17, 85 ],
      [ 18, 84 ],
      [ 19, 83 ],
      [ 20, 81 ],
      [ 21, 80 ],
      [ 22, 78 ],
      [ 23, 77 ],
      [ 24, 74 ],
      [ 25, 73 ],
      [ 26, 71 ]]
# Build as float: the in-place '*= -1.0' below fails on an integer array
# under modern numpy (the float64 product cannot be cast back into an int
# buffer with same-kind casting).
ar = np.array( ar, dtype=float ).T
ar[0] *= -1.0 # Change attenuation to be negative.
# Linear least squares fit: rssi ~= m * attn + b
a = ar[0,4:] # Start at attn = -4 to avoid including saturation region.
a = np.column_stack([ a, np.ones(len(a)) ])
b = ar[1,4:]
m,b = np.linalg.lstsq( a, b )[0]
# Fitted line sampled over the unsaturated range (for plotting).
xs = np.linspace( -27, -3, 100 )
ys = xs * m + b
if __name__ == '__main__':
    # Plot the raw measurements against the least-squares fit and save a PNG.
    pl.plot( ar[0], ar[1], 'bo', linewidth=2.0 )
    pl.hold( True )  # NOTE(review): pl.hold was removed in matplotlib >= 3.0 -- confirm target version
    pl.plot( xs, ys, 'g-', linewidth = 2.0 )
    pl.xlabel( 'Attenuator Setting (dB)')
    pl.ylabel( 'RSSI' )
    pl.legend([ 'Measurements', 'Linear Fit' ], loc='upper left')
    pl.savefig( 'Attenuator_RSSI_measurements.png' )
    pl.show()
| [
[
1,
0,
0.0185,
0.0185,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.037,
0.0185,
0,
0.66,
0.1,
735,
0,
1,
0,
0,
735,
0,
0
],
[
14,
0,
0.287,
0.4444,
0,
0.66... | [
"import numpy as np, math",
"import pylab as pl",
"ar = [[ 0, 104 ],\n [ 1, 104 ],\n [ 2, 104 ],\n [ 3, 104 ],\n [ 4, 104 ],\n [ 8, 100 ],\n [ 9, 97 ],\n [ 10, 96 ],",
"ar = np.array( ar ).T",
"a = ar[0,4:] # Start at attn = -4 to avoid including saturation region... |
import numpy as np, math
import pylab as pl
SENSITIVITY = -80 # dBm -- weakest backscattered signal the reader can detect
THRESHOLD = -18 # dBm -- minimum incident power needed to energize the tag
# P^inc_rdr = P_rdr + 2*alpha > SENSITIVITY   (1)  (two-way path loss alpha)
# P^inc_tag = P_rdr + alpha   > THRESHOLD     (2)  (one-way path loss alpha)
# crossover = (1) - (2)
crossover_alpha = SENSITIVITY - THRESHOLD
crossover_Prdr = THRESHOLD - crossover_alpha
# Rearranged constraints on path loss alpha:
# alpha > 0.5 * (SENSITIVITY - Prdr)   (3)
# alpha > THRESHOLD - Prdr             (4)
print 'Crossover Point:\n\talpha: %2.2f\n\tPrdr: %2.2f' % (crossover_alpha, crossover_Prdr)
prdr = np.linspace( 0, 50 )
# alpha = np.linspace( -47, -30 )
alpha_3 = 0.5 * ( SENSITIVITY - prdr )
alpha_4 = THRESHOLD - prdr
# Wide figure with the legend placed outside the axes on the right.
f = pl.figure( figsize=(12,6) )
pl.axes([0.1,0.1,0.65,0.8])
f3 = pl.plot( prdr, alpha_3, 'g', linewidth = 3.0 )
pl.hold( True )  # NOTE(review): pl.hold removed in matplotlib >= 3.0
f4 = pl.plot( prdr, alpha_4, 'r', linewidth = 3.0 )
# x_min, x_max, y_min, y_max = pl.axis()
# pl.axis([-47,-30,y_min,y_max])
pl.legend((f3,f4),
          ('$P^{inc}_{rdr}$ > %d dBm' % (SENSITIVITY), '$P^{inc}_{tag}$ > %d dBm' % (THRESHOLD)),
          loc=(1.03,0.2))
pl.xlabel( '$P_{rdr}$ (dBm)' )
pl.ylabel( '$\\alpha$ (dBm)' )
pl.savefig('forward_link_limit.png')
pl.show()
| [
[
1,
0,
0.0233,
0.0233,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.0465,
0.0233,
0,
0.66,
0.0526,
735,
0,
1,
0,
0,
735,
0,
0
],
[
14,
0,
0.093,
0.0233,
0,
... | [
"import numpy as np, math",
"import pylab as pl",
"SENSITIVITY = -80 # dBm",
"THRESHOLD = -18 # dBm",
"crossover_alpha = SENSITIVITY - THRESHOLD",
"crossover_Prdr = THRESHOLD - crossover_alpha",
"print('Crossover Point:\\n\\talpha: %2.2f\\n\\tPrdr: %2.2f' % (crossover_alpha, crossover_Prdr))",
"prdr =... |
#! /usr/bin/python
import time
import roslib
roslib.load_manifest('rospy')
roslib.load_manifest('tf')
roslib.load_manifest('geometry_msgs')
roslib.load_manifest('std_msgs')
roslib.load_manifest('hrl_rfid')
roslib.load_manifest('robotis')
roslib.load_manifest('rfid_behaviors')
import rospy
import tf
import tf.transformations as tft
from geometry_msgs.msg import Twist
from geometry_msgs.msg import PointStamped
from geometry_msgs.msg import Point
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Quaternion
from geometry_msgs.msg import PoseWithCovarianceStamped
from std_msgs.msg import Float64
import hrl_rfid.ros_M5e_client as rmc
import robotis.ros_robotis as rr
from hrl_rfid.msg import RFIDreadArr
import rfid_behaviors.rotate_backup_node as rb
from rfid_behaviors.cmd_process import CmdProcess
from rfid_behaviors.srv import String_Int32
from rfid_behaviors.srv import String_Int32Response
from rfid_behaviors.srv import FlapEarsSrv
from rfid_behaviors.srv import StringArr_Int32
import numpy as np, math
import time
from threading import Thread
from collections import deque
from functools import reduce
#PAN_RATE = 30.0
PAN_RATE = 10.0 # Used for datacapture.
#PAN_RATE = 3.0
def calculate_angle( pt1 ):
    '''Return the planar bearing (radians) of PointStamped pt1 within its own frame.'''
    y_comp = pt1.point.y
    x_comp = pt1.point.x
    return np.arctan2( y_comp, x_comp )
def standard_rad(t):
    '''Wrap angle t (radians) into the interval [-pi, pi].'''
    # Same formula for both signs, expressed with a signed half-turn offset.
    half_turn = np.pi if t > 0 else -np.pi
    return ((t + half_turn) % (2.0 * half_turn)) - half_turn
class OrientNode( ):
    '''Node that "flaps" the robot's antenna ears to scan for RFID tags,
    accumulates (bearing, RSSI) readings per tag in the /base_link frame,
    and can rotate the base toward the direction of strongest mean RSSI.

    Services provided:
      /rfid_orient/flap   (FlapEarsSrv)    -- run one pan/tilt scan
      /rfid_orient/bag    (StringArr_Int32)-- start/stop a rosbag capture
      /rfid_orient/orient (String_Int32)   -- rotate toward the best tag direction
    '''
    def __init__( self ):
        rospy.logout('orient_node: Initializing')
        rospy.init_node('orient_node')
        # After calling "flap ears", data will look something like this:
        # { 'TagID1': [[ang,rssi], [ang,rssi], ...]
        #   'TagID2': ... }
        # * All angles are in /base_link and rssi's from both antennas
        self.data = {}
        # Will be transformed into base frame to determine best turn angle -- results in approximately 5-degrees (max) error for small angle assumption
        # A far point (x = 10 m) along each antenna's boresight stands in for
        # the tag direction.
        self.tag_gt = { 'EleLeftEar': PointStamped(), 'EleRightEar': PointStamped() }
        self.tag_gt[ 'EleLeftEar' ].header.frame_id = '/ear_antenna_left'
        self.tag_gt[ 'EleLeftEar' ].header.stamp = rospy.Time.now()
        self.tag_gt[ 'EleLeftEar' ].point.x = 10.0
        self.tag_gt[ 'EleRightEar' ].header.frame_id = '/ear_antenna_right'
        self.tag_gt[ 'EleRightEar' ].header.stamp = rospy.Time.now()
        self.tag_gt[ 'EleRightEar' ].point.x = 10.0
        self.listener = tf.TransformListener()
        self.listener.waitForTransform('/base_link', '/ear_antenna_left',
                                       rospy.Time(0), timeout = rospy.Duration(100) )
        self.listener.waitForTransform('/base_link', '/ear_antenna_right',
                                       rospy.Time(0), timeout = rospy.Duration(100) )
        rospy.logout('orient_node: Transforms ready')
        # For movement...
        self.rotate_backup_client = rb.RotateBackupClient()
        # "Ears" Setup: pan/tilt Robotis servos for both antennas.
        self.p_left = rr.ROS_Robotis_Client( 'left_pan' )
        self.t_left = rr.ROS_Robotis_Client( 'left_tilt' )
        self.p_right = rr.ROS_Robotis_Client( 'right_pan' )
        self.t_right = rr.ROS_Robotis_Client( 'right_tilt' )
        # Pan excursion limits (radians) used during flapping / stowing.
        self.EX_1 = 1.350
        self.EX_2 = 0.920
        # Move to the stowed pose and wait for the pans to settle.
        self.p_left.move_angle( self.EX_1, math.radians(10), blocking = False )
        self.p_right.move_angle( -1.0 * self.EX_1, math.radians(10), blocking = True )
        self.t_left.move_angle( 0.0, math.radians(10), blocking = False )
        self.t_right.move_angle( 0.0, math.radians(10), blocking = True )
        while self.p_left.is_moving() or self.p_right.is_moving():
            time.sleep( 0.01 )
        self.bag_pid = None  # CmdProcess handle for an in-flight rosbag capture
        self.r = rmc.ROS_M5e_Client('ears')
        self.__service_flap = rospy.Service( '/rfid_orient/flap',
                                             FlapEarsSrv,
                                             self.flap_ears )
        self.__service_bag = rospy.Service( '/rfid_orient/bag',
                                            StringArr_Int32,
                                            self.bag_cap )
        self.__service_orient = rospy.Service( '/rfid_orient/orient',
                                               String_Int32,
                                               self.orient )
        self.tag_arr_sub = rospy.Subscriber( '/rfid/ears_reader_arr',
                                             RFIDreadArr,
                                             self.add_tags )
        rospy.logout( 'orient_node: Waiting for service calls.' )
    def bag_cap( self, request ):
        '''Start a rosbag capture subprocess, or kill the current one.

        An empty list or a first element of 'kill' stops the open capture;
        otherwise request.data is the argv to launch.
        '''
        # request.data => String array
        # sample args: ['rosbag', 'record', '/tf', '/rfid/ears_reader_arr', '-o', 'data/data']
        if (request.data == [] or request.data[0] == 'kill'):
            if self.bag_pid == None:
                rospy.logout( 'orient_node: No open bag to kill.' )
            else:
                rospy.logout( 'orient_node: Killing open bag.' )
                self.bag_pid.kill()
                self.bag_pid = None
            return int( True )
        s = reduce( lambda x,y: x+' '+y, request.data )
        rospy.logout( 'orient_node: Calling CmdProcess with args: %s' % s )
        self.bag_pid = CmdProcess( request.data )
        self.bag_pid.run()
        return int( True )
    def orient( self, request ):
        '''Rotate the base toward the 10-degree bearing bin with the highest
        mean RSSI for tag request.data, using readings from the last scan.
        '''
        tagid = request.data
        if not self.data.has_key( tagid ):
            rospy.logout( 'Tag id \'%s\' not found during last scan.' % tagid )
            return String_Int32Response( int( False ))
        # arr is 2xN: row 0 = bearings (rad, /base_link), row 1 = RSSI.
        arr = np.array( self.data[ tagid ]).T
        arr = arr[:,np.argsort( arr[0] )]
        h, bins = np.histogram( arr[0], 36, ( -np.pi, np.pi ))
        ind = np.sum(arr[0][:, np.newaxis] > bins, axis = 1) - 1 # Gives indices for data into bins
        bin_centers = (bins[:-1] + bins[1:]) / 2.0
        best_dir = 0.0
        best_rssi = 0.0
        for i in np.unique( ind ):
            avg_rssi = np.mean(arr[1,np.argwhere( ind == i )])
            if avg_rssi > best_rssi:
                best_rssi = avg_rssi
                best_dir = bin_centers[i]
        rospy.logout( 'orient_node: Best dir (deg): %2.2f with avg rssi: %2.1f' %
                      ( math.degrees(best_dir), best_rssi ))
        self.rotate_backup_client.rotate_backup( best_dir, 0.0 )
        return String_Int32Response( int( True ))
    def add_tags( self, msg ):
        '''Reader-array callback: convert each positive read into a
        (bearing, RSSI) pair in /base_link and store it under its tag ID.
        Aborts the whole message on a -1 RSSI or a failed transform.
        '''
        for read in msg.arr:
            if read.rssi == -1:
                return False
            self.tag_gt[ read.antenna_name ].header.stamp = rospy.Time(0)
            try:
                pt = self.listener.transformPoint( '/base_link',
                                                   self.tag_gt[ read.antenna_name ])
            except:
                rospy.logout( 'orient_node: Transform failed' )
                return False
            if not self.data.has_key( read.tagID ):
                self.data[ read.tagID ] = []
            self.data[ read.tagID ].append([ calculate_angle( pt ), 1.0 * read.rssi ])
        return True
    def flap_ears( self, request ):
        '''Run one scan: put the reader in query/track mode, sweep the ear
        pans back and forth (collecting reads via add_tags), then stow.
        Returns the list of tag IDs seen.
        '''
        if request.panrate == 0.0:
            rpan_rate = 30.0  # default pan speed (deg/s) when unspecified
        else:
            rpan_rate = request.panrate
        self.data = {}  # discard readings from any previous scan
        tagid = request.data
        if tagid == '':
            rospy.logout( 'orient_node: capture for tagid: \'\' requested. Using QueryEnv.' )
            self.r.query_mode( )
        else:
            rospy.logout( 'orient_node: capture for tagid: \'%s\' requested' % tagid )
            self.r.track_mode( tagid )
        forward = False
        tilt_angs = [ math.radians( 0.0 ),
                      math.radians( 0.0 ) ]
        for ta in tilt_angs:
            # Tilt
            self.t_left.move_angle( ta, math.radians( 30.0 ), blocking = False )
            self.t_right.move_angle( -1.0 * ta, math.radians( 30.0 ), blocking = False )
            while self.t_left.is_moving() or self.t_right.is_moving():
                time.sleep(0.01)
            # Pan: alternate sweep direction each pass.
            if forward:
                self.p_left.move_angle( self.EX_1, math.radians( rpan_rate ), blocking = False )
                self.p_right.move_angle( -1.0 * self.EX_1, math.radians( rpan_rate ), blocking = True )
                forward = False
            else:
                self.p_left.move_angle( -1.0 * self.EX_2, math.radians( rpan_rate ), blocking = False )
                self.p_right.move_angle( self.EX_2, math.radians( rpan_rate ), blocking = True )
                forward = True
            while self.p_left.is_moving() or self.p_right.is_moving():
                time.sleep(0.01)
            time.sleep(0.1)
        self.r.stop()
        print self.data.keys()
        # Reset / Stow
        self.p_left.move_angle( self.EX_1, math.radians(10), blocking = False )
        self.t_left.move_angle( 0.0, math.radians(10), blocking = False )
        self.p_right.move_angle( -1.0 * self.EX_1, math.radians(10), blocking = False )
        self.t_right.move_angle( 0.0, math.radians(10), blocking = False )
        rospy.logout( 'orient_node: capture completed' )
        # print self.data
        return [self.data.keys()]
if __name__ == '__main__':
    # Service-driven node: construct and block until shutdown.
    on = OrientNode()
    rospy.spin()
| [
[
1,
0,
0.0084,
0.0042,
0,
0.66,
0,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0127,
0.0042,
0,
0.66,
0.027,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0169,
0.0042,
0,
0... | [
"import time",
"import roslib",
"roslib.load_manifest('rospy')",
"roslib.load_manifest('tf')",
"roslib.load_manifest('geometry_msgs')",
"roslib.load_manifest('std_msgs')",
"roslib.load_manifest('hrl_rfid')",
"roslib.load_manifest('robotis')",
"roslib.load_manifest('rfid_behaviors')",
"import rospy... |
#! /usr/bin/python
import time
import roslib
roslib.load_manifest('rospy')
roslib.load_manifest('actionlib')
roslib.load_manifest( 'move_base_msgs' )
roslib.load_manifest('tf')
roslib.load_manifest('geometry_msgs')
roslib.load_manifest('std_msgs')
roslib.load_manifest('hrl_rfid')
roslib.load_manifest('robotis')
roslib.load_manifest('rfid_behaviors')
import rospy
import tf
import tf.transformations as tft
import actionlib
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from geometry_msgs.msg import Twist
from geometry_msgs.msg import PointStamped
from geometry_msgs.msg import Point
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Quaternion
from geometry_msgs.msg import PoseWithCovarianceStamped
from std_msgs.msg import Float64
from rfid_behaviors.srv import FloatFloat_Int32
from rfid_behaviors.srv import FloatFloat_Int32Response
from rfid_behaviors.srv import FloatFloatFloatFloat_Int32
import numpy as np, math
import time
from threading import Thread
from collections import deque
def standard_rad(t):
    '''Normalize angle t (radians) to the range [-pi, pi].'''
    # One expression for both signs via a signed half-turn offset.
    offset = np.pi if t > 0 else -np.pi
    return ((t + offset) % (offset * 2.0)) - offset
class RotateBackup():
    '''Service node for simple base motions: open-loop forward/backward
    translation plus closed-loop in-place rotation, and a navstack goal
    relay via the move_base action.

    Services provided:
      /rotate_backup          (FloatFloat_Int32)           -- rotate + displace
      /rotate_backup/navstack (FloatFloatFloatFloat_Int32) -- move_base goal in /map
    '''
    def __init__( self, service_name = '/rotate_backup' ):
        try:
            rospy.init_node( 'rotater' )
        except:
            pass  # already initialized by the host process
        rospy.logout( 'rotate_backup: Initializing service: \'%s\'' % service_name )
        self.pub = rospy.Publisher( '/move_base_simple/goal', PoseStamped )
        self.pub_direct = rospy.Publisher( '/navigation/cmd_vel', Twist )
        self.listener = tf.TransformListener()
        self.listener.waitForTransform('/odom_combined', '/base_link',
                                       rospy.Time(0), timeout = rospy.Duration(100) )
        self._service_rb = rospy.Service( service_name, FloatFloat_Int32, self.move)
        self._service_rb_navstack = rospy.Service( service_name+'/navstack', FloatFloatFloatFloat_Int32, self.navstack)
        rospy.logout( 'rotate_backup: Service ready' )
    def non_nav_rotate( self, r ):
        '''Rotate in place by r radians using direct cmd_vel commands,
        monitoring yaw in /odom_combined until within ~0.08 rad of target.
        '''
        success, pose, orient = self.get_pose() # orient = rx, ry, rz
        if not success:
            rospy.logout( 'rotate_backup: Rotate transform fail. Exiting.' )
            return
        t_rz = standard_rad( orient[-1] + r )
        mov = Twist()
        mov.linear.x = 0.05  # slight forward creep while rotating
        mov.angular.z = 0.6 * np.sign( r )
        rate = rospy.Rate( 10 )
        while not np.allclose( [t_rz], [orient[-1]], atol=[0.08] ): # Not within 5deg of target
            success, pose, orient = self.get_pose() # orient = rx, ry, rz
            orient[-1] = standard_rad( orient[-1] )
            if not success:
                rospy.logout( 'rotate_backup: Rotate transform fail. Exiting.' )
                return
            self.pub_direct.publish( mov )
            rate.sleep()
    def non_nav_backup( self, d ):
        '''Drive straight at 0.1 m/s (sign of d) for |d| meters, open-loop by time.'''
        # Hacky way to backup without relying on nav_stack's bizarre circling.
        mov = Twist()
        mov.linear.x = 0.1 * np.sign( d )
        t0 = time.time()
        rate = rospy.Rate( 10.0 )
        while time.time() - t0 < np.abs(d) / 0.1:
            self.pub_direct.publish( mov )
            rate.sleep()
    def get_pose( self ):
        '''Return (success, [x, y, z], [rx, ry, rz]) of /base_link in /odom_combined.'''
        ps = PoseStamped()
        ps.header.stamp = rospy.Time(0)
        ps.header.frame_id = '/base_link'
        #ps.pose.position.x = d
        try:
            ps_odom = self.listener.transformPose( '/odom_combined', ps )
        except:
            rospy.logout( 'rotate_backup: Failed transform #1.' )
            time.sleep( 2.0 )
            return False, [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]
        orient = ps_odom.pose.orientation
        rx, ry, rz = tft.euler_from_quaternion([ orient.x, orient.y, orient.z, orient.w ])
        pose = [ ps_odom.pose.position.x, ps_odom.pose.position.y, ps_odom.pose.position.z ]
        return True, pose, [rx, ry, rz]
    # Hacky version not using actionlib
    # def wait_for_stop( self, duration = 0.5 ):
    #     rospy.logout( 'rotate_backup: Waiting for movement to stop.' )
    #     t0 = time.time()
    #     rate = rospy.Rate( 10 )
    #     success, pose, orient = self.get_pose()
    #     if not success:
    #         rospy.logout( 'rotate_backup: Waiting 2 sec.' )
    #         time.sleep( 2.0 )
    #         return
    #     sp = np.array([ pose[0], pose[1], pose[2], orient[-1] ])
    #     while time.time() - t0 < duration:
    #         success, pose, orient = self.get_pose()
    #         if not success:
    #             rospy.logout( 'rotate_backup: Waiting 2 sec.' )
    #             time.sleep( 2.0 )
    #             return
    #         qp = np.array([ pose[0], pose[1], pose[2], orient[-1] ])
    #         if not np.allclose( sp, qp, atol=[0.01, 0.01, 0.01, 0.005] ):
    #             t0 = time.time()
    #             sp = qp
    #         rate.sleep()
    #     return
    def move( self, request ):
        '''Service handler: translate by request.displace (m), then rotate
        by request.rotate (rad). Always reports success.
        '''
        r = request.rotate
        d = request.displace
        rospy.logout( 'rotate_backup: Asked to rotate: %3.2f (deg)' % math.degrees(r))
        rospy.logout( 'rotate_backup: Asked to translate (forward-backward): %3.2f (m)' % d)
        self.non_nav_backup( d )
        self.non_nav_rotate( r )
        # success, pose, orient = self.get_pose()
        # if not success:
        #     return FloatFloat_Int32Response( int(False) )
        # new_point = Point( pose[0], pose[1], pose[2] )
        # old_rx, old_ry, old_rz = orient
        # new_orient = tft.quaternion_from_euler( old_rx, old_ry, old_rz + r )
        # new_quat = Quaternion( *new_orient )
        # new_ps = PoseStamped()
        # new_ps.header.stamp = rospy.Time(0)
        # new_ps.header.frame_id = '/odom_combined'
        # new_ps.pose.position = new_point
        # new_ps.pose.orientation = new_quat
        # self.pub.publish( new_ps )
        # self.wait_for_stop()
        # rospy.logout( 'rotate_backup: Done with call.' )
        return FloatFloat_Int32Response( int(True) )
    # Hacky version not using actionlib
    # def navstack( self, request ):
    #     new_orient = tft.quaternion_from_euler( 0.0, 0.0, request.ang ) # rx, ry, rz
    #     new_quat = Quaternion( *new_orient )
    #     new_ps = PoseStamped()
    #     new_ps.header.stamp = rospy.Time(0)
    #     new_ps.header.frame_id = '/map'
    #     new_ps.pose.position.x = request.x
    #     new_ps.pose.position.y = request.y
    #     new_ps.pose.orientation = new_quat
    #     rospy.logout( 'rotate_backup: Requesting navstack move to <x,y,ang-deg> %3.3f %3.3f %3.3f.' % (request.x, request.y, math.degrees(request.ang)) )
    #     self.pub.publish( new_ps )
    #     rospy.logout( 'rotate_backup: Waiting for base to stop moving.' )
    #     self.wait_for_stop( 7.0 )
    #     return int( True )
    def navstack( self, request ):
        '''Service handler: send a blocking move_base goal at
        (request.x, request.y, request.ang) in the /map frame.
        '''
        rospy.logout( 'rotate_backup: Requesting navstack move to <x,y,ang-deg> %3.3f %3.3f %3.3f.' % (request.x, request.y, math.degrees(request.ang)) )
        client = actionlib.SimpleActionClient( 'move_base', MoveBaseAction )
        client.wait_for_server()
        ps = PoseStamped()
        ps.header.frame_id = '/map'
        ps.header.stamp = rospy.Time(0)
        ps.pose.position.x = request.x
        ps.pose.position.y = request.y
        ps.pose.orientation = Quaternion( *tft.quaternion_from_euler( 0.0, 0.0, request.ang ))
        goal = MoveBaseGoal( ps )
        client.send_goal( goal )
        rospy.logout( 'rotate_backup: Waiting for base to stop moving.' )
        client.wait_for_result()
        return int( True )
class RotateBackupClient():
    '''Thin client-side proxy for the RotateBackup rotate/displace service.'''
    def __init__( self, service_name = '/rotate_backup' ):
        rospy.logout( 'rotate_backup_client: Waiting for service: \'%s\'' % service_name )
        rospy.wait_for_service( service_name )
        rospy.logout( 'rotate_backup_client: Service ready.' )
        self._rb_service = rospy.ServiceProxy( service_name, FloatFloat_Int32 )
    def rotate_backup( self, rotate, displace ):
        '''Request a rotation (rad) plus forward/backward displacement (m); blocks.'''
        return self._rb_service( rotate, displace )
if __name__ == '__main__':
    # Service-driven node: construct and block until shutdown.
    rb = RotateBackup()
    rospy.spin()
| [
[
1,
0,
0.0092,
0.0046,
0,
0.66,
0,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0138,
0.0046,
0,
0.66,
0.0303,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0184,
0.0046,
0,
... | [
"import time",
"import roslib",
"roslib.load_manifest('rospy')",
"roslib.load_manifest('actionlib')",
"roslib.load_manifest( 'move_base_msgs' )",
"roslib.load_manifest('tf')",
"roslib.load_manifest('geometry_msgs')",
"roslib.load_manifest('std_msgs')",
"roslib.load_manifest('hrl_rfid')",
"roslib.l... |
#! /usr/bin/python
import roslib
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('pr2_msgs')
roslib.load_manifest('std_msgs')
import rospy
from pr2_msgs.msg import PressureState
from std_msgs.msg import Float64
from rfid_behaviors.srv import FloatFloat_Int32
import numpy as np, math
import time, string
def default_mag_func( x ):
    '''Default magnitude measure: L1 norm (sum of absolute values) of x.'''
    return np.sum( np.abs( x ))

class TactileSensor( ):
    '''Wraps a PR2 fingertip pressure topic: publishes a scalar magnitude of
    the bias-corrected readings and offers a blocking threshold-detection
    service.

    Topic published:  /readings/<topic-with-underscores>_mag (Float64)
    Service provided: /readings/<topic-with-underscores>_serv (FloatFloat_Int32)
    '''
    def __init__( self, topic = '/pressure/r_gripper_motor',
                  mag_func = default_mag_func ):
        rospy.logout( 'tactile_sensor: Initializing' )
        try:
            rospy.init_node( 'tactile_sensor' )
        except:
            pass  # node already initialized by the host process
        self.mag_func = mag_func
        self.left = None     # latest raw left-finger readings (np array)
        self.right = None    # latest raw right-finger readings (np array)
        self.l_bias = None   # zero reference, set by bias()
        self.r_bias = None
        self.topic = topic
        # str.replace instead of the deprecated py2-only string.replace().
        self.pub = rospy.Publisher( '/readings/' + topic.replace('/','_') + '_mag', Float64 )
        self.service = rospy.Service( '/readings/' + topic.replace('/','_') + '_serv',
                                      FloatFloat_Int32,
                                      self.thresh_service )
        self.reg_sensor( )
        # Bug fix: use 'is None' -- once these hold numpy arrays, '== None'
        # becomes an elementwise comparison with an ambiguous truth value.
        while self.left is None or self.right is None:
            time.sleep( 0.1 )
        self.unreg_sensor()
        rospy.logout( 'tactile_sensor: Ready' )
    def reg_sensor( self ):
        '''(Re)subscribe to the pressure topic; short sleep lets data arrive.'''
        self.sub = rospy.Subscriber( self.topic, PressureState, self.cb )
        time.sleep( 0.3 )
    def unreg_sensor( self ):
        '''Stop receiving pressure messages.'''
        self.sub.unregister()
    def cb( self, msg ):
        '''Pressure callback: store raw values, apply bias, publish magnitude.'''
        self.left = np.array( list( msg.l_finger_tip ), dtype=float )
        self.right = np.array( list( msg.r_finger_tip ), dtype=float )
        # Same '== None' -> 'is None' fix: after the first message these are
        # arrays, and '== None' would raise on the 'or'.
        if self.l_bias is None or self.r_bias is None:
            self.l_bias = np.zeros( len( self.left ))
            self.r_bias = np.zeros( len( self.right ))
        self.l_read = np.copy( self.left - self.l_bias )
        self.r_read = np.copy( self.right - self.r_bias )
        self.mag = self.mag_func( np.append( self.l_read, self.r_read ))
        #print np.append( self.l_read, self.r_read )
        self.pub.publish( self.mag )
    def bias( self ):
        '''Capture the current raw readings as the zero reference.'''
        self.reg_sensor()
        rospy.logout( 'tactile_sensor: Biasing' )
        self.l_bias = np.copy( self.left )
        self.r_bias = np.copy( self.right )
        self.unreg_sensor()
        return True
    def read( self ):
        '''Return copies of the latest bias-corrected (left, right) readings.'''
        return np.copy( self.l_read ), np.copy( self.r_read )
    def thresh_service( self, request ):
        '''Service shim: request fields reuse FloatFloat_Int32's slot names.'''
        self.thresh_detect( request.rotate, request.displace ) # yeah, jacked up names
        return int( True )
    def thresh_detect( self, threshold, timeout = 100.0 ):
        '''Re-bias, then block until magnitude exceeds threshold or timeout (s).

        Returns the last observed magnitude.
        '''
        rospy.logout( 'tactile_sensor: Threshold detector activated: %3.2f, timeout: %d' % (threshold, timeout))
        self.bias()
        self.reg_sensor()
        t0 = time.time()
        t_diff = time.time() - t0
        lp = t0 # Printing status messages
        while self.mag < threshold and t_diff < timeout:
            if time.time() - lp > 1.0:
                lp = time.time()
                rospy.logout( 'tactile_sensor: Threshold still undetected' )
            time.sleep( 0.05 )
            t_diff = time.time() - t0
        self.unreg_sensor()
        rospy.logout( 'tactile_sensor: Detected (or timeout)' )
        return self.mag
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('-r', '--right', action='store_true', dest='right', default=False,
                 help='Right finger')
    p.add_option('-l', '--left', action='store_true', dest='left', default=False,
                 help='left finger')
    opt, args = p.parse_args()
    # One TactileSensor per requested gripper; each publishes its own
    # magnitude topic and threshold service.
    if opt.right:
        r_tact = TactileSensor('/pressure/r_gripper_motor')
    if opt.left:
        l_tact = TactileSensor('/pressure/l_gripper_motor')
    rospy.spin()
    # Manual test snippets (unreachable after spin):
    # print 'Reading: ', r_tact.read()
    # raw_input('Hit [ENTER]')
    # r_tact.bias()
    # time.sleep(3.0)
    # print 'Reading: ', r_tact.read()
    # raw_input('Hit [ENTER]')
    # print 'COMBINED:', np.append( *r_tact.read() )
    # print 'ERR:', np.sum(np.abs( np.append( *r_tact.read() )))
    # print 'Waiting for thresh'
    # r_tact.thresh_detect( 5000 )
| [
[
1,
0,
0.0147,
0.0074,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0221,
0.0074,
0,
0.66,
0.0833,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0294,
0.0074,
0,
0.... | [
"import roslib",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('pr2_msgs')",
"roslib.load_manifest('std_msgs')",
"import rospy",
"from pr2_msgs.msg import PressureState",
"from std_msgs.msg import Float64",
"from rfid_behaviors.srv import FloatFloat_Int32",
"import numpy as np, mat... |
#! /usr/bin/python
import roslib
roslib.load_manifest('rfid_behaviors')
import rospy
from rfid_behaviors.srv import FlapEarsSrv, FlapperSrv
from threading import Thread
# All this guy does is call flap repeatedly until receiving a stop signal.
class Flapper( Thread ):
    '''Background thread that repeatedly calls the ear-flapping RFID scan
    service while toggled on.

    Service provided:
      /flapper/flap (FlapperSrv) -- toggles flapping and records the tag id.
    '''
    def __init__( self, serv_name = 'rfid_orient/flap' ):
        Thread.__init__( self )
        self.should_run = True    # main-loop keepalive, cleared by stop()
        self.should_flap = False  # toggled by the /flapper/flap service
        self.tagid = ''
        rospy.logout( 'flapper: initializing' )
        try:
            rospy.init_node('flapper_py')
        except:
            pass  # node already initialized by the host process
        rospy.wait_for_service( serv_name )
        # Bug fix: the proxy previously hard-coded '/rfid_orient/flap',
        # silently ignoring the serv_name argument it had just waited on.
        self.flap = rospy.ServiceProxy( serv_name, FlapEarsSrv )
        self._service = rospy.Service( '/flapper/flap',
                                       FlapperSrv,
                                       self.process_service )
        rospy.logout( 'flapper: ready' )
        self.start()
    def run( self ):
        '''Thread main loop: while enabled, call the flap service each cycle.'''
        rospy.logout( 'flapper: running' )
        r = rospy.Rate( 10 )
        while self.should_run and not rospy.is_shutdown():
            if self.should_flap:
                self.flap( self.tagid, 0.0 )
            r.sleep()
        rospy.logout( 'flapper: exiting' )
    def stop( self ):
        '''Signal the loop to exit and join the thread (3 s grace).'''
        self.should_run = False
        self.join(3)
        # is_alive() (not the removed camelCase isAlive) works on py2.6+ and py3.
        if (self.is_alive()):
            # Bug fix: error message previously said 'ROS_M5e' (copy-paste).
            raise RuntimeError("flapper: unable to stop thread")
    def process_service( self, req ):
        '''Toggle flapping on/off and record the requested tag id.'''
        self.tagid = req.tagid
        self.should_flap = not self.should_flap
        return True
if __name__ == '__main__':
    # The Flapper constructor starts its own worker thread; spin for services.
    fl = Flapper()
    rospy.spin()
| [
[
1,
0,
0.0339,
0.0169,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0508,
0.0169,
0,
0.66,
0.1667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0678,
0.0169,
0,
0.... | [
"import roslib",
"roslib.load_manifest('rfid_behaviors')",
"import rospy",
"from rfid_behaviors.srv import FlapEarsSrv, FlapperSrv",
"from threading import Thread",
"class Flapper( Thread ):\n def __init__( self, serv_name = 'rfid_orient/flap' ):\n Thread.__init__( self )\n self.should_ru... |
#! /usr/bin/python
import roslib
roslib.load_manifest('rfid_behaviors')
import rospy
import tf
from rfid_behaviors.srv import RecorderSrv, RecorderSrvResponse
from rfid_behaviors.srv import NextBestVantage
from rfid_behaviors.msg import RecorderReads
import hrl_rfid.ros_M5e_client as rmc
from geometry_msgs.msg import PoseStamped
import numpy as np, math
class Recorder( ):
    '''Records RFID tag reads paired with the antenna and base poses (in /map)
    at the time of each read, and computes the best re-visit vantage pose
    for a tag from the recorded data.

    Services provided:
      <serv_name>/record       (RecorderSrv)      -- toggle recording; returns reads on stop
      <serv_name>/best_vantage (NextBestVantage)  -- pose with strongest RSSI for a tag
    '''
    def __init__( self, serv_name = 'rfid_recorder', node_name = 'rfid_recorder_py' ):
        rospy.logout( 'rfid_recorder: initializing' )
        try:
            rospy.init_node(node_name)
        except:
            pass  # node already initialized by the host process
        self.should_rec = False
        # Robustness fix: initialize so bestvantage() can be called before any
        # record start/stop cycle (previously raised AttributeError).
        self.recorder_data = []
        self._servrec = rospy.Service( serv_name + '/record',
                                       RecorderSrv,
                                       self.process_service )
        self._servbest = rospy.Service( serv_name + '/best_vantage',
                                        NextBestVantage,
                                        self.bestvantage )
        self.listener = tf.TransformListener()
        rospy.logout( 'RFID Recorder: Waiting on transforms' )
        self.listener.waitForTransform('/ear_antenna_left', '/map',
                                       rospy.Time(0), timeout = rospy.Duration(100) )
        self.listener.waitForTransform('/ear_antenna_right', '/map',
                                       rospy.Time(0), timeout = rospy.Duration(100) )
        rospy.logout( 'rfid_recorder: ready' )
    def process_service( self, req ):
        '''Toggle recording. On start: attach a reader callback and clear data.
        On stop: detach, and return/retain the accumulated reads.
        '''
        self.should_rec = not self.should_rec # toggle state. (bad way to do this...)
        if self.should_rec == True:
            self.data = []
            self.rec = rmc.ROS_M5e_Client('ears', callbacks = [self.add_datum])
            rospy.logout( 'RFID Recorder: Logging Reads.' )
            rv = RecorderSrvResponse()
            rv.rfid_reads = []
        else:
            rospy.logout( 'RFID Recorder: Halting recorder.' )
            self.rec.unregister() # Stop processing new reads
            rospy.sleep( 0.5 ) # Give it some time to settle
            rv = RecorderSrvResponse()
            rv.rfid_reads = list(self.data) # Save the data.
            self.recorder_data = list( self.data )
        return rv
    def process_datum( self, datum ):
        '''Pair one tag read with the current antenna and base poses in /map.
        Returns a RecorderReads, or None if the TF lookup fails.
        '''
        # Hooray for lexical scope (listener)!
        ant_lookup = { 'EleLeftEar': '/ear_antenna_left',
                       'EleRightEar': '/ear_antenna_right' }
        ps_ant = PoseStamped()
        ps_ant.header.stamp = rospy.Time( 0 )
        ps_ant.header.frame_id = ant_lookup[ datum.antenna_name ]
        ps_base = PoseStamped()
        ps_base.header.stamp = rospy.Time( 0 )
        ps_base.header.frame_id = '/base_link'
        try:
            ps_ant_map = self.listener.transformPose( '/map', ps_ant )
            ps_base_map = self.listener.transformPose( '/map', ps_base )
            rv = RecorderReads()
            rv.read = datum
            rv.ps_ant_map = ps_ant_map
            rv.ps_base_map = ps_base_map
        except:
            rospy.logout( 'RFID Recorder: TF failed. Ignoring read.' )
            rv = None
        return rv
    def add_datum( self, datum ):
        '''Reader callback: store the read if its poses could be resolved.'''
        # Hooray for lexical scope (data)!
        pd = self.process_datum( datum )
        if pd != None:
            self.data.append( pd )
    def bestvantage(self, req):
        '''Return the pose with the strongest recorded RSSI for req.tagid:
        base <x,y> in /map combined with the antenna's orientation. Falls
        back to an identity pose in base_link when no positive reads exist.
        '''
        rospy.logout('Recorder: Calculating best vantage for tag \'%s\'' % req.tagid)
        d = {}
        for rr in self.recorder_data: # rr is RecorderRead
            if not d.has_key( rr.read.tagID ):
                d[rr.read.tagID] = []
            d[rr.read.tagID].append( rr )
        pos_reads = []
        if d.has_key( req.tagid ):
            pos_reads = [ q for q in d[ req.tagid ] if q.read.rssi != -1 ] # list of RecorderReads
        if not pos_reads: # check at least one positive reading
            # Bug fix: rospy has no 'warn' attribute; logwarn is the API.
            rospy.logwarn( 'Recorder: Desired tag had no readings.' )
            rv = PoseStamped()
            rv.header.frame_id = 'base_link'
            rv.header.stamp = rospy.Time.now()
            rv.pose.orientation.w = 1.0
            return rv
        # Select the RecorderRead with greatest RSSI
        rssi = [ r.read.rssi for r in pos_reads ]
        ind = np.argmax( rssi )
        best = pos_reads[ ind ] # RecorderRead
        best_read = best.read
        best_ant = best.ps_ant_map
        best_base = best.ps_base_map
        #print best_read, best_ant, best_base
        # We're going to keep the <x,y> location from the baselink (mapframe),
        # but keep <ang> (mapframe) from the antenna.
        rv = PoseStamped()
        rv.header.stamp = rospy.Time.now()
        rv.header.frame_id = best_base.header.frame_id
        rv.pose.position = best_base.pose.position
        rv.pose.orientation = best_ant.pose.orientation
        return rv
if __name__ == '__main__':
    # Run the recorder as a standalone ROS node until shutdown.
    rec = Recorder()
    rospy.spin()
| [
[
1,
0,
0.0142,
0.0071,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0213,
0.0071,
0,
0.66,
0.0909,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0284,
0.0071,
0,
0.... | [
"import roslib",
"roslib.load_manifest('rfid_behaviors')",
"import rospy",
"import tf",
"from rfid_behaviors.srv import RecorderSrv, RecorderSrvResponse",
"from rfid_behaviors.srv import NextBestVantage",
"from rfid_behaviors.msg import RecorderReads",
"import hrl_rfid.ros_M5e_client as rmc",
"from ... |
import subprocess as sb
import os
class CmdProcess:
    """Thin wrapper around subprocess.Popen supporting interrupt-style kill.

    Usage: CmdProcess([...]).run() starts the command; kill() sends it
    SIGINT (the equivalent of Ctrl-C / `kill -2`).
    """
    def __init__(self, cmd_list):
        # cmd_list: argv-style list passed straight to Popen (no shell).
        self.cmd_list = cmd_list
        self.process = None

    def run(self):
        """Launch the command; the Popen handle is kept in self.process."""
        self.process = sb.Popen(self.cmd_list)

    def kill(self):
        """Send SIGINT to the child, if one was started.

        Uses os.kill instead of shelling out to `kill -2`: no extra shell
        process, and it works regardless of PATH.  Safe no-op when run()
        was never called (the original raised AttributeError here).
        """
        import signal  # local import keeps this class self-contained
        if self.process is not None:
            os.kill(self.process.pid, signal.SIGINT)
| [
[
1,
0,
0.0714,
0.0714,
0,
0.66,
0,
394,
0,
1,
0,
0,
394,
0,
0
],
[
1,
0,
0.1429,
0.0714,
0,
0.66,
0.5,
688,
0,
1,
0,
0,
688,
0,
0
],
[
3,
0,
0.6429,
0.7857,
0,
0.6... | [
"import subprocess as sb",
"import os",
"class CmdProcess:\n \n def __init__(self, cmd_list):\n self.cmd_list= cmd_list\n self.process = None\n\n def run(self):\n self.process = sb.Popen(self.cmd_list)",
" def __init__(self, cmd_list):\n self.cmd_list= cmd_list\n ... |
#! /usr/bin/python
import roslib;
roslib.load_manifest('hrl_pr2_kinematics_tutorials')
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('std_srvs')
import rospy
import hrl_pr2_kinematics_tutorials.hrl_pr2 as hrl_pr2
import hrl_lib.transforms as tr
import rfid_behaviors.tactile_sensors as tsen
from rfid_behaviors.srv import ArmSrv
from rfid_behaviors.srv import HandoffSrv
import numpy as np, math
import os
import time
class HandoffNode():
    """ROS node exposing PR2 right-arm object-handoff behaviors as services.

    Services under /rfid_handoff/: handoff, initialize, handoff_pos, stow,
    pre_stow, grasp, stow_grasp, wave.  The joint-angle presets below are
    hand-recorded right-arm configurations (radians).
    """
    def __init__( self ):
        rospy.init_node( 'handoff', anonymous = True )
        rospy.logout( 'handoff_node: Have run hrl_pr2_gains/change_gains.sh yet?' )
        self.robot = hrl_pr2.HRL_PR2()
        # Tactile sensing hardware is unavailable in simulation (ROBOT=sim).
        if not (os.environ.has_key('ROBOT') and os.environ['ROBOT'] == 'sim'):
            self.ts = tsen.TactileSensor()
        self.arm = 'right_arm'
        # Hand-tuned joint-angle presets for the right arm.
        self.start_ja = [0.040304940763152608, 1.2398003444166741, -1.2204088251845415, -1.9324078526157087, -31.197472992401149, -1.7430222641585842, -1.5358378047038517]
        #self.target_ja = [0.35891507126604916, 0.13778228113494312, -0.01277662779292843, -1.4992538841561938, -28.605807802842136, -0.96590944225972863, -3.0950669743130161]
        self.target_ja = [0.0818, 0.377, -0.860, -2.144, -3.975, -1.479, 3.907]
        self.grasp_ja = [ -1.57263428749, -0.347376409246, -1.58724516843, -1.61707941489, -51.4022142048, -1.36894875484, -5.9965378332 ]
        self.stowgrasp_ja = [-0.130, 1.18, -1.410, -1.638, -141.06, -1.695, 48.616 ]
        self._sh = rospy.Service( '/rfid_handoff/handoff' , HandoffSrv, self.handoff )
        self._si = rospy.Service( '/rfid_handoff/initialize' , HandoffSrv, self.initialize )
        self._sj = rospy.Service( '/rfid_handoff/handoff_pos' , ArmSrv, self.handoff_pos )
        self._ss = rospy.Service( '/rfid_handoff/stow' , HandoffSrv, self.stow )
        self._sp = rospy.Service( '/rfid_handoff/pre_stow' , HandoffSrv, self.pre_stow )
        self._sg = rospy.Service( '/rfid_handoff/grasp' , HandoffSrv, self.grasp )
        self._senough = rospy.Service( '/rfid_handoff/stow_grasp' , HandoffSrv, self.stow_grasp )
        self._swave = rospy.Service( '/rfid_handoff/wave' , HandoffSrv, self.wave )
        # self.initialize() # Prefer to do this manually... (rosservice call /rfid_handoff/initialize)
        rospy.logout( 'handoff_node: Waiting for service calls.' )
    def initialize( self, msg = None ):
        """Move to the accept pose, open the gripper for an object, then stow."""
        rospy.logout( 'handoff_node: Initializing. Hand me an object!' )
        # Put into handoff position, ready to accept object
        self.robot.set_jointangles( self.arm, self.target_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        self.robot.open_gripper( self.arm )
        rospy.sleep( rospy.Duration( 2.0 ))
        self.stow()
        return True
    def wave( self, msg = None ):
        """Wave the arm back and forth a few times, then return to start pose."""
        wave_a = [0.0131, 0.325, -0.832, -1.762,-6.511, -0.191, 0.162]
        wave_b = [-0.180, 0.034, 0.108, -1.295, -6.224, -0.383, 0.119]
        self.robot.set_jointangles( self.arm, wave_a, 2.0 )
        rospy.sleep( rospy.Duration( 2.0 ))
        self.robot.set_jointangles( self.arm, wave_b, 1.0 )
        rospy.sleep( rospy.Duration( 1.0 ))
        self.robot.set_jointangles( self.arm, wave_a, 1.0 )
        rospy.sleep( rospy.Duration( 1.0 ))
        self.robot.set_jointangles( self.arm, wave_b, 1.0 )
        rospy.sleep( rospy.Duration( 1.0 ))
        self.robot.set_jointangles( self.arm, wave_a, 1.0 )
        rospy.sleep( rospy.Duration( 1.0 ))
        self.robot.set_jointangles( self.arm, self.start_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        return True
    def stow( self, msg=None ):
        """Close the gripper on the object and move to the stowed (start) pose."""
        # Grab object
        self.robot.close_gripper( self.arm )
        rospy.sleep( rospy.Duration( 2.5 ))
        # Stow
        self.robot.set_jointangles( self.arm, self.start_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        return True
    def pre_stow( self, msg=None ):
        """Close the gripper and hold at the handoff (target) pose."""
        # Grab object
        self.robot.close_gripper( self.arm )
        rospy.sleep( rospy.Duration( 2.0 ))
        # Stow
        self.robot.set_jointangles( self.arm, self.target_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        return True
    def grasp( self, msg=None ):
        """Close the gripper and move to the grasp pose."""
        # Grab object
        self.robot.close_gripper( self.arm )
        rospy.sleep( rospy.Duration( 2.0 ))
        # Stow
        self.robot.set_jointangles( self.arm, self.grasp_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        return True
    def stow_grasp( self, msg=None ):
        """Move to the stowed-grasp pose (gripper state left unchanged)."""
        # Stow
        self.robot.set_jointangles( self.arm, self.stowgrasp_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        return True
    def open( self ):
        """Open the gripper (helper; not exposed as a service)."""
        self.robot.open_gripper( self.arm )
        rospy.sleep( rospy.Duration( 2.0 ))
    def close( self ):
        """Close the gripper (helper; not exposed as a service)."""
        self.robot.close_gripper( self.arm )
        rospy.sleep( rospy.Duration( 2.0 ))
    def handoff( self, msg = None ):
        """Present the object, wait for a tactile tug, release, and stow."""
        # Put into handoff position.
        self.robot.set_jointangles( self.arm, self.target_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        # Tactile Sensor detector
        rospy.sleep( rospy.Duration( 0.5 ))
        self.ts.thresh_detect( 3000 )
        # Release object
        self.robot.open_gripper( self.arm )
        rospy.sleep( rospy.Duration( 2.0 ))
        # Stow
        self.robot.set_jointangles( self.arm, self.start_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        return True
    def handoff_pos( self, msg ):
        """Hand off at a caller-specified pose via IK (msg fields: x, y, z, ang)."""
        #pos = np.matrix([0.6977, -0.03622, 0.2015]).T
        #ang = tr.Rx(math.radians(0.))
        print msg
        pos = np.matrix([ msg.x, msg.y, msg.z ]).T
        ang = tr.Rx( msg.ang )
        q = [0, 0, 0, 0, 0, 0, 0]
        # Seed IK with the usual handoff configuration.
        j = self.robot.IK('right_arm', pos, ang, self.target_ja)
        #j = self.robot.IK('right_arm', pos, ang, q)
        self.robot.set_jointangles( 'right_arm', j, 3.0 )
        # Tactile Sensor detector
        rospy.sleep( rospy.Duration( 0.5 ))
        self.ts.thresh_detect( 3000 )
        # Release object
        self.robot.open_gripper( self.arm )
        rospy.sleep( rospy.Duration( 2.0 ))
        # Stow
        self.robot.set_jointangles( self.arm, self.start_ja, 3.0 )
        rospy.sleep( rospy.Duration( 3.0 ))
        return True
if __name__ == '__main__':
    # Start the handoff service node and serve until shutdown.
    hon = HandoffNode()
    #hon.handoff_pos()
    rospy.spin()
    #ho.handoff()
| [
[
1,
0,
0.0167,
0.0056,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0222,
0.0056,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0278,
0.0056,
0,
0.... | [
"import roslib;",
"roslib.load_manifest('hrl_pr2_kinematics_tutorials')",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('std_srvs')",
"import rospy",
"import hrl_pr2_kinematics_tutorials.hrl_pr2 as hrl_pr2",
"import hrl_lib.transforms as tr",
"imp... |
#!/usr/bin/python
import numpy as np, math
import cPickle as pkl
import yaml
import scipy.stats as stats
class RfidModel:
    """Data-driven RFID sensor model: detection probability and RSSI stats.

    The model is a precomputed 2-D grid (histogram bins over tag position)
    holding detection probability, mean RSSI, and RSSI stddev per cell,
    loaded from a pickle produced by process_radpat_plots.py.
    """
    def __init__( self, yaml_fname ):
        # yaml_fname is yaml file something like: cap_360/rad_plot_combined.yaml
        # you will need to have run: ./../process_radpat_plots.py --yaml rad_plot_combined.yaml
        # which generates rad_plot_combined_MODEL.pkl
        # Loading the YAML file that specified how the data is to be processed.
        # This file was used to generate the 'use_combined' data set.
        f = open( yaml_fname )
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary YAML tags; fine for trusted local configs, but prefer
        # yaml.safe_load otherwise.
        yaml_config = yaml.load( f )
        f.close()
        # Loading condensed data
        f = open( yaml_config['use_combined'].replace('.pkl','_MODEL.pkl'), 'r' )
        self.model = pkl.load( f )
        f.close()
        self.default_detect = 0.0 # These will be multiplied. Will be zero outside the model range.
        self.default_rssi = 1.0
        self.num = 1  # sample counter, used only for progress printing in weight_set
    def lookup( self, x, y ):
        """Return (detect_prob, rssi_mean, rssi_stddev) at position (x,y).

        Returns (None, None, None) when (x,y) is outside the model grid.
        """
        # Note: The xind and yind are flipped for the model ind, since
        # the edges are defined from the histogram ( which is
        # transposed compared to the models )
        xind = np.sum( x > self.model['xedges'] ) - 1
        yind = np.sum( y > self.model['yedges'] ) - 1
        # Outside the bounds of our model? Return no reads.
        if xind < 0 or xind >= len(self.model['xedges'])-1 or yind < 0 or yind >= len(self.model['yedges'])-1:
            return ( None, None, None )
        # Return value from models.
        return ( self.model['detect_model'][yind,xind],
                 self.model['rssi_model'][yind,xind],
                 self.model['stddev_model'][yind,xind] )
    def sample( self, x, y ):
        """Draw one simulated read at (x,y): -1 for no-read, else an RSSI value."""
        # Returns a sample (eg. detection yes or no, and RSSI)
        detect, rssi, stddev = self.lookup( x, y )
        if (not detect) or (not rssi) or (not stddev):
            return -1 # No read RSSI
        detected = np.random.uniform() <= detect # all the mass below the detection thresh
        if not detected:
            return -1 # Probability
        return np.clip( np.random.normal( loc=rssi, scale=stddev ),
                        np.min( self.model['rssi_model'] ),
                        np.max( self.model['rssi_model'] )) # Bound the results by the model bounds
    def prob( self, x, y, measurement ):
        """Return (detect_prob, rssi_pdf_value) for a measurement at (x,y)."""
        # Returns the detection and RSSI probabilities given the data-driven model
        detect, rssi, stddev = self.lookup( x, y )
        if (not detect) or (not rssi) or (not stddev):
            return self.default_detect, self.default_rssi
        rv = stats.norm( loc = rssi, scale = stddev )
        return detect, rv.pdf( measurement )
    def weight( self, measurement, particle ):
        """Particle-filter weight for one particle (1Dx2: X,Y) and one RSSI."""
        # particle is 1Dx2: X,Y. Measurement is RSSI
        detect_prob, rssi_prob = self.prob( particle[0], particle[1], measurement )
        return detect_prob * rssi_prob
    def weight_set( self, measurement, particle_set ):
        """Vectorized weight update over an Nx3 particle set [[x,y,w],...].

        Multiplies each particle's weight in place by the measurement
        likelihood (detection prob * RSSI gaussian, each mixed with a
        uniform floor), renormalizes to sum 1, and returns a copy.
        """
        self.num += 1
        if self.num % 10 == 0:
            print '\tProcessing sample %d' % self.num
        xy = particle_set[:,0:2].T # 2xN
        # Will automatically snap all particles to nearest bin (will fix below)
        bins_ind_x = np.sum( xy[0][:,np.newaxis] > self.model['xedges'][:-1], axis = 1 ) - 1
        bins_ind_y = np.sum( xy[1][:,np.newaxis] > self.model['yedges'][:-1], axis = 1 ) - 1
        # When the particle is outside the bin edges, lookup => (None, None, None), so
        # detect_prob = 0. Thus, handle these cases by setting weight to 0.0
        ind_x_less = np.where( xy[0] < self.model['xedges'][0] )[0]
        ind_x_more = np.where( xy[0] > self.model['xedges'][-1] )[0]
        ind_y_less = np.where( xy[1] < self.model['yedges'][0] )[0]
        ind_y_more = np.where( xy[1] > self.model['yedges'][-1] )[0]
        # Lookup values from model
        detect = self.model['detect_model'][bins_ind_y,bins_ind_x] # bins are flipped from histogram
        rssi = self.model['rssi_model'][bins_ind_y,bins_ind_x] # bins are flipped from histogram
        stddev = self.model['stddev_model'][bins_ind_y,bins_ind_x] # bins are flipped from histogram
        # Detection prob = model + uniform
        uniform_detect = 0.2
        detect_prob = detect.filled( 0.0 ) # for masked values, we assign a zero probability
        detect_prob += uniform_detect
        detect_prob[ ind_x_less ] = uniform_detect # When outside the bin edges, assume detect prob is 0.0
        detect_prob[ ind_x_more ] = uniform_detect # When outside the bin edges, assume detect prob is 0.0
        detect_prob[ ind_y_less ] = uniform_detect # When outside the bin edges, assume detect prob is 0.0
        detect_prob[ ind_y_more ] = uniform_detect # When outside the bin edges, assume detect prob is 0.0
        detect_prob = np.clip( detect_prob, 0.0, 1.0 ) # force it to be a legit probability (since we're adding two distributions!)
        # RSSI prob is gaussian at each cell
        uniform_rssi = 0.2
        rssi_prob = 1.0 / np.sqrt(2.0 * np.pi * np.power(stddev,2.0))
        rssi_prob *= np.exp( -1.0 * np.power( measurement - rssi, 2.0 ) / (2.0*np.power(stddev,2.0)))
        rssi_prob = rssi_prob.filled( 0.0 ) # for masked values, we assign a zero probability
        rssi_prob += uniform_rssi
        rssi_prob[ ind_x_less ] = uniform_rssi # When outside the bin edges, assume detect prob is 0.0
        rssi_prob[ ind_x_more ] = uniform_rssi # When outside the bin edges, assume detect prob is 0.0
        rssi_prob[ ind_y_less ] = uniform_rssi # When outside the bin edges, assume detect prob is 0.0
        rssi_prob[ ind_y_more ] = uniform_rssi # When outside the bin edges, assume detect prob is 0.0
        rssi_prob = np.clip( rssi_prob, 0.0, 1.0 ) # force it to be a legit probability (since we're adding two distributions!)
        # Weight is multiplication of the two
        weight = detect_prob * rssi_prob
        # Setting the probability to 0.0 is harsh (it kills any later updates).
        # Add a small (uniform) distribution to account for this.
        # minw = np.min( weight[np.where( weight > 1e-20 )] ) * 0.9
        # weight = np.clip( weight, minw, np.max( weight ))
        # Update particle_set in-place
        particle_set[:,2] *= weight
        # particle_set[:,2] = np.clip( particle_set[:,2], 1e-10, np.max(particle_set[:,2]) )
        # Normalize so that the sum is 1.0
        particle_set[:,2] /= np.sum( particle_set[:,2] )
        return np.copy( particle_set )
# NOTE(review): hard-coded absolute path into one user's checkout -- breaks on
# any other machine; should come from a ROS param or command-line option.
yaml_fname = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/rad_pattern_cap/rad_plot_shoulder_table_both_SpectrMedBot.yaml'
# yaml_fname = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/rad_plot_shoulder_left_datacap2.yaml'
# yaml_fname = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/cap_360/rad_plot_combined.yaml'
#yaml_fname = '/home/travis/svn/robot1/src/projects/rfid_pf/src/rfid_pf/pencil_beam_model.yaml'
class NoMotion:
    """Degenerate motion model: the particle set passes through unchanged."""
    def __init__( self ):
        print 'Starting NoMotion.'
    def predict_set( self, control, p_set ):
        # Control input is ignored; particles do not move.
        return p_set
if __name__ == '__main__':
    # Benchmark: compare the per-particle weight() loop (test1) against the
    # vectorized weight_set() (test2) on a 200x200 particle grid.
    import time
    X,Y = np.meshgrid( np.arange(-10,10,0.1), np.arange(-10,10,0.1))
    xyw = np.row_stack([ X.flatten(), Y.flatten(), np.ones( X.shape ).flatten() ]).T
    def test1( rm, measurement, particles ):
        # Scalar path: one weight() call per particle.
        t0 = time.time()
        w = np.array([ rm.weight( measurement, p[0:2] ) for p in particles ])
        rv = np.column_stack([ particles[:,0:2], w ])
        td = time.time() - t0
        return rv, td
    def test2( rm, measurement, particles ):
        # Vectorized path: one weight_set() call for the whole set.
        t0 = time.time()
        rv = rm.weight_set( measurement, particles )
        td = time.time() - t0
        return rv, td
    rm = RfidModel( yaml_fname )
    print 'Starting 1'
    r1,t1 = test1( rm, 80, xyw )
    print 'Done 1 in %2.2f sec.' % t1
    print 'Starting 2'
    r2,t2 = test2( rm, 80, xyw )
    print 'Done 2 in %2.2f sec.' % t2
    print 'Speedup: %3.2fx' % (t1 / t2)
| [
[
1,
0,
0.0146,
0.0049,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.0195,
0.0049,
0,
0.66,
0.1429,
279,
0,
1,
0,
0,
279,
0,
0
],
[
1,
0,
0.0244,
0.0049,
0,
... | [
"import numpy as np, math",
"import cPickle as pkl",
"import yaml",
"import scipy.stats as stats",
"class RfidModel:\n def __init__( self, yaml_fname ):\n # yaml_fname is yaml file something like: cap_360/rad_plot_combined.yaml\n # you will need to have run: ./../process_radpat_plots.py --y... |
#!/usr/bin/python
## I'm less than enthused about the ("optimization") bloat in
## hrl/pfilter. This implementation draws from that one, but
## eliminates a bunch of cruft and focuses on our particular details.
import random as rd
import numpy as np, math
class PFilter:
    """Minimal particle filter: motion -> measurement -> resample.

    Particles are stored as an Nx3 array-like: [[X, Y, weight], ...].
    """
    def __init__(self, motion_model, measurement_model, particles):
        self.motion_model = motion_model # class. Provides 'predict' method: 1Dx2 particle -> 1Dx2 particle
        self.measurement_model = measurement_model # class. Provides 'weight' method: 1Dx2 particle, measurement -> double
        # Particles. Should be np.matrix Nx3: [[X,Y,Weight],...]
        self.p = particles
    def motion( self, control_input ):
        """Apply the motion model to every particle; weights are kept.

        NOTE(review): control_input is accepted but never forwarded to
        the motion model's predict() -- confirm intended.
        """
        print 'Motioning'
        new_xy = np.row_stack([ self.motion_model.predict( i[0:2] ) # for each [X,Y].T => new [X,Y].T
                                for i in self.p ]) # stack them into Nx2
        new_p = np.column_stack([ new_xy, self.p[:,2] ]) # Particles keep weights => 3xN
        self.p = np.copy( new_p )
        return np.copy( new_p )
    def measurement( self, measurement ):
        """Multiply each particle's weight by the measurement likelihood."""
        # Takes in a single measurement, computes new weights for each
        # particle and combines them (multplicative) with the old weights.
        print 'Measuring.'
        w = np.array([ self.measurement_model.weight( measurement, i[0:2] ) for i in self.p ])
        new_w = self.p[:,2] * w
        new_p = np.column_stack([ self.p[:,0:2], new_w ])
        self.p = np.copy( new_p )
        return np.copy( new_p )
    def resample( self ):
        """Low-variance resampling; every resampled weight is reset to 1."""
        print 'Resampling'
        weighted_set = [ ( i[0:2], i[2] ) for i in self.p ]
        normalized_set = normalize_likelihood(weighted_set)
        new_xy = np.row_stack([ i for i in resample_uss( len(self.p), normalized_set )])
        new_p = np.column_stack([ new_xy, np.ones( new_xy.shape[0] ) ])
        self.p = np.copy( new_p )
        return np.copy( new_p )
    def step( self, control_input, measurement ):
        """One full filter iteration: motion, measurement update, resample."""
        self.motion( control_input )
        self.measurement( measurement )
        self.resample()
def resample_uss(num_samples, particles):
    """
    Universal stochastic sampler (low variance resampling)
    num_samples - number of samples desired
    particles - pairs of (state, weight) tuples
    """
    samples = []
    # One shared random offset in [0, 1/num_samples); the "comb teeth" below
    # are spaced 1/num_samples apart from it.
    r = rd.random() * (1.0 / float(num_samples))
    c = (particles[0])[1]  # running cumulative weight
    i = 0
    for m in xrange(num_samples):
        U = r + m * (1.0 / float(num_samples))
        #print "U", U
        while U > c:
            i = i + 1
            if i >= len(particles):
                # Wrap-around guard (e.g. cumulative float round-off leaves
                # total mass slightly below U).
                i = 0
            c = c + (particles[i])[1]
        samples.append((particles[i])[0])
    return samples
def normalize_likelihood(weighted_particles):
    """ Make all the particle weights sum up to 1 """
    def add(a,b):
        # Reducer over (state, weight) pairs; only the weight sum is kept.
        apart, aw = a
        bpart, bw = b
        return ('', aw+bw)
    total_weight = (reduce(add, weighted_particles, ('',0.0)))[1]
    def normalize(a):
        # Scale one (state, weight) pair by the total mass.
        part, weight = a
        return (part, weight/total_weight)
    return map(normalize, weighted_particles)
class NoMotion:
    """Stub motion model for testing: predict() returns the particle unchanged."""
    def __init__( self ):
        print 'Starting NoMotion.'
    def predict( self, p ):
        return p
class NoMeasure:
    """Stub measurement model for testing: constant likelihood of 0.5."""
    def __init__( self ):
        print 'Starting NoMeasure.'
    def weight( self, p, m ):
        return 0.5
if __name__ == '__main__':
    # Smoke test: 16 particles on a 4x4 grid with trivial models.
    X,Y = np.meshgrid( np.linspace(0,3,4),
                       np.linspace(0,3,4) )
    xyw = np.row_stack([ X.flatten(), # Build Nx3
                         Y.flatten(),
                         np.ones( X.shape ).flatten() ]).T # weights (multiplicative)
    pf = PFilter( NoMotion(), NoMeasure(), xyw )
    pf.motion( 0 )
    pf.measurement( 0 )
    pf.resample()
    print pf.p
    pf.measurement( 0 )
    pf.p[0,2] = 5 # 10 times more likely
    pf.resample()
    print pf.p
| [
[
1,
0,
0.0574,
0.0082,
0,
0.66,
0,
715,
0,
1,
0,
0,
715,
0,
0
],
[
1,
0,
0.0656,
0.0082,
0,
0.66,
0.1429,
954,
0,
2,
0,
0,
954,
0,
0
],
[
3,
0,
0.2459,
0.3361,
0,
... | [
"import random as rd",
"import numpy as np, math",
"class PFilter:\n def __init__(self, motion_model, measurement_model, particles):\n self.motion_model = motion_model # class. Provides 'predict' method: 1Dx2 particle -> 1Dx2 particle\n self.measurement_model = measurement_model # class. Pro... |
import numpy as np, math
import cPickle as pkl
# Build a synthetic "pencil beam" RFID model pickle: uniform detection and
# stddev everywhere, with RSSI decaying linearly along +x from grid center.
EXT = 10.0
DIVS = 200 # Keep this even
d = {}
d['xedges'] = np.linspace( -1 * EXT, EXT, DIVS )
d['yedges'] = np.linspace( -1 * EXT, EXT, DIVS )
d['stddev_model'] = np.ma.array(np.ones( (DIVS,DIVS) ))
d['detect_model'] = np.ma.array(np.ones( (DIVS,DIVS) ))
d['rssi_model'] = np.ma.array( np.ones( (DIVS,DIVS) ) * 55 )
# One grid row (the "beam"): RSSI ramps down from 100 with distance.
t = d['rssi_model'][DIVS/2-1,DIVS/2-1:]
d['rssi_model'][DIVS/2-1,DIVS/2-1:] = np.array([-35.0 / (DIVS / 2) * i + 100
                                                for i in xrange( len( t ))])
# Need to build a fake yaml config file for this fake beam model.
f = open( 'pencil_beam_MODEL.pkl', 'w' )
pkl.dump( d, f )
f.close()
| [
[
1,
0,
0.0385,
0.0385,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.0769,
0.0385,
0,
0.66,
0.0714,
279,
0,
1,
0,
0,
279,
0,
0
],
[
14,
0,
0.1538,
0.0385,
0,
... | [
"import numpy as np, math",
"import cPickle as pkl",
"EXT = 10.0",
"DIVS = 200 # Keep this even",
"d = {}",
"d['xedges'] = np.linspace( -1 * EXT, EXT, DIVS )",
"d['yedges'] = np.linspace( -1 * EXT, EXT, DIVS )",
"d['stddev_model'] = np.ma.array(np.ones( (DIVS,DIVS) ))",
"d['detect_model'] = np.ma.a... |
import roslib
roslib.load_manifest( 'costmap_services' )
import rospy
import numpy as np, math
import costmap_services.python_client as costmap
from display_particles import DisplayParticles
import cPickle as pkl
if __name__ == '__main__':
    # Visualize (and optionally recompute/combine) costmap-based particle
    # masks.  --recalc queries the costmap service over a grid; --combine
    # intersects a mask pickle with a captured costmap pickle.
    import optparse
    p = optparse.OptionParser()
    p.add_option('--recalc', action='store_true', dest='recalc',
                 help='recalculate the costmap points?', default = False)
    p.add_option('--fname', action='store', type='string', dest='fname',
                 help='pkl file to use', default = False)
    p.add_option('--combine', action='store_true', dest='combine',
                 help='Combine results of multiple costmaps', default = False)
    opt, args = p.parse_args()
    rospy.init_node( 'tmp2243425' )
    # fname = 'costmap_costs.pkl'
    fname = opt.fname
    if opt.combine:
        # OK, this is just a hack. I need a way to combine a masked map with a previously captured costmap.
        print 'YAY!'
        f = open( 'gen_costmap_mask.pkl', 'r' )
        mask = pkl.load( f )
        f.close()
        f = open( 'gen_costmap_aware_home.pkl', 'r' )
        obs = pkl.load( f )
        f.close()
        # fname = 'pf_costmap.pkl'
        # Cells with cost below 127 are treated as traversable in both maps.
        ind_m = np.where( mask[:,2] < 127.0 )[0]
        ind_o = np.where( obs[:,2] < 127.0 )[0]
        ind = np.intersect1d( ind_m, ind_o ) # Locations that are good in both costmaps
        p_set = np.copy( obs )
        p_set[:,2] = np.zeros( obs.shape[0] )
        p_set[:,2][ind] = True
        f = open( fname, 'w' )
        pkl.dump( p_set, f )
        f.close()
    if not opt.fname:
        print 'fname required here on out.'
        exit()
    if opt.recalc:
        # Sample the live costmap service on a regular 5 cm grid.
        res = 0.05
        cs = costmap.CostmapServices()
        X,Y = np.meshgrid( np.arange( -5, 8, res ), np.arange( -5, 8, res ))
        xy = zip( X.flatten(), Y.flatten() )
        print 'Computing Map Costs...'
        mc = []
        for i,xyi in enumerate( xy ):
            if i % 100 == 0:
                print 'Still working ( %d of %d -- %3.2f%%)' % (i, len(xy), 100.0*i/len(xy))
            mc += [ cs.getMapCost( *xyi ) ]
        print 'Done.'
        p_set = np.column_stack([ np.array( X.flatten() ),
                                  np.array( Y.flatten() ),
                                  np.array( mc ) ])
        f = open( fname, 'w' )
        pkl.dump( p_set, f )
        f.close()
    # Display whichever particle set is now stored in fname, forever.
    dp = DisplayParticles()
    f = open( fname, 'r' )
    p_set = pkl.load( f )
    f.close()
    while not rospy.is_shutdown():
        print 'Displaying particles'
        dp.update( p_set )
        rospy.sleep( 0.3 )
| [
[
1,
0,
0.0105,
0.0105,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0211,
0.0105,
0,
0.66,
0.1429,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0316,
0.0105,
0,
0.... | [
"import roslib",
"roslib.load_manifest( 'costmap_services' )",
"import rospy",
"import numpy as np, math",
"import costmap_services.python_client as costmap",
"from display_particles import DisplayParticles",
"import cPickle as pkl",
"if __name__ == '__main__':\n import optparse\n p = optparse.O... |
#!/usr/bin/python
import time
import roslib
roslib.load_manifest( 'rosgraph_msgs' )
roslib.load_manifest( 'rospy' )
import rospy
from rosgraph_msgs.msg import Clock
rospy.init_node( 'clock_pub' )
time.sleep( 0.2 )  # brief pause so the node/publisher can register with the master
pub = rospy.Publisher( '/clock', Clock )
# Publish wall-clock time on /clock at roughly 1 kHz (presumably for nodes
# running with use_sim_time -- TODO confirm).
while not rospy.is_shutdown():
    pub.publish( Clock().clock.from_sec( time.time() ) )
    time.sleep( 0.001 )
| [
[
1,
0,
0.1579,
0.0526,
0,
0.66,
0,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.2632,
0.0526,
0,
0.66,
0.1111,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.3158,
0.0526,
0,
... | [
"import time",
"import roslib",
"roslib.load_manifest( 'rosgraph_msgs' )",
"roslib.load_manifest( 'rospy' )",
"import rospy",
"from rosgraph_msgs.msg import Clock",
"rospy.init_node( 'clock_pub' )",
"time.sleep( 0.2 )",
"pub = rospy.Publisher( '/clock', Clock )",
"while not rospy.is_shutdown():\n ... |
# Explicit public API of this package: the submodules re-exported on
# `from <package> import *`.
__all__ = [
    'pf_stats',
    'pub_clock',
    'stats_best_uniform'
]
| [
[
14,
0,
0.5,
0.8333,
0,
0.66,
0,
272,
0,
0,
0,
0,
0,
5,
0
]
] | [
"__all__ = [\n 'pf_stats',\n 'pub_clock',\n 'stats_best_uniform' \n]"
] |
#!/usr/bin/python
import rfid_model
import lib_pfilter
import roslib
roslib.load_manifest('rfid_behaviors')
roslib.load_manifest('rfid_datacapture')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('visualization_msgs')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('pfilter')
import rospy
import hrl_lib.transforms as tr
import tf.transformations as tft
import display_particles
from display_particles import DisplayParticles
from geometry_msgs.msg import Quaternion, Point
import rfid_datacapture.math_util as mu
import visualization_msgs.msg as vm
import hrl_lib.transforms as tr
import hrl_lib.viz as viz
vsm = viz.single_marker
import random as rd
import cPickle as pkl
import numpy as np, math
import pylab as pl
import time
import glob
# trial_num = 4
# obj_num = 3
# servo_yn = False
def pprint(r):
    """Pretty-print one trial-result dict produced by process_trialobj.

    Fields that were unavailable (no positive reads) hold the string '--'
    and are printed as dashes; the string-type check below detects that.
    """
    print '\nLocation %d, Object %d, Trial %d' % (r['loc'], r['obj_num'], r['trial_num'])
    if r['servo_yn']:
        print '\tSEARCH plus SERVO'
    else:
        print '\tSEARCH ONLY'
    print '\tPos Reads: %d' % (r['pos_readings'])
    print '\tTot Reads: %d' % (r['tot_readings'])
    print '\tPercent Reads: %2.1f' % (r['pos_readings']*100.0/r['tot_readings'])
    if r['best_pos'].__class__ == ''.__class__:
        print '\tEstimate Loc: --'
    else:
        print '\tEstimate Loc: <%2.3f, %2.3f>' % ( r['best_pos'][0], r['best_pos'][1] )
    if r['orient_est'].__class__ == ''.__class__:
        print '\tEstimate Theta: --'
    else:
        print '\tEstimate Theta: %2.1f (deg)' % ( math.degrees( r['orient_est'] ))
    if r['dxy'].__class__ == ''.__class__:
        print '\tDist Err (m): --'
    else:
        print '\tDist Err (m): %2.3f' % ( r['dxy'] )
    if r['dtheta'].__class__ == ''.__class__:
        print '\tAng Err (deg): --'
    else:
        print '\tAng Err (deg): %2.1f' % ( math.degrees( r['dtheta'] ))
    # if r.has_key('other'):
    #     print '\tOther params: ', r['other']
    print '\n\n\n'
# Tag database: obj_num -> [ 12-char tag ID (space padded), extra list ].
# The second element appears unused in this script -- TODO confirm.
tdb = { 0: ['OrangeMedBot',[]],
        1: ['TravisTVremo',[]],
        2: ['RedBottle ',[]],
        3: ['OnMetalKeys ',[]],
        4: ['WhiteMedsBot',[]],
        5: ['BlueMedsBox ',[]],
        6: ['TeddyBearToy',[]],
        7: ['CordlessPhon',[]],
        8: ['BlueHairBrus',[]]}
# Tag placement sites: loc -> [ name, [x, y, z] ground truth (map frame) ].
pts = { 0: ['BehindTree',[3.757, 6.017, 0.036]],
        1: ['FireplaceMantle',[5.090, 4.238, 1.514]],
        2: ['CircleEndTable',[5.399, 2.857, 0.607]],
        3: ['Couch',[3.944, 1.425, 0.527]],
        4: ['RectEndTable',[3.302, 0.932, 0.534]],
        5: ['BehindKitchenTable',[-0.339, -2.393, 0.793]],
        6: ['NearDishwaser',[-1.926, -0.835, 0.946]],
        7: ['InCupboard',[-3.257, 1.294, 1.397]],
        8: ['OnFilingCabinet',[-0.083, 2.332, 0.670]]}
def process_trialobj( trial_num, obj_num, servo_yn ):
    """Post-process one (trial, object) capture with the particle filter.

    Loads the recorded reads and particle-set pickles for the trial,
    estimates the best robot position (min expected weighted distance over
    traversable costmap cells) and heading (brute-force min expected
    angular error), and compares against the tag's ground-truth location.

    trial_num, obj_num: indices into the capture runs / tdb table.
    servo_yn: True to evaluate search-plus-servo data, False for search only.
    Returns (had_reads, results_dict, particle_set_copy); particle_set_copy
    is None when there were no positive reads.
    """
    obj_name = tdb[obj_num][0]
    tname = obj_name.replace( ' ', '' )
    loc = (trial_num + obj_num) % 9
    loc_name = pts[loc][0]
    loc_pos = np.array(pts[loc][1]) # Tag ground-truth location
    fname_prefix = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/search_cap/'
    servo_fname = fname_prefix
    servo_fname += 'search_aware_home/'
    servo_fname += 'woot_150_'+str(trial_num)+'_tag_'+obj_name.replace(' ','')+'_servo.pkl'
    pf_search = servo_fname.replace('_servo.pkl', '_pf_search.pkl')
    pf_servo = servo_fname.replace('_servo.pkl', '_pf_servo.pkl')
    # Search only
    search_reads_fname = fname_prefix
    search_reads_fname += 'search_aware_home/woot_150_'+str(trial_num)+'_reads.pkl'
    f = open( search_reads_fname, 'r' )
    summary_search = pkl.load( f )
    f.close()
    # Positive read: rssi != -1 and the read belongs to this trial's tag.
    pos_readings_search = sum([ True for p in summary_search if p.read.rssi != -1 and p.read.tagID == obj_name ])
    tot_readings_search = len( summary_search )
    if pos_readings_search == 0: # No results!
        print '\t No results for this instance.'
        res = { 'loc': loc,
                'obj_num': obj_num,
                'trial_num': trial_num,
                'pos_readings': pos_readings_search,
                'tot_readings':tot_readings_search,
                'best_pos': '--',
                'orient_est': '--',
                'dxy': '--',
                'dtheta': '--',
                'servo_yn': servo_yn,
                'other': { 'w_mass': 0.0, # specific to pf
                           'orient_fit': 0.0 }}
        return False, res, None
    f = open( pf_search ) # load the particle set from Search data. Will overwrite if using search plus servo
    p_set_loaded = pkl.load( f )
    f.close()
    pos_readings = pos_readings_search
    tot_readings = tot_readings_search
    # If this is Search PLUS servo...
    if servo_yn:
        # update the stats for pos reads
        f = open( servo_fname, 'r' )
        summary_servo = pkl.load( f )
        f.close()
        pos_readings_servo = sum([ True for p in summary_servo if p.read.rssi != -1 and p.read.tagID == obj_name ])
        tot_readings_servo = len( summary_servo )
        pos_readings = pos_readings_search + pos_readings_servo
        tot_readings = tot_readings_search + tot_readings_servo
        # use the particle set from the SERVO data
        f = open( pf_servo )
        p_set_loaded = pkl.load( f )
        f.close()
    # print '\t Positive Reads: %d of %d (%2.1f)' % ( pos_readings,
    #                                                 tot_readings,
    #                                                 100.0 * pos_readings / tot_readings )
    # maxw = np.max( p_set_loaded[:,2] )
    # minw = np.min( p_set_loaded[:,2] )
    # w_normed = 1.0 * ( p_set_loaded[:,2] - minw ) / ( maxw - minw )
    # # Only keep particles in the top 98% of likelihoods (250 / 255). This
    # # makes the selection only consider locations where the probability
    # # mass is actually located. (These are the ones displayed in the
    # # screen captures!)
    # p_set = p_set_loaded[ np.where( w_normed > 0.02 )[0] ]
    # print 'Shape: ', p_set.shape
    # # print 'p_set size (pre-cap): ', p_set.shape
    # # p_set_precap = np.copy( p_set )
    # # p_set = p_set[np.argsort(p_set[:,2])][:1000] # Cap the total number: keep at most the top 1000
    # # print 'p_set size (pre-cap): ', p_set.shape
    p_set = np.copy( p_set_loaded )
    p_set = p_set[ np.argsort( p_set[:,2] )[::-1] ] # sort by decreasing weight
    w_norm = p_set[:,2] / np.sum( p_set[:,2] )
    w_cum = np.cumsum( w_norm )
    # ONLY KEEP top 8000 particles (computation)
    w_mass = w_cum[:8000][-1] * 100.0 # Ratio of mass in p_set to total:
    p_set = p_set[:8000] # only keep top 8000 for computational reasons!
    # # ONLY KEEP particles that are in top 98% of normalized values. (these are the ones displayed)
    # maxw = np.max( p_set[:,2] )
    # minw = np.min( p_set[:,2] )
    # w_scaled = 1.0 * ( p_set[:,2] - minw ) / ( maxw - minw )
    # p_set = p_set[ np.where( w_scaled > 0.02 )[0] ]
    # p_set = p_set[:8000] # Only keep top 8000 max
    # w_mass = w_cum[ p_set.shape[0] ]
    # print 'p_set size (pre-cap): ', p_set.shape
    # print w_mass
    # print '\tShape: ', p_set.shape
    pf_costmap = '/home/travis/svn/robot1/src/projects/rfid_pf/src/rfid_pf/pf_costmap.pkl'
    f = open( pf_costmap )
    costmap = pkl.load( f )
    f.close()
    cm = costmap[ np.where( costmap[:,2] )[0] ] # Locations where the robot can be located
    # Determine the score for each possible xy robot location
    def score_loc( xy ):
        # The xy location under consideration for the robot
        mag = np.sqrt( np.sum( np.power( p_set[:,0:2] - xy, 2.0 ), axis = 1)) # || dist ||
        score = mag * p_set[:,2] # || dist || * w's
        return np.sum( score )
    # Compute all the scores for possible robot locations
    t0 = time.time()
    pos_scores = [ score_loc( i[0:2] ) for i in cm ]
    dt = time.time() - t0
    # print '\tScore computations per second: ', len( pos_scores ) * 1.0 / dt
    best_ind = np.argmin( pos_scores )
    best_pos = cm[ best_ind ][0:2]
    # # Calculate the angle that is the mean
    # # Now that we have best_pos, we need to find the best orientation.
    # def score_orient( xyw, best_pos ): # returns 1x2
    #     # xyw is 1x3
    #     dxy = xyw[0:2] - best_pos # move into best_pose frame (1x2)
    #     dxy_unit = dxy / np.linalg.norm( dxy ) # normalize to unit circle
    #     return dxy_unit * xyw[2] # 1x2; [x_circ => x / |dxy| * w, y_circ...]
    # so = np.array([ score_orient( i, best_pos ) for i in p_set ])
    # so[ np.where(np.isnan( so )) ] = 0.0 # for positions where the particle is one and the same, the norm is 0.0 so score is nan.
    # x_so = np.sum( so[:,0] )
    # y_so = np.sum( so[:,1] )
    # orient_est = np.arctan2( y_so, x_so )
    # orient_fit = np.sqrt( x_so**2.0 + y_so**2.0 ) / ( np.sum( p_set[:,2] ))
    # Brute force calculate the angle that yields the minimum |dtheta|
    theta_hats = np.linspace( -1.0 * np.pi, np.pi, 360, endpoint = False )
    theta_hats = np.array([ mu.standard_rad( i ) for i in theta_hats ])
    dxy = p_set[:,0:2] - best_pos # put the p_set into the best_pos frame!
    pset_thetas = np.arctan2( dxy[:,1], dxy[:,0] )
    pset_thetas = np.array([ mu.standard_rad( i ) for i in pset_thetas ])
    pset_w_normed = p_set[:,2] / np.sum( p_set[:,2] )
    def exp_err( th ):
        # Expected (weight-normalized) absolute angular error for heading th.
        errs = np.abs([ mu.standard_rad( i ) for i in th - pset_thetas ])
        weighted_errs = pset_w_normed * errs
        mean_we = np.mean( weighted_errs )
        return mean_we
    theta_hats_res = np.array([ exp_err( i ) for i in theta_hats ])
    # rrr = theta_hats
    # res = theta_hats_res
    orient_est_ind = np.argmin( theta_hats_res )
    orient_est = theta_hats[ orient_est_ind ]
    # Compute errors:
    dxy = np.linalg.norm( best_pos - loc_pos[0:2] )
    true_theta = np.arctan2( loc_pos[1] - best_pos[1], # y / x
                             loc_pos[0] - best_pos[0] )
    dtheta = mu.standard_rad( orient_est - true_theta )
    res = { 'loc': loc,
            'obj_num': obj_num,
            'trial_num': trial_num,
            'pos_readings': pos_readings,
            'tot_readings':tot_readings,
            'best_pos': best_pos,
            'orient_est': orient_est,
            'dxy': dxy,
            'dtheta': dtheta,
            'servo_yn': servo_yn,
            'other': { 'w_mass': w_mass, # specific to pf
                       'theta_hats': theta_hats,
                       'theta_hats_res': theta_hats_res,
                       'p_set': p_set }}
    return True, res, np.copy( p_set ) # Was_reads?, results dict, particles (for display)
def MAIN_PROCESS( trial_num, obj_num, servo_yn, screen_cap = False ):
    # Run the particle-filter estimate for one (trial, object) pair, print the
    # results, optionally dump them to a pickle and trigger an RVIZ screen capture.
    #   trial_num  : trial index (0-8)
    #   obj_num    : object index (0-8)
    #   servo_yn   : True => use the combined search-and-servo data
    #   screen_cap : True => also save the results pickle and a screenshot
    print 'In MP: ', trial_num, obj_num, servo_yn, screen_cap
    # best_pos, orient_est, p_set = process_trialobj( 4, 3, True )
    was_reads, res, p_set = process_trialobj( trial_num, obj_num, servo_yn )
    print 'RESULTS'
    pprint( res )
    # save results:
    if screen_cap:
        f = open( 'Obj%d_Trial%d_Servo%d_pf_results.pkl' % (obj_num, trial_num, int( servo_yn )), 'w')
        pkl.dump( res, f )
        f.close()
    # Make screen capture.
    if not was_reads: # Skip step if no reads.
        return
    best_pos = res[ 'best_pos' ]
    orient_est = res[ 'orient_est' ]
    pub_mark = rospy.Publisher( '/tag_poses', vm.Marker )
    # Marker color distinguishes servo (cyan) from search-only (orange) runs.
    if servo_yn:
        c_tm = [0./255, 205./255, 255./255, 1.0] # rgba
    else:
        c_tm = [255./255, 123./255, 1./255, 1.0] # rgba
    # Arrow marker at the estimated tag position, pointing along orient_est.
    tm = vsm( np.matrix([ best_pos[0],best_pos[1], 0.0 ]).T,
              np.matrix(tft.quaternion_from_euler( 0.0, 0.0, orient_est )).T,
              'arrow', '/map',
              scale = [0.5, 1.0, 1.0],
              color = c_tm,
              duration = 50.0,
              m_id = 2*p_set.shape[0] + 1 )
    def pub_tm( ):
        # Re-stamp and publish the marker several times so RVIZ reliably shows it.
        tm.header.stamp = rospy.Time.now()
        for i in xrange( 10 ):
            pub_mark.publish( tm )
            rospy.sleep( 0.3 )
    print 'Click on RVIZ!'
    time.sleep( 3 )
    pub_tm()
    pub_tm()
    display_particles.display_trialobj( trial_num, obj_num, servo_yn, screen_cap = screen_cap )
    print 'Done.\n\n\n'
    return
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('--trial', action='store', type='int', dest='trial',
                 help='trial number (0-8)')
    p.add_option('--obj', action='store', type='int', dest='obj',
                 help='object number (0-8)')
    p.add_option('--servo', action='store_true', dest='servo',
                 help='Use combined search and servo?', default = False)
    opt, args = p.parse_args()
    obj_num = opt.obj
    trial_num = opt.trial
    servo_yn = opt.servo
    rospy.init_node( 'goober' )
    # trial < 9  : keep republishing results for the one requested combination.
    # trial >= 9 : sweep every trial/object/servo combination once, with captures.
    if trial_num < 9:
        while not rospy.is_shutdown():
            print 'Publishing.'
            MAIN_PROCESS( trial_num, obj_num, servo_yn )
    else:
        # for i in range( 0, 1 ): # trial
        #     for j in range( 4, 5 ): # obj
        for i in range( 0, 9 ): # trial
            for j in range( 0, 9 ): # obj
                for s in [False, True]: # servo_yn
                    MAIN_PROCESS( i, j, s, screen_cap = True )
| [
[
1,
0,
0.0075,
0.0025,
0,
0.66,
0,
973,
0,
1,
0,
0,
973,
0,
0
],
[
1,
0,
0.01,
0.0025,
0,
0.66,
0.0323,
170,
0,
1,
0,
0,
170,
0,
0
],
[
1,
0,
0.0149,
0.0025,
0,
0.... | [
"import rfid_model",
"import lib_pfilter",
"import roslib",
"roslib.load_manifest('rfid_behaviors')",
"roslib.load_manifest('rfid_datacapture')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('visualization_msgs')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('pfilter')",
"... |
#!/usr/bin/python
import rfid_model
import lib_pfilter
import roslib
roslib.load_manifest('visualization_msgs')
roslib.load_manifest('hrl_lib')
roslib.load_manifest('pfilter')
import rospy
import visualization_msgs.msg as vm
import hrl_lib.transforms as tr
import hrl_lib.viz as viz
vsm = viz.single_marker
import pfilter.pfilter as pfilter
import random as rd
import numpy as np, math
import pylab as pl
import time
import cPickle as pkl
import os
import glob
class DisplayParticles:
    # Publishes a weighted particle set (Nx3 array of [x, y, weight]) to RVIZ
    # as colored point-list markers in the /map frame.
    def __init__( self, pub_topic = '/particles' ):
        self.m = None            # last marker (unused after init)
        self.pub_mark = rospy.Publisher( pub_topic, vm.Marker )
        self.mid = 0             # running marker-id counter for create_mark
        try:
            rospy.init_node('display_particles')
        except:
            rospy.logout( 'DisplayParticles: Node already initialized' )
            pass
    def create_mark( self, p, c = [1.0, 0.0, 0.0, 0.8], mid = None ):
        # Build a single small sphere marker at xy position p (z = 0) in /map.
        # NOTE(review): the color argument `c` is ignored — the marker always
        # uses the hard-coded red rgba below; confirm before relying on it.
        if mid == None:
            self.mid += 1
            mid = self.mid
        m = vsm( np.matrix([ p[0], p[1], 0.0 ]).T,
                 np.matrix([ 0.0, 0.0, 0.0, 1.0 ]).T,
                 'sphere', '/map',
                 scale = [0.025, 0.025, 0.025],
                 color = [1.0, 0.0, 0.0, 0.3], # rgba,
                 duration = 10.0,
                 m_id = mid )
        m.header.stamp = rospy.Time.now()
        return m
    def update( self, particles ):
        # Publish every particle, colored by weight via the jet colormap
        # (weights normalized to [0, 255]); solid red when all weights equal.
        xyz = np.column_stack([ particles[:,0:2], np.zeros( particles.shape[0] )]).T
        w = particles[:,2] # becomes 1D
        # print w
        wmin = np.min( w )
        wmax = np.max( w )
        # import pdb
        # pdb.set_trace()
        if wmin == wmax:
            # Degenerate weights: color everything solid red.
            colors = np.row_stack([ np.ones( particles.shape[0] ),
                                    np.zeros( particles.shape[0] ),
                                    np.zeros( particles.shape[0] ),
                                    np.ones( particles.shape[0] ) ])
        else:
            colors = np.array([ pl.cm.jet( int( 1.0 * ( wi - wmin ) / (wmax - wmin) * 255.0 ))
                                for wi in w ]).T
        m = viz.list_marker( xyz, colors, [0.025, 0.025, 0.025], 'points', '/map', 300 )
        m.header.stamp = rospy.Time.now()
        # Publish repeatedly so RVIZ reliably receives the marker.
        for i in xrange( 10 ):
            self.pub_mark.publish( m )
            rospy.sleep( 0.2 )
        return
    def update2( self, particles ):
        # Like update(), but draws only particles whose colormap index exceeds 5
        # (non-negligible weight), sorted so heavier particles render on top.
        xyz = np.column_stack([ particles[:,0:2], np.zeros( particles.shape[0] )]).T
        w = particles[:,2] # becomes 1D
        # print w
        wmin = np.min( w )
        wmax = np.max( w )
        # import pdb
        # pdb.set_trace()
        if wmin == wmax:
            colors = np.row_stack([ np.ones( particles.shape[0] ),
                                    np.zeros( particles.shape[0] ),
                                    np.zeros( particles.shape[0] ),
                                    np.ones( particles.shape[0] ) ])
            iv = np.ones( len( w )).tolist()
        else:
            iv = [ int( 1.0 * ( wi - wmin ) / (wmax - wmin) * 255.0 ) for wi in w ]
            colors = np.array([ pl.cm.jet( ivi ) for ivi in iv ]).T
        # colors[3] *= 0.3
        print np.array(iv)
        ind = np.where( np.array(iv) > 5 )[0]
        aind = np.argsort( w[ind] ) # sort them so that some come to top.
        m = viz.list_marker( xyz[:,ind][:,aind], colors[:,ind][:,aind], [0.05, 0.05, 0.025], 'points', '/map', 30 )
        m.header.stamp = rospy.Time.now()
        for i in xrange( 10 ):
            self.pub_mark.publish( m )
            rospy.sleep( 0.2 )
        return
def display_trialobj( trial_num, obj_num, servo_yn, screen_cap = False ):
    # Load the stored particle-filter output for one (trial, object) pair and
    # display it in RVIZ: particles colored by weight plus a green sphere at the
    # ground-truth tag location.  Optionally saves a screenshot via scrot.
    try:
        rospy.init_node( 'ros_pf' )
    except:
        print 'display_trialobj: node already initialized'
        pass
    # obj_num -> [12-char RFID tag ID, []]
    tdb = { 0: ['OrangeMedBot',[]],
            1: ['TravisTVremo',[]],
            2: ['RedBottle   ',[]],
            3: ['OnMetalKeys ',[]],
            4: ['WhiteMedsBot',[]],
            5: ['BlueMedsBox ',[]],
            6: ['TeddyBearToy',[]],
            7: ['CordlessPhon',[]],
            8: ['BlueHairBrus',[]]}
    # location index -> [name, ground-truth [x, y, z] in /map]
    pts = { 0: ['BehindTree',[3.757, 6.017, 0.036]],
            1: ['FireplaceMantle',[5.090, 4.238, 1.514]],
            2: ['CircleEndTable',[5.399, 2.857, 0.607]],
            3: ['Couch',[3.944, 1.425, 0.527]],
            4: ['RectEndTable',[3.302, 0.932, 0.534]],
            5: ['BehindKitchenTable',[-0.339, -2.393, 0.793]],
            6: ['NearDishwaser',[-1.926, -0.835, 0.946]],
            7: ['InCupboard',[-3.257, 1.294, 1.397]],
            8: ['OnFilingCabinet',[-0.083, 2.332, 0.670]]}
    obj_name = tdb[obj_num][0]
    tname = obj_name.replace( ' ', '' )
    # The tag location index is a fixed function of trial and object number.
    loc = (trial_num + obj_num) % 9
    loc_name = pts[loc][0]
    loc_pos = np.array(pts[loc][1]) # Tag ground-truth location
    fname_prefix = '/home/travis/svn/robot1/src/projects/rfid_datacapture/src/rfid_datacapture/search_cap/'
    servo_fname = fname_prefix
    servo_fname += 'search_aware_home/'
    servo_fname += 'woot_150_'+str(trial_num)+'_tag_'+obj_name.replace(' ','')+'_servo.pkl'
    pf_search = servo_fname.replace('_servo.pkl', '_pf_search.pkl')
    pf_servo = servo_fname.replace('_servo.pkl', '_pf_servo.pkl')
    # Bail out cleanly when this trial/object combination has no capture file.
    glob_r = glob.glob( servo_fname )
    if glob_r == []:
        print '\t No results for this instance.\n\n'
        return
    if len(glob_r) > 1:
        print '\t Multiple results...?! Weirdness. Skipping.'
        return
    # Load either the post-servo or post-search particle set.
    if servo_yn:
        f = open( pf_servo, 'r' )
        p_set = pkl.load( f )
        f.close()
    else:
        f = open( pf_search, 'r' )
        p_set = pkl.load( f )
        f.close()
    dp = DisplayParticles()
    pub_mark = rospy.Publisher( '/tag_poses', vm.Marker )
    # Green sphere marker at the ground-truth tag location.
    tm = vsm( np.matrix([ pts[loc][1][0],
                          pts[loc][1][1],
                          pts[loc][1][2] ]).T,
              np.matrix([ [0.0], [0.0], [0.0], [1.0] ]),
              'sphere', '/map',
              color = [0.0, 1.0, 0.0, 1.0], # rgba
              duration = 150.0,
              m_id = 2*p_set.shape[0] + 1 )
    def pub_tm():
        # Re-stamp and publish the marker repeatedly so RVIZ catches it.
        tm.header.stamp = rospy.Time.now()
        for i in xrange( 10 ):
            pub_mark.publish( tm )
            rospy.sleep( 0.3 )
    def update_display():
        print 'UPDATING DISPLAY... '
        pub_tm()
        pub_tm()
        dp.update2( p_set )
        rospy.sleep( 0.3 )
        print 'DONE.'
    print 'UPDATING'
    update_display()
    rospy.sleep( 1.0 )
    if screen_cap:
        # -d 2: 2 s delay; -u: capture the currently focused window (RVIZ).
        os.system( 'scrot -d 2 -u Obj%d_Trial%d_Servo%d_pf_results.png' % ( obj_num, trial_num, int(servo_yn) ))
        rospy.sleep( 2.0 )
    print 'DERP'
    return
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('--trial', action='store', type='int', dest='trial',
                 help='trial number (0-8)')
    p.add_option('--obj', action='store', type='int', dest='obj',
                 help='object number (0-8)')
    p.add_option('--servo', action='store_true', dest='servo',
                 help='Use combined search and servo?', default = False)
    opt, args = p.parse_args()
    obj_num = opt.obj
    trial_num = opt.trial
    servo_yn = opt.servo
    # trial < 9  : keep refreshing the display for the one requested combination.
    # trial >= 9 : capture screenshots for every trial/object/servo combination.
    if trial_num < 9:
        while not rospy.is_shutdown():
            display_trialobj( trial_num, obj_num, servo_yn )
    else:
        print 'Click on RVIZ!'
        time.sleep( 3 )
        for trial_num in range( 0, 9 ):
            for obj_num in range( 0, 9 ):
                for servo_yn in [False, True]:
                    print 'Calling display trialobj: ', trial_num, obj_num, servo_yn
                    display_trialobj( trial_num, obj_num, servo_yn, screen_cap = True )
                    print 'Done.\n\n\n'
| [
[
1,
0,
0.0121,
0.004,
0,
0.66,
0,
973,
0,
1,
0,
0,
973,
0,
0
],
[
1,
0,
0.0162,
0.004,
0,
0.66,
0.0476,
170,
0,
1,
0,
0,
170,
0,
0
],
[
1,
0,
0.0243,
0.004,
0,
0.6... | [
"import rfid_model",
"import lib_pfilter",
"import roslib",
"roslib.load_manifest('visualization_msgs')",
"roslib.load_manifest('hrl_lib')",
"roslib.load_manifest('pfilter')",
"import rospy",
"import visualization_msgs.msg as vm",
"import hrl_lib.transforms as tr",
"import hrl_lib.viz as viz",
"... |
#!/usr/bin/python
import roslib
roslib.load_manifest('rfid_hardware')
import rospy
from std_msgs.msg import Float64
import tf
import time
import numpy as np, math
import functools
class tf_updater():
    # Subscribes to the four Robotis ear-servo angle topics and re-broadcasts
    # each reported angle (Float64, radians) as a TF transform for the matching
    # ear pan/tilt frame.
    def __init__(self, name,
                 right_pan = '/robotis/servo_right_pan',
                 right_tilt = '/robotis/servo_right_tilt',
                 left_pan = '/robotis/servo_left_pan',
                 left_tilt = '/robotis/servo_left_tilt'):
        # name: ROS node name; the keyword args are the servo-angle topic names.
        try:
            rospy.init_node( name )
        except:
            pass
        # Each callback is bound to its own TransformBroadcaster via partial.
        # Right pan
        rospy.Subscriber( right_pan, Float64,
                          functools.partial( self.rpan_cb, bc = tf.TransformBroadcaster() ))
        # Right tilt
        rospy.Subscriber( right_tilt, Float64,
                          functools.partial( self.rtilt_cb, bc = tf.TransformBroadcaster() ))
        # Left pan
        rospy.Subscriber( left_pan, Float64,
                          functools.partial( self.lpan_cb, bc = tf.TransformBroadcaster() ))
        # Left tilt
        rospy.Subscriber( left_tilt, Float64,
                          functools.partial( self.ltilt_cb, bc = tf.TransformBroadcaster() ))
    def rpan_cb( self, ang_msg, bc ):
        # bc is a specific TransformBroadcaster
        # Right ear pan: fixed offset from plate_right_base; yaw is the negated
        # servo angle with a -60 degree mechanical offset.
        bc.sendTransform( (-0.0655, -0.0510, 0.0675),
                          tf.transformations.quaternion_from_euler( 0.0, 0.0, -1.0 * ang_msg.data - math.radians( 60.0 ) ),
                          rospy.Time.now(),
                          'ear_pan_right',
                          'plate_right_base' )
    def rtilt_cb( self, ang_msg, bc ):
        # bc is a specific TransformBroadcaster
        # Right ear tilt: pitch follows the servo angle directly.
        bc.sendTransform( (0.0673, 0.0, 0.0),
                          tf.transformations.quaternion_from_euler( 0.0, ang_msg.data, 0.0 ),
                          rospy.Time.now(),
                          'ear_tilt_right',
                          'ear_pan_right' )
    def lpan_cb( self, ang_msg, bc ):
        # bc is a specific TransformBroadcaster
        # Left ear pan: mirrored y-offset and +60 degree yaw offset.
        bc.sendTransform( (-0.0655, +0.0510, 0.0675),
                          tf.transformations.quaternion_from_euler( 0.0, 0.0, -1.0 * ang_msg.data + math.radians( 60.0 )),
                          rospy.Time.now(),
                          'ear_pan_left',
                          'plate_left_base' )
    def ltilt_cb( self, ang_msg, bc ):
        # bc is a specific TransformBroadcaster
        # Left ear tilt: pitch is the negated servo angle (mirror of right).
        bc.sendTransform( (0.0673, 0.0, 0.0),
                          tf.transformations.quaternion_from_euler( 0.0, -1.0 * ang_msg.data, 0.0 ),
                          rospy.Time.now(),
                          'ear_tilt_left',
                          'ear_pan_left' )
if __name__ == '__main__':
    # Start the TF updater node and service its subscriber callbacks until shutdown.
    tfs = tf_updater('servo_tf_updater')
    rospy.spin()
| [
[
1,
0,
0.0366,
0.0122,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0488,
0.0122,
0,
0.66,
0.1111,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.061,
0.0122,
0,
0.6... | [
"import roslib",
"roslib.load_manifest('rfid_hardware')",
"import rospy",
"from std_msgs.msg import Float64",
"import tf",
"import time",
"import numpy as np, math",
"import functools",
"class tf_updater():\n def __init__(self, name, \n right_pan = '/robotis/servo_right_pan',\n ... |
#!/usr/bin/python
import roslib
roslib.load_manifest( 'rfid_hardware' )
import rospy
import robotis.ros_robotis as rr
import time
import math
if __name__ == '__main__':
    # Move both ear pan/tilt servo pairs to the nominal home pose:
    # pans splayed to +/-1.370 rad, tilts level, at 10 deg/s, non-blocking.
    p_left = rr.ROS_Robotis_Client( 'left_pan' )
    t_left = rr.ROS_Robotis_Client( 'left_tilt' )
    p_left.move_angle( 1.370, math.radians(10), blocking = False )
    t_left.move_angle( 0.0, math.radians(10), blocking = False )
    p_right = rr.ROS_Robotis_Client( 'right_pan' )
    t_right = rr.ROS_Robotis_Client( 'right_tilt' )
    p_right.move_angle( -1.370, math.radians(10), blocking = False )
    t_right.move_angle( 0.0, math.radians(10), blocking = False )
| [
[
1,
0,
0.125,
0.0417,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1667,
0.0417,
0,
0.66,
0.1667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.2083,
0.0417,
0,
0.6... | [
"import roslib",
"roslib.load_manifest( 'rfid_hardware' )",
"import rospy",
"import robotis.ros_robotis as rr",
"import time",
"import math",
"if __name__ == '__main__':\n p_left = rr.ROS_Robotis_Client( 'left_pan' )\n t_left = rr.ROS_Robotis_Client( 'left_tilt' )\n \n p_left.move_angle( 1.3... |
#!/usr/bin/python
# Merges functionality from ros_M5e in hrl_rfid package.
# if opt.device == 'ears':
# print 'Starting Ears RFID Services'
# ros_rfid = ROS_M5e( name = 'ears', readPwr = opt.power,
# portStr = '/dev/robot/RFIDreader',
# antFuncs = [EleLeftEar, EleRightEar],
# callbacks = [] )
# rospy.spin()
# ros_rfid.stop()
import roslib
roslib.load_manifest( 'rfid_hardware' )
import rospy
import time
from threading import Thread
# The Servers need to distinguish b/w real vs. simulated!
# They implement same services / functions. (Clients unaffected)
import os
if os.environ.has_key('ROBOT') and os.environ['ROBOT'] == 'sim':
roslib.load_manifest( 'rfid_people_following' ) # this code should really be moved to "RFID Sim" or some such thing, but I'm in a hurry.
import rfid_people_following.robotis_servo_sim as rr
import rfid_people_following.M5e_reader_sim as rM5e
else:
import robotis.ros_robotis as rr
import hrl_rfid.ros_M5e as rM5e
if __name__ == '__main__':
    # Bring up both ear-servo pollers and the RFID reader node; block until
    # ROS shutdown, then stop everything.
    # p_right = rr.ROS_Robotis_Poller( '/dev/robot/servo1', [29,30], ['right_pan', 'right_tilt'] )
    # p_left = rr.ROS_Robotis_Poller( '/dev/robot/servo0', [27,28], ['left_pan', 'left_tilt'] )
    p_left = rr.ROS_Robotis_Poller( '/dev/robot/servo1', [29,30], ['left_pan', 'left_tilt'] )
    p_right = rr.ROS_Robotis_Poller( '/dev/robot/servo0', [27,28], ['right_pan', 'right_tilt'] )
    # Hack to prevent the right servo from shaking.
    p_right.servos[0].write_address( 27, [3] ) # change the right pan compliance region
    p_right.servos[0].write_address( 26, [3] ) # change the right pan compliance region
    # p_right = rr.ROS_Robotis_Poller( '/dev/robot/servo0', [29,30], ['right_pan', 'right_tilt'] )
    # p_left = rr.ROS_Robotis_Poller( '/dev/robot/servo1', [27,28], ['left_pan', 'left_tilt'] )
    ros_rfid = rM5e.ROS_M5e( name = 'ears', readPwr = 3000,
                             portStr = '/dev/robot/RFIDreader',
                             antFuncs = [ rM5e.EleLeftEar, rM5e.EleRightEar ],
                             callbacks = [] )
    rospy.spin()
    ros_rfid.stop()
    p_right.stop()
    p_left.stop()
| [
[
1,
0,
0.2642,
0.0189,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.283,
0.0189,
0,
0.66,
0.1429,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.3019,
0.0189,
0,
0.6... | [
"import roslib",
"roslib.load_manifest( 'rfid_hardware' )",
"import rospy",
"import time",
"from threading import Thread",
"import os",
"if os.environ.has_key('ROBOT') and os.environ['ROBOT'] == 'sim':\n roslib.load_manifest( 'rfid_people_following' ) # this code should really be moved to \"RFID Sim... |
#!/usr/bin/python
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Travis Deyle (Healthcare Robotics Lab, Georgia Tech.)
# ROS imports
import roslib; roslib.load_manifest('hrl_rfid')
import rospy
from hrl_rfid.msg import RFIDread
from hrl_rfid.msg import RFIDreadArr
from hrl_rfid.srv import RfidSrv
import hrl_rfid.lib_M5e as M5e
import time
from threading import Thread
# Modeled off lib_M5e.M5e_Poller
class ROS_M5e( Thread ):
    # ROS wrapper around the Mercury M5e RFID reader.  Runs as a thread that
    # continuously polls the reader in one of two modes — query all tags, or
    # track one tag — publishing RFIDread / RFIDreadArr messages and exposing
    # a service to switch modes.
    QUERY_MODE = 'query'
    TRACK_MODE = 'track'
    def __init__(self, name = 'reader1', readPwr = 2300,
                 portStr = '/dev/robot/RFIDreader',
                 antFuncs = [], callbacks = []):
        # name     : prefix for the node, topics, and service names.
        # readPwr  : reader transmit power (centi-dBm).
        # antFuncs : callables that switch the reader to a given antenna and
        #            return that antenna's name.
        # callbacks: extra callables invoked with [antennaName, tagid, rssi]
        #            for every reading (self.broadcast is always appended).
        # NOTE(review): mutable default args are shared across calls — safe
        # here only because they are never mutated in place.
        Thread.__init__(self)
        self.should_run = True
        try:
            rospy.init_node( 'rfid_m5e_' + name )
        except rospy.ROSException:
            pass
        self.mode = ''
        self.name = name + '_reader'
        rospy.logout( 'ROS_M5e: Launching RFID Reader' )
        rospy.logout( 'ROS_M5e: Please check out our related work @ http://www.hsi.gatech.edu/hrl/project_rfid.shtml' )
        rospy.logout( 'ROS_M5e: '+self.name+' Building & Connecting to reader' )
        def prin( x ): rospy.logout( 'ROS_M5e: lib_M5e: ' + x ) # use rospy.logout in underlying lib's output
        self.reader = M5e.M5e(readPwr=readPwr, portSTR = portStr, verbosity_func = prin)
        self.antFuncs = antFuncs
        self.callbacks = callbacks + [self.broadcast]
        rospy.logout( 'ROS_M5e: publishing RFID reader with type RFIDread to channel /rfid/'+name+'_reader' )
        self.channel = rospy.Publisher('/rfid/'+name+'_reader', RFIDread)
        self.pub_arr = rospy.Publisher('/rfid/'+name+'_reader_arr', RFIDreadArr)
        self._mode_service_obj = rospy.Service('/rfid/'+name+'_mode',
                                               RfidSrv, self._mode_service)
        rospy.logout( 'ROS_M5e: '+self.name+' Inialized and awaiting instructions' )
        self.start() # Thread: calls self.run()
    def run( self ):
        # Main polling loop; idles (5 ms sleep) when no mode is selected.
        while self.should_run and not rospy.is_shutdown():
            if self.mode == self.QUERY_MODE:
                # Query every antenna for all visible tags.
                for aF in self.antFuncs:
                    antennaName = aF(self.reader)    # let current antFunc make appropriate changes
                    results = self.reader.QueryEnvironment()
                    if len(results) == 0:
                        # Publish a sentinel "no read" datum so listeners see activity.
                        results = [[ '', -1 ]] # [[ tagid, rssi ], ...]
                        #datum = [antennaName, '', -1]
                        #[cF(datum) for cF in self.callbacks]
                    arr = []
                    t_now = rospy.Time.now()
                    for tagid, rssi in results:
                        rv = RFIDread( None, antennaName, tagid, rssi )
                        rv.header.stamp = t_now
                        arr.append( rv )
                        datum = [antennaName, tagid, rssi]
                        [cF(datum) for cF in self.callbacks]
                    rfid_arr = RFIDreadArr()
                    rfid_arr.header.stamp = t_now
                    rfid_arr.arr = arr
                    self.pub_arr.publish( rfid_arr )
            elif self.mode == self.TRACK_MODE:
                # Track a single tag id on every antenna.
                for aF in self.antFuncs:
                    antennaName = aF(self.reader)    # let current antFunc make appropriate changes
                    tagid = self.tag_to_track
                    rssi = self.reader.TrackSingleTag(tagid, timeout=50)
                    t_now = rospy.Time.now()
                    rv = RFIDread( None, antennaName, tagid, rssi )
                    rv.header.stamp = t_now
                    rfid_arr = RFIDreadArr()
                    rfid_arr.header.stamp = t_now
                    rfid_arr.arr = [rv]
                    self.pub_arr.publish( rfid_arr )
                    #if rssi != -1:
                    datum = [antennaName, tagid, rssi]
                    [cF(datum) for cF in self.callbacks]
            else:
                time.sleep(0.005)
        rospy.logout( 'ROS_M5e: '+self.name+' Shutting down reader' )
    def stop( self ):
        # Ask run() to exit and join the thread (3 s timeout).
        self.should_run = False
        self.join(3)
        if (self.isAlive()):
            raise RuntimeError("ROS_M5e: unable to stop thread")
    def broadcast(self, data):
        # Default callback: republish one [antenna, tagid, rssi] reading.
        antName, tagid, rssi = data
        rv = RFIDread( None, antName, tagid, rssi )
        rv.header.stamp = rospy.Time.now()
        self.channel.publish( rv )
    # For internal use only
    def _mode_service(self, data):
        # Service handler.  data.data is a list of strings:
        #   []                       -> invalid
        #   ['query']                -> query mode; any other single value stops
        #   ['track', <12-char id>]  -> track mode for that tag
        val = data.data
        if len(val) == 0:
            rospy.logout( 'ROS_M5e: Mode Service called with invalid argument: ' + str(val) )
        elif len(val) == 1:
            if val[0] == self.QUERY_MODE:
                rospy.logout( 'ROS_M5e: '+self.name+' Entering Query Mode' )
                self.mode = self.QUERY_MODE
            else:
                rospy.logout( 'ROS_M5e: '+self.name+' Stopping Reader' )
                self.mode = ''
        elif len(val) == 2:
            if val[0] == self.TRACK_MODE and len(val[1]) == 12:
                rospy.logout( 'ROS_M5e: '+self.name+' Entering Track Mode: ' + str(val[1]) )
                self.mode = self.TRACK_MODE
                self.tag_to_track = val[1]
            else:
                rospy.logout( 'ROS_M5e: Mode Service called with invalid argument: ' + str(val) )
        else:
            rospy.logout( 'ROS_M5e: Mode Service called with invalid argument: ' + str(val) )
        return True
# -----------------------------------------------
# Likely Callbacks: (various antennas)
# -----------------------------------------------
def EleLeftEar(M5e):
    """Switch the reader onto the left-ear antenna (port 1); return its name."""
    antenna_name = 'EleLeftEar'
    M5e.ChangeAntennaPorts(1, 1)  # TX and RX both on port 1
    time.sleep(0.010)             # brief settle time after switching
    return antenna_name
def EleRightEar(M5e):
    """Switch the reader onto the right-ear antenna (port 2); return its name."""
    antenna_name = 'EleRightEar'
    M5e.ChangeAntennaPorts(2, 2)  # TX and RX both on port 2
    time.sleep(0.010)             # brief settle time after switching
    return antenna_name
def Hand_Right_1(M5e):
    """Select in-hand right antenna 1 by driving GPIO1 = 1, GPIO2 = 0."""
    for gpio_cmd in ('\x02\x96\x01\x01', '\x02\x96\x02\x00'):
        M5e.TransmitCommand(gpio_cmd)
        M5e.ReceiveResponse()  # consume the reader's ack before the next command
    return 'Hand_Right_1'
def Hand_Right_2(M5e):
    """Select in-hand right antenna 2 by driving GPIO1 = 1, GPIO2 = 1."""
    for gpio_cmd in ('\x02\x96\x01\x01', '\x02\x96\x02\x01'):
        M5e.TransmitCommand(gpio_cmd)
        M5e.ReceiveResponse()  # consume the reader's ack before the next command
    return 'Hand_Right_2'
def Hand_Left_1(M5e):
    """Select in-hand left antenna 1 by driving GPIO1 = 0, GPIO2 = 0."""
    for gpio_cmd in ('\x02\x96\x01\x00', '\x02\x96\x02\x00'):
        M5e.TransmitCommand(gpio_cmd)
        M5e.ReceiveResponse()  # consume the reader's ack before the next command
    return 'Hand_Left_1'
def Hand_Left_2(M5e):
    """Select in-hand left antenna 2 by driving GPIO1 = 0, GPIO2 = 1."""
    for gpio_cmd in ('\x02\x96\x01\x00', '\x02\x96\x02\x01'):
        M5e.TransmitCommand(gpio_cmd)
        M5e.ReceiveResponse()  # consume the reader's ack before the next command
    return 'Hand_Left_2'
def PrintDatum(data):
    """Debug callback: echo one (antenna, tagid, rssi) reading to stdout."""
    ant, ids, rssi = data  # unpack to enforce the expected 3-element shape
    print( data )
if __name__ == '__main__':
    import optparse
    p = optparse.OptionParser()
    p.add_option('-d', action='store', type='string', dest='device',
                 help='Which RFID device to initialize.')
    # NOTE(review): the -p help text is duplicated from -d; it should describe
    # the reader power (centi-dBm, default 3000).
    p.add_option('-p', action='store', type='int', dest='power', default=3000,
                 help='Which RFID device to initialize.')
    opt, args = p.parse_args()
    # Launch the reader node matching the requested device, with the antenna
    # switch functions appropriate to that hardware; block until shutdown.
    if opt.device == 'ears':
        print 'Starting Ears RFID Services'
        ros_rfid = ROS_M5e( name = 'ears', readPwr = opt.power,
                            portStr = '/dev/robot/RFIDreader',
                            antFuncs = [EleLeftEar, EleRightEar],
                            callbacks = [] )
        rospy.spin()
        ros_rfid.stop()
    if opt.device == 'inhand':
        print 'Starting Inhand RFID Services'
        ros_rfid = ROS_M5e( name = 'inhand', readPwr = opt.power,
                            portStr = '/dev/robot/inHandReader',
                            antFuncs = [Hand_Right_1, Hand_Right_2,
                                        Hand_Left_1, Hand_Left_2 ],
                            callbacks = [] )
        rospy.spin()
        ros_rfid.stop()
    if opt.device == 'head':
        print 'Starting PR2 Head RFID Services'
        def PR2_Head(M5e):
            # Single head antenna on reader port 1.
            M5e.ChangeAntennaPorts(1,1)
            time.sleep(0.010)
            return 'PR2_Head'
        ros_rfid = ROS_M5e( name = 'head', readPwr = opt.power,
                            portStr = '/dev/robot/RFIDreader',
                            antFuncs = [PR2_Head],
                            callbacks = [] )
        rospy.spin()
        ros_rfid.stop()
| [
[
1,
0,
0.1318,
0.0039,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1318,
0.0039,
0,
0.66,
0.0588,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1357,
0.0039,
0,
0.... | [
"import roslib; roslib.load_manifest('hrl_rfid')",
"import roslib; roslib.load_manifest('hrl_rfid')",
"import rospy",
"from hrl_rfid.msg import RFIDread",
"from hrl_rfid.msg import RFIDreadArr",
"from hrl_rfid.srv import RfidSrv",
"import hrl_rfid.lib_M5e as M5e",
"import time",
"from threading impo... |
#!/usr/bin/python
import roslib; roslib.load_manifest('hrl_lib'); import rospy
# from hrl_lib.msg import String
# from hrl_lib.msg import RFID_Interface
import hrl_lib.util as ut
import hrl_lib.rutils as ru
import time
import pygame
import pygame.display
import pygame.locals
import pygame.transform
import numpy as np, math
class ROS_UI_Robot():
    # Robot-side half of the RFID selection UI: publishes candidate tag IDs to
    # the PC interface and blocks until a selection comes back.
    def __init__(self, init=True):
        if init:
            try:
                print 'Initializing RFID UI on Robot'
                rospy.init_node('RFID_UI_Robot',anonymous=True)
            except rospy.ROSException:
                pass
        # NOTE(review): the RFID_Interface import is commented out at the top
        # of this file, so this reference raises NameError at runtime — confirm
        # the intended message type before use.
        self.tag_pub = rospy.Publisher('/hrl/ele/UI_Robot',RFID_Interface)
        print 'RFID UI on Robot Running'
        print 'Connecting to RFID UI on PC'
        self.selected_tag = ''
        self.selection = []
        rospy.Subscriber('/hrl/ele/UI_PC_Selection', RFID_Interface, self.selection_cb, None, 1)
    # def publish_tag_ids(self, tagIDs):
    #     ''' tagIDs is a list of strings '''
    #     for tag in tagIDs:
    #         rt = String()
    #         rt.data = tag
    #         self.tag_pub.publish( rt )
    #         time.sleep(0.2)
    #     rt.data = '-1'
    #     self.tag_pub.publish( rt )
    #     time.sleep(0.002)
    def publish_tag_ids(self, tagIDs):
        # Send the candidate tag IDs; humanread/actions are left empty and
        # filled in by the PC-side UI from its own database.
        pub_msg = RFID_Interface()
        pub_msg.tagIDs = tagIDs
        pub_msg.humanread = []
        pub_msg.actions = []
        self.tag_pub.publish( pub_msg )
        time.sleep(0.002)
    # def selection_cb(self, pc_msg):
    #     self.selected_tag = pc_msg.data
    #     print 'Received tag selection from UI: ', self.selected_tag
    #     return self.selected_tag
    def selection_cb(self, rfid_msg):
        # Store the PC's reply as [tagID, humanread, action].
        tagID = rfid_msg.tagIDs[0]
        action = rfid_msg.actions[0]
        humanread = rfid_msg.humanread[0]
        self.selection = [tagID, humanread, action]
        #print 'Received selection from UI: ', self.selection
    def receive_response(self):
        # Busy-wait (20 ms polls) until selection_cb fires; return and clear it.
        while self.selection == []:
            time.sleep(0.02)
        rv = self.selection
        self.selection = []
        return rv
    # def receive_response(self):
    #     rt = String()
    #     rt = self.listener.read()
    #     return rt.data
class ROS_UI_PC():
    # PC-side half of the RFID selection UI.  Receives candidate tag IDs from
    # the robot, asks the user to pick a tag and an action (via text prompt or
    # pygame GUI), and publishes the selection back to the robot.
    def __init__(self, init=True, graphical=False):
        # init: initialize a ROS node here; graphical: use the pygame GUI.
        if init:
            try:
                print 'Initializing RFID UI on PC'
                rospy.init_node('RFID_UI_PC',anonymous=True)
            except rospy.ROSException:
                pass
        #self.tag_pub = rospy.Publisher('/hrl/ele/UI_PC',String)
        # NOTE(review): the RFID_Interface import is commented out at the top
        # of this file, so these references raise NameError at runtime —
        # confirm the intended message type before use.
        self.ui_selection = rospy.Publisher('/hrl/ele/UI_PC_Selection',RFID_Interface)
        print 'RFID UI on PC Running'
        print 'Connecting to RFID UI on Robot'
        #rospy.Subscriber('/hrl/ele/UI_Robot', String, self.selection_cb, None, 1)
        rospy.Subscriber('/hrl/ele/UI_Robot', RFID_Interface, self.process_robot_request, None, 1)
        # self.listener = ru.GenericListener('RFID_UI_Robot', String,
        #                                    '/hrl/ele/UI_Robot', 20)
        self.ids = []
        self.ids_done = False
        self.graphical = graphical
        # Databases for the graphical selector: per-tag images and a pickle of
        # per-tag 'properties' (name) and available 'actions'.
        self.images_db = '/home/travis/svn/robot1/src/projects/08_03_dog_commands/images_db/'
        self.pps_db = ut.load_pickle('/home/travis/svn/robot1/src/projects/08_03_dog_commands/ele_rfid.pickle')
    def process_robot_request(self, rfid_interface_msg):
        # Subscriber callback for tag lists arriving from the robot.  Keeps
        # only tags known to pps_db, asks the user, publishes the selection.
        msg = rfid_interface_msg
        # Get data out of msg to prevent overwriting!
        # From the robot, msg: tagIDs = ['id1','id2',...], humanread = [], actions = []
        tagIDs = msg.tagIDs
        humanread = []
        actions = []
        # Remote interface is responsible for populating other categories
        ids_in_pps = []
        for i, tag in enumerate( tagIDs ):
            if self.pps_db.has_key( tag ):
                ids_in_pps.append( tag )
                humanread.append( self.pps_db[tag]['properties']['name'] )
                acts = self.pps_db[tag]['actions'].keys()
                actions.append( acts )
        # else:
        #     humanread.append( tag )
        #     actions.append( ['fetch'] )
        # Get the selection (returned as RFID_Interface message)
        if self.graphical:
            selection = self.get_selection_graphical( ids_in_pps, humanread, actions )
        else:
            selection = self.get_selection_text( ids_in_pps, humanread, actions )
        # Publish the message
        self.ui_selection.publish( selection )
        print '\n\n Waiting for next request... \n\n'
    def get_selection_text( self, tagIDs, humanread, actions ):
        # Console selector: prompt for a tag index, then for one of its actions.
        # Auto-selects when only one option exists.
        print '\n\nSelect a tag:'
        if len(tagIDs) == 1:
            print '\tOnly one option available: ', humanread[0]
            tag_ind = 0
        else:
            for i, tag in enumerate( tagIDs ):
                print '\t(%d) %s' % (i, humanread[i])
            tag_ind = int(raw_input())
        print 'Select an action for that tag:'
        if len( actions[tag_ind] ) == 1:
            print '\tOnly one option available: ', actions[tag_ind][0]
            act_ind = 0
        else:
            for i, act in enumerate( actions[tag_ind] ):
                print '\t(%d) %s' % (i, actions[tag_ind][i])
            act_ind = int(raw_input())
        retmsg = RFID_Interface()
        retmsg.tagIDs = [ tagIDs[tag_ind] ]
        retmsg.humanread = [ humanread[tag_ind] ]
        retmsg.actions = [ actions[tag_ind][act_ind] ]
        return retmsg
    def smart_scale(self, image):
        # Scale the image so its larger dimension equals self.imheight.
        ims = np.array(image.get_size(),dtype='float')
        scale = self.imheight / np.max(ims)
        return pygame.transform.scale(image, tuple(ims*scale))
    def calc_blit_loc(self, image, center_pos):
        # Convert a cell-center position into the top-left blit coordinate that
        # centers the (possibly non-square) image inside its display cell.
        ims = np.array(image.get_size(), dtype='float')
        horiz = center_pos[0] - self.imheight/2 + (self.imheight - ims[0]) / 2.
        vert = center_pos[1] - self.imheight/2 + (self.imheight - ims[1]) / 2.
        return (horiz, vert)
    def get_selection_graphical(self, tagIDs, humanread, actions):
        # Pygame selector: show tag images in a 3x3 grid, let the user click
        # one, then show that tag's action images and let the user click one.
        pygame.init()
        self.s_width = 600
        self.s_height = 700
        srf = pygame.display.set_mode((self.s_width,self.s_height))
        fps = 100
        loopFlag = True
        clk = pygame.time.Clock()
        obj = [srf, fps, clk]
        self.imheight = 175.
        w = self.s_width * 1.0
        h = self.s_height * 1.0
        # Centers of a 3x3 grid of display cells.
        blit_pos = [[ w/3-w/6, h/3-h/6],
                    [ w/3-w/6+w/3, h/3-h/6],
                    [ w/3-w/6+2*w/3, h/3-h/6],
                    [ w/3-w/6, h/3-h/6+h/3],
                    [ w/3-w/6+w/3, h/3-h/6+h/3],
                    [ w/3-w/6+2*w/3, h/3-h/6+h/3],
                    [ w/3-w/6, h/3-h/6+2*h/3],
                    [ w/3-w/6+w/3, h/3-h/6+2*h/3],
                    [ w/3-w/6+2*w/3, h/3-h/6+2*h/3]]
        tag_images = []
        tag_surfaces = []
        blit_loc = []
        for i, tag in enumerate(tagIDs):
            print 'Loading image for tag ', tag
            tag_image = pygame.image.load(self.images_db + tag + '.jpg').convert()
            tag_image = self.smart_scale(tag_image)
            tag_images.append( tag_image )
            blit_loc.append( blit_pos[i] )
            #pygame.display.set_mode(tag_images[i].get_size())
            #tag_surfaces.append(pygame.display.get_surface())
            srf.blit(tag_image, self.calc_blit_loc(tag_image,blit_loc[i]))
        tag_ind = self.get_selection( obj, tag_images, humanread, blit_loc )
        print 'Selected tag ', tag_ind, ': ', humanread[tag_ind]
        act_images = []
        act_surfaces = []
        blit_loc = []
        for i, act in enumerate(actions[tag_ind]):
            print 'Loading image for act ', act
            # NOTE(review): `tag` here is left over from the previous loop (the
            # *last* tag in tagIDs, not the selected one) — presumably it should
            # be tagIDs[tag_ind]; likewise `tag_image` in the blit below looks
            # like it should be act_image.  Confirm before changing.
            act_image = pygame.image.load(self.images_db + tag + act + '.jpg').convert()
            act_image = self.smart_scale(act_image)
            act_images.append( act_image )
            blit_loc.append( blit_pos[i] )
            #pygame.display.set_mode(tag_images[i].get_size())
            #tag_surfaces.append(pygame.display.get_surface())
            srf.blit(act_image, self.calc_blit_loc(tag_image,blit_loc[i]))
        act_ind = self.get_selection( obj, act_images, actions[tag_ind], blit_loc )
        print 'Selected action ', act_ind, ': ', actions[tag_ind][act_ind]
        retmsg = RFID_Interface()
        retmsg.tagIDs = [ tagIDs[tag_ind] ]
        retmsg.humanread = [ humanread[tag_ind] ]
        retmsg.actions = [ actions[tag_ind][act_ind] ]
        return retmsg
    def put_bottom_text( self, srf, text ):
        # Render `text` horizontally centered near the bottom of the window.
        font = pygame.font.Font(None, 25)
        box = font.render(text, 1,(10, 10, 10, 0))
        ts = box.get_size()
        horiz = self.s_width / 2.0 - ts[0]/2.0
        vt = self.s_height - 50.0 - ts[1]/2.0
        srf.blit(box, (horiz, vt))
        return True
    def draw_rect(self, srf, blit_loc):
        # Highlight the cell centered at blit_loc: an outer red square with a
        # slightly smaller white square over it leaves a red border visible.
        width = self.imheight * 1.10
        height = self.imheight *1.10
        horiz = blit_loc[0] - width / 2.0
        vert = blit_loc[1] - height / 2.0
        pygame.draw.rect(srf, (255, 0, 0), (horiz, vert, width, height))
        width = self.imheight * 1.01
        height = self.imheight *1.01
        horiz = blit_loc[0] - width / 2.0
        vert = blit_loc[1] - height / 2.0
        pygame.draw.rect(srf, (255, 255, 255), (horiz, vert, width, height))
        return True
    def get_selection( self, obj, images, humanread, blit_loc ):
        # Event loop: highlight the image nearest the cursor; return the index
        # of the image the user left-clicks (or the current nearest one on
        # window-close / Esc).
        [srf, fps, clk] = obj
        loopFlag = True
        ind = 0
        pos = (0,0)
        while loopFlag:
            # Clear the screen
            srf.fill((255,255,255))
            # Highlight whichever image center is closest to the cursor.
            diffs = np.array(blit_loc) - np.array(pos)
            ind = np.argmin( ut.norm( diffs.T ))
            self.put_bottom_text( srf, humanread[ind] )
            self.draw_rect(srf, blit_loc[ind])
            for i, image in enumerate(images):
                srf.blit(image, self.calc_blit_loc(image,blit_loc[i]))
            #print 'going'
            pygame.display.flip()
            events = pygame.event.get()
            for e in events:
                if e.type==pygame.locals.QUIT:
                    loopFlag=False
                if e.type==pygame.locals.KEYDOWN:
                    if e.key == 27: # Esc
                        loopFlag=False
                if e.type == pygame.locals.MOUSEMOTION:
                    pos = e.pos
                if e.type==pygame.locals.MOUSEBUTTONDOWN:
                    if e.button == 1:
                        # left button
                        pos = e.pos
                        diffs = np.array(blit_loc) - np.array(pos)
                        ind = np.argmin( ut.norm( diffs.T ))
                        loopFlag = False
            clk.tick(fps)
        srf.fill((255,255,255))
        pygame.display.flip()
        clk.tick(fps)
        return ind
# pc = ROS_UI_PC(init = False, graphical = True)
# pc.get_selection_graphical(['LightSwitch1','LightSwitch1'],
# ['lightswitch','LightSwitch1'],
# [['on','off'],['on','off']])
if __name__ == '__main__':
    # Command-line entry point: run either the PC-side UI client (-g) or the
    # robot-side publisher (default).  -d selects the graphical display.
    import optparse
    p = optparse.OptionParser()
    p.add_option('-d', action='store_true', dest='graphical',
                 help='Use a graphical display.')
    p.add_option('-g', action='store_true', dest='client',
                 help='Build Client?', default=False)
    opt, args = p.parse_args()
    if opt.client:
        pc = ROS_UI_PC(graphical = opt.graphical)
        # BUGFIX: the original line here was an unterminated call,
        # "pc.get_selection_graphical( ['person '", which made the whole
        # module a syntax error.  A complete usage example (see the
        # commented sample above) would look like:
        #   pc.get_selection_graphical(['person'], ['person'], [['track']])
        rospy.spin()
    else:
        ro = ROS_UI_Robot()
        ro.publish_tag_ids([ 'one', 'two', 'hello' ])
        ro.receive_response()
    # while True:
    #     print 'Waiting for robot action(s)...\n'
    #     pc.publish_selected_id()
    #     pc.ids = []
    #     pc.ids_done = False
    # On the Robot's side:
    #   ro = ROS_UI_Robot()
    #   ro.publish_tag_ids(['one','two','hello'])
    #   ro.receive_response()
| [
[
1,
0,
0.0092,
0.0031,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0092,
0.0031,
0,
0.66,
0.0769,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0092,
0.0031,
0,
0.... | [
"import roslib; roslib.load_manifest('hrl_lib'); import rospy",
"import roslib; roslib.load_manifest('hrl_lib'); import rospy",
"import roslib; roslib.load_manifest('hrl_lib'); import rospy",
"import hrl_lib.util as ut",
"import hrl_lib.rutils as ru",
"import time",
"import pygame",
"import pygame.dis... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Travis Deyle (Healthcare Robotics Lab, Georgia Tech.)
# Public modules of the hrl_rfid package: the M5e driver library, its ROS
# node and client wrappers, and the user-interface module.
__all__ = [
    'lib_M5e',
    'ros_M5e',
    'ros_M5e_client',
    'ui'
    ]
| [
[
14,
0,
0.9054,
0.1622,
0,
0.66,
0,
272,
0,
0,
0,
0,
0,
5,
0
]
] | [
"__all__ = [\n'lib_M5e',\n'ros_M5e',\n'ros_M5e_client',\n'ui'\n]"
] |
#!/usr/bin/python
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Travis Deyle (Healthcare Robotics Lab, Georgia Tech.)
# ROS imports
import roslib; roslib.load_manifest('hrl_rfid')
import rospy
from hrl_rfid.msg import RFIDread
from hrl_rfid.srv import RfidSrv
import hrl_rfid.lib_M5e as M5e
import time
from threading import Thread
# Modeled off lib_M5e.M5e_Poller
class ROS_M5e( ):
    """ROS node wrapping a ThingMagic M5e RFID reader.

    Publishes RFIDread messages on /rfid/<name>_reader and exposes a
    /rfid/<name>_mode service to switch between 'query' (inventory all
    tags) and 'track' (follow one specific tag) modes.
    """
    QUERY_MODE = 'query'
    TRACK_MODE = 'track'
    def __init__(self, name = 'reader1', readPwr = 2300,
                 portStr = '/dev/robot/RFIDreader',
                 antFuncs = [], callbacks = []):
        # NOTE(review): antFuncs/callbacks are mutable default arguments;
        # harmless here because they are never mutated in place, but worth
        # changing to None-sentinels if this class is extended.
        try:
            rospy.init_node( 'rfid_m5e_' + name )
        except rospy.ROSException:
            pass
        # Start in track mode with a placeholder 12-char tag ID (RFID tag
        # IDs used by _mode_service are expected to be exactly 12 chars).
        self.mode = 'track'
        self.tag_to_track = 'In Hand Tag '
        self.name = name + '_reader'
        rospy.logout( 'ROS_M5e: Launching RFID Reader' )
        rospy.logout( 'ROS_M5e: Please check out our related work @ http://www.hsi.gatech.edu/hrl/project_rfid.shtml' )
        rospy.logout( 'ROS_M5e: '+self.name+' Building & Connecting to reader' )
        def prin( x ): rospy.logout( 'ROS_M5e: lib_M5e: ' + x ) # use rospy.logout in underlying lib's output
        self.reader = M5e.M5e(readPwr=readPwr, portSTR = portStr, verbosity_func = prin)
        self.antFuncs = antFuncs
        # broadcast is always appended so every read is published on the topic.
        self.callbacks = callbacks + [self.broadcast]
        rospy.logout( 'ROS_M5e: publishing RFID reader with type RFIDread to channel /rfid/'+name+'_reader' )
        self.channel = rospy.Publisher('/rfid/'+name+'_reader', RFIDread)
        self._mode_service_obj = rospy.Service('/rfid/'+name+'_mode',
                                               RfidSrv, self._mode_service)
        rospy.logout( 'ROS_M5e: '+self.name+' Inialized and awaiting instructions' )
    def run( self, total_iter = 1000 ):
        """Main read loop: perform total_iter passes over the antennas,
        dispatching each [antenna, tagid, rssi] datum to all callbacks."""
        for i in xrange( total_iter ):
            if self.mode == self.QUERY_MODE:
                for aF in self.antFuncs:
                    antennaName = aF(self.reader)    # let current antFunc make appropriate changes
                    results = self.reader.QueryEnvironment()
                    if len(results) == 0:
                        # No tags seen: publish a sentinel read (empty id, -1 rssi).
                        datum = [antennaName, '', -1]
                        [cF(datum) for cF in self.callbacks]
                    for tagid, rssi in results:
                        datum = [antennaName, tagid, rssi]
                        [cF(datum) for cF in self.callbacks]
            elif self.mode == self.TRACK_MODE:
                for aF in self.antFuncs:
                    antennaName = aF(self.reader)    # let current antFunc make appropriate changes
                    tagid = self.tag_to_track
                    rssi = self.reader.TrackSingleTag(tagid, timeout = 10, safe_response = False)
                    #if rssi != -1:
                    datum = [antennaName, tagid, rssi]
                    [cF(datum) for cF in self.callbacks]
            else:
                # Stopped ('' mode): idle briefly instead of spinning hot.
                time.sleep(0.005)
        rospy.logout( 'ROS_M5e: '+self.name+' Shutting down reader' )
    def broadcast(self, data):
        """Callback that publishes a read triple as an RFIDread message."""
        antName, tagid, rssi = data
        rv = RFIDread( None, antName, tagid, rssi )
        rv.header.stamp = rospy.Time.now()
        self.channel.publish( rv )
    # For internal use only
    def _mode_service(self, data):
        """Service handler: [''] stops, ['query'] queries, ['track', <12-char id>] tracks."""
        val = data.data
        if len(val) == 0:
            rospy.logout( 'ROS_M5e: Mode Service called with invalid argument: ' + str(val) )
        elif len(val) == 1:
            if val[0] == self.QUERY_MODE:
                rospy.logout( 'ROS_M5e: '+self.name+' Entering Query Mode' )
                self.mode = self.QUERY_MODE
            else:
                rospy.logout( 'ROS_M5e: '+self.name+' Stopping Reader' )
                self.mode = ''
        elif len(val) == 2:
            if val[0] == self.TRACK_MODE and len(val[1]) == 12:
                rospy.logout( 'ROS_M5e: '+self.name+' Entering Track Mode: ' + str(val[1]) )
                self.mode = self.TRACK_MODE
                self.tag_to_track = val[1]
            else:
                rospy.logout( 'ROS_M5e: Mode Service called with invalid argument: ' + str(val) )
        else:
            rospy.logout( 'ROS_M5e: Mode Service called with invalid argument: ' + str(val) )
        return True
# -----------------------------------------------
# Likely Callbacks: (various antennas)
# -----------------------------------------------
def EleLeftEar(M5e):
    """Switch the reader to antenna port 1 (left ear) and let it settle."""
    port = 1
    M5e.ChangeAntennaPorts(port, port)
    time.sleep(0.010)
    return 'EleLeftEar'
def EleRightEar(M5e):
    """Switch the reader to antenna port 2 (right ear) and let it settle."""
    port = 2
    M5e.ChangeAntennaPorts(port, port)
    time.sleep(0.010)
    return 'EleRightEar'
def Hand_Right_1(M5e):
    """Select right-hand antenna 1 by driving GPIO1=1, GPIO2=0."""
    for cmd in ('\x02\x96\x01\x01', '\x02\x96\x02\x00'):
        M5e.TransmitCommand(cmd)
        M5e.ReceiveResponse()
    return 'Hand_Right_1'
def Hand_Right_2(M5e):
    """Select right-hand antenna 2 by driving GPIO1=1, GPIO2=1."""
    for cmd in ('\x02\x96\x01\x01', '\x02\x96\x02\x01'):
        M5e.TransmitCommand(cmd)
        M5e.ReceiveResponse()
    return 'Hand_Right_2'
def Hand_Left_1(M5e):
    """Select left-hand antenna 1 by driving GPIO1=0, GPIO2=0."""
    for cmd in ('\x02\x96\x01\x00', '\x02\x96\x02\x00'):
        M5e.TransmitCommand(cmd)
        M5e.ReceiveResponse()
    return 'Hand_Left_1'
def Hand_Left_2(M5e):
    """Select left-hand antenna 2 by driving GPIO1=0, GPIO2=1."""
    for cmd in ('\x02\x96\x01\x00', '\x02\x96\x02\x01'):
        M5e.TransmitCommand(cmd)
        M5e.ReceiveResponse()
    return 'Hand_Left_2'
def PrintDatum(data):
    """Debug callback: print a raw [antenna, tagid, rssi] read triple."""
    ant, ids, rssi = data
    print data
if __name__ == '__main__':
    # Smoke test: run the ears reader for 500 iterations and report the
    # effective read rate (2 antennas per iteration).
    print 'Starting Ears RFID Services'
    ros_rfid = ROS_M5e( name = 'ears', readPwr = 2300,
                        portStr = '/dev/robot/RFIDreader',
                        antFuncs = [EleLeftEar, EleRightEar],
                        callbacks = [] )
    total_iter = 500
    t0 = time.time()
    ros_rfid.run( total_iter )
    t1 = time.time()
    print 'Reads per sec: ', (total_iter * 2.0) / ( 1.0 * (t1 - t0))
| [
[
1,
0,
0.1735,
0.0051,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1735,
0.0051,
0,
0.66,
0.0625,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1786,
0.0051,
0,
0.... | [
"import roslib; roslib.load_manifest('hrl_rfid')",
"import roslib; roslib.load_manifest('hrl_rfid')",
"import rospy",
"from hrl_rfid.msg import RFIDread",
"from hrl_rfid.srv import RfidSrv",
"import hrl_rfid.lib_M5e as M5e",
"import time",
"from threading import Thread",
"class ROS_M5e( ):\n QUER... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Travis Deyle (Healthcare Robotics Lab, Georgia Tech.)
# ROS imports
import roslib; roslib.load_manifest('hrl_rfid')
import rospy
from hrl_rfid.msg import RFIDread
from hrl_rfid.srv import RfidSrv
import hrl_rfid.lib_M5e as M5e
import time
import thread
class ROS_M5e_Client():
    """Client-side proxy for a running ROS_M5e node.

    Subscribes to /rfid/<name>_reader, caches the most recent read, and
    wraps the /rfid/<name>_mode service with stop/track/query helpers.
    """
    QUERY_MODE = 'query'
    TRACK_MODE = 'track'
    def __init__(self, name = 'reader1', callbacks=[]):
        # NOTE(review): callbacks is a mutable default argument; harmless
        # here since it is only iterated, never mutated.
        self.name = name
        self._create_ros_objects()
        self.callbacks = callbacks
        self.last_read = ['', '', -1] # antenna_name, tagID, rssi
        try:
            rospy.init_node( self.name + '_listener', anonymous=True )
        except rospy.ROSException:
            pass
        self._sub = rospy.Subscriber( '/rfid/' + self.name + '_reader', RFIDread, self._sub_cb)
    def _sub_cb(self, datum):
        # Fan out to user callbacks, then update the cached read.
        [ cb( datum ) for cb in self.callbacks ]
        self.last_read = [datum.antenna_name, datum.tagID, datum.rssi]
    def unregister( self ):
        # Stop processing new reads.
        self._sub.unregister()
    # ROS Services
    def stop(self):
        """Put the reader node in stopped mode (empty mode string)."""
        self._mode_service_obj([ '' ])
    def track_mode(self, tag_id):
        """Switch the reader to track a single 12-char tag ID."""
        self._mode_service_obj([ self.TRACK_MODE, tag_id ])
    def query_mode(self):
        """Switch the reader to inventory (query) mode."""
        self._mode_service_obj([ self.QUERY_MODE ])
    # Create ROS Objects (for internal use only)
    def _create_ros_objects(self):
        # Blocks until the reader node's mode service is available.
        reader_service_name = '/rfid/'+self.name+'_mode'
        rospy.wait_for_service(reader_service_name)
        self._mode_service_obj = rospy.ServiceProxy(reader_service_name,
                                                    RfidSrv)
    def read(self, antenna = ''):
        """Return the latest [antenna, tagID, rssi] read; if *antenna* is
        given, poll until the latest read came from that antenna."""
        if antenna == '':
            return self.last_read
        else:
            r = self.last_read
            while r[0] != antenna and not rospy.is_shutdown():
                time.sleep(0.02)
                r = self.last_read
            return r
| [
[
1,
0,
0.3511,
0.0106,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.3511,
0.0106,
0,
0.66,
0.125,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.3617,
0.0106,
0,
0.6... | [
"import roslib; roslib.load_manifest('hrl_rfid')",
"import roslib; roslib.load_manifest('hrl_rfid')",
"import rospy",
"from hrl_rfid.msg import RFIDread",
"from hrl_rfid.srv import RfidSrv",
"import hrl_rfid.lib_M5e as M5e",
"import time",
"import thread",
"class ROS_M5e_Client():\n QUERY_MODE = ... |
#!/usr/bin/python
import numpy as np
import pickle
import matplotlib.pyplot as plt
def main():
    """Load pickled (means, stds) arrays and display a grouped bar chart,
    one colored bar series per row, grouped along the x-axis by column."""
    with open('pickled_mn_std.pkl','rb') as f:
        means, stds = pickle.load(f)
    n_rows, n_cols = means.shape
    bar_width = 1. / n_cols
    # Group anchor positions, spaced 2 units apart (indexed by column below,
    # exactly as in the original).
    anchors = np.arange(n_rows) * 2
    palette = [(1, 0, 0),
               (0, 1, 0),
               (0, 0, 1),
               (1, 1, 0),
               (0, 1, 1),
               (0.3, 0.3, 0.3),
               (0.6, 0.6, 0.6),
               (0.6, 0, 0.3)]
    for col in range(n_cols):
        for row in range(n_rows):
            plt.bar(anchors[col] + row * bar_width, means[row, col],
                    bar_width, color=palette[row], yerr=stds[row, col])
    plt.show()
if __name__=='__main__':
    main()
| [
[
1,
0,
0.1034,
0.0345,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.1379,
0.0345,
0,
0.66,
0.25,
848,
0,
1,
0,
0,
848,
0,
0
],
[
1,
0,
0.1724,
0.0345,
0,
0.... | [
"import numpy as np",
"import pickle",
"import matplotlib.pyplot as plt",
"def main():\n with open('pickled_mn_std.pkl','rb') as f:\n (means, stds) = pickle.load(f)\n width = 1./means.shape[1]\n ind = np.arange(means.shape[0])*2\n \n#\n # b = ['']*means.shape[0]",
" (means, stds)... |
#!/usr/bin/python
import sys
import csv
def extract_data(files):
    """Concatenate the rows of all CSV files in *files* into one list,
    logging the per-file row count as each is processed."""
    data = []
    for data_file in files:
        with open(data_file, 'rb') as f:
            reader = csv.reader(f)
            for row in reader:
                data.append(row)
            print "Processing: %s , %s rows" %(data_file, reader.line_num)
    print "Final Length: ", len(data)
    return data
if __name__=='__main__':
    # Usage: script.py a.csv b.csv ...  -> writes merged condensed_data.csv
    files = sys.argv[1:]
    data = extract_data(files)
    with open('condensed_data.csv', 'wb') as f_out:
        writer = csv.writer(f_out)
        writer.writerows(data)
| [
[
1,
0,
0.0952,
0.0476,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.1429,
0.0476,
0,
0.66,
0.3333,
312,
0,
1,
0,
0,
312,
0,
0
],
[
2,
0,
0.4524,
0.4762,
0,
... | [
"import sys",
"import csv",
"def extract_data(files):\n data = []\n for data_file in files:\n with open(data_file, 'rb') as f:\n reader = csv.reader(f)\n for row in reader:\n data.append(row)\n print(\"Processing: %s , %s rows\" %(data_file, reader.li... |
#!/usr/bin/env python
import sys
import roslib; roslib.load_manifest('wouse')
import rospy
from wouse.srv import WouseRunStop, WouseRunStopRequest
# Re-enable the robot after a wouse-triggered run-stop by calling the
# wouse_run_stop service with the 'start' flag set.
rospy.init_node('wouse_reset')
try:
    # NOTE(review): waits on '/wouse_run_stop' but proxies 'wouse_run_stop';
    # both resolve to the same global name under ROS name resolution.
    rospy.wait_for_service('/wouse_run_stop', 10)
    reset_client = rospy.ServiceProxy('wouse_run_stop', WouseRunStop)
    rospy.loginfo("[Wouse Reset]: Found wouse run stop service.")
except Exception:
    # BUGFIX: was a bare "except:", which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    rospy.logwarn("[Wouse Reset]: Could not find wouse run stop service.")
    sys.exit()
req = WouseRunStopRequest()
req.start = True
success = reset_client(req)
if success:
    rospy.loginfo("[Wouse Reset]: Reset returned successfully.")
else:
    rospy.logwarn("[Wouse Reset]: Reset reported failure.")
| [
[
1,
0,
0.1111,
0.037,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.1852,
0.037,
0,
0.66,
0.1,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1852,
0.037,
0,
0.66,
... | [
"import sys",
"import roslib; roslib.load_manifest('wouse')",
"import roslib; roslib.load_manifest('wouse')",
"import rospy",
"from wouse.srv import WouseRunStop, WouseRunStopRequest",
"rospy.init_node('wouse_reset')",
"try:\n rospy.wait_for_service('/wouse_run_stop', 10)\n reset_client = rospy.Se... |
#!/usr/bin/env python
import pickle
import numpy as np
from scipy import interp
import pylab as pl
from sklearn import preprocessing as pps, svm
from sklearn.metrics import roc_curve, auc
from sklearn.cross_validation import StratifiedKFold, LeaveOneOut
# Train an SVM on the pickled wouse feature data, then evaluate it with
# stratified 9-fold cross-validated ROC curves.
with open('../data/svm_data.pkl', 'rb') as f:
    svm_data = pickle.load(f)
labels = svm_data['labels']
data = svm_data['data']
# Standardize features to zero mean / unit variance before fitting.
scaler = pps.Scaler().fit(data)
print "Mean: ", scaler.mean_
print "Std: ", scaler.std_
data_scaled = scaler.transform(data)
classifier = svm.SVC(probability=True)
classifier.fit(data_scaled, labels)
#print "Support Vectors: \r\n", classifier.support_vectors_
print "SV's per class: \r\n", classifier.n_support_
###############################################################################
## Code below modified from http://scikit-learn.org/stable/auto_examples/plot_roc_crossval.html#example-plot-roc-crossval-py
X, y = data_scaled, np.array(labels)
n_samples, n_features = X.shape
print n_samples, n_features
###############################################################################
# Classification and ROC analysis
# Run classifier with crossvalidation and plot ROC curves
cv = StratifiedKFold(y, k=9)
mean_tpr = 0.0
mean_fpr = np.linspace(0, 1, n_samples)
all_tpr = []
for i, (train, test) in enumerate(cv):
    # Refit on each training fold, score probabilities on the held-out fold.
    probas_ = classifier.fit(X[train], y[train]).predict_proba(X[test])
    # Compute ROC curve and area the curve
    fpr, tpr, thresholds = roc_curve(y[test], probas_[:, 1])
    # Interpolate onto a common FPR grid so fold curves can be averaged.
    mean_tpr += interp(mean_fpr, fpr, tpr)
    mean_tpr[0] = 0.0
    roc_auc = auc(fpr, tpr)
    pl.plot(fpr, tpr, '--', lw=1, label='ROC fold %d (area = %0.2f)' % (i, roc_auc))
# Diagonal = chance performance.
pl.plot([0, 1], [0, 1], '--', color=(0.6, 0.6, 0.6), label='Luck')
mean_tpr /= len(cv)
mean_tpr[-1] = 1.0
mean_auc = auc(mean_fpr, mean_tpr)
pl.plot(mean_fpr, mean_tpr, 'k-', lw=3,
        label='Mean ROC (area = %0.2f)' % mean_auc)
pl.xlim([0, 1])
pl.ylim([0, 1])
pl.xlabel('False Positive Rate')
pl.ylabel('True Positive Rate')
pl.title('Receiver Operating Characteristic')
pl.legend(loc="lower right")
pl.show()
| [
[
1,
0,
0.0448,
0.0149,
0,
0.66,
0,
848,
0,
1,
0,
0,
848,
0,
0
],
[
1,
0,
0.0597,
0.0149,
0,
0.66,
0.0294,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0746,
0.0149,
0,
... | [
"import pickle",
"import numpy as np",
"from scipy import interp",
"import pylab as pl",
"from sklearn import preprocessing as pps, svm",
"from sklearn.metrics import roc_curve, auc",
"from sklearn.cross_validation import StratifiedKFold, LeaveOneOut",
" svm_data = pickle.load(f)",
"labels = svm_... |
#!/usr/bin/env python
import roslib; roslib.load_manifest('wouse')
import rospy
from std_msgs.msg import Header, Bool, String
from pr2_power_board.srv import PowerBoardCommand2, PowerBoardCommand2Request
from std_srvs.srv import Empty, EmptyRequest
from sound_play.libsoundplay import SoundClient
from wouse.srv import WouseRunStop
CIRCUITS=[0,1,2] #Base, Right arm, Left Arm circuits
# When True, RunStopServer requires periodic pings from the wouse and
# beeps if they stop arriving (dead-man-switch behavior).
DEAD_MAN_CONFIGURATION=False
class RunStop(object):
    """Provide utility functions for starting/stopping PR2.

    Wraps the pr2_etherCAT halt/reset motor services and the power-board
    control service; stop() halts motors and puts all circuits in standby,
    start() reverses it.
    """
    def __init__(self):
        """Establish service connections for motors, power board."""
        self.init_successful = True
        try:
            rospy.wait_for_service('pr2_etherCAT/halt_motors', 5)
            self.halt_motors_client=rospy.ServiceProxy('pr2_etherCAT/halt_motors',Empty)
            rospy.loginfo("Found halt motors service")
        except Exception:
            # BUGFIX: was a bare "except:" (also caught KeyboardInterrupt).
            rospy.logerr("Cannot find halt motors service")
            self.init_successful = False
        try:
            rospy.wait_for_service('pr2_etherCAT/reset_motors',5)
            self.reset_motors_client=rospy.ServiceProxy('pr2_etherCAT/reset_motors',Empty)
            rospy.loginfo("Found reset motors service")
        except Exception:
            # BUGFIX: message previously said "halt motors" (copy-paste error).
            rospy.logerr("Cannot find reset motors service")
            self.init_successful = False
        try:
            rospy.wait_for_service('power_board/control2',5)
            self.power_board_client=rospy.ServiceProxy('power_board/control2',PowerBoardCommand2)
            rospy.loginfo("Found power_board/control2 service")
        except Exception:
            rospy.logerr("Cannot find power_board/control2 service")
            self.init_successful = False
    def stop(self):
        """Halt motors, place power board into standby. Stops robot.

        Returns True only if all three circuits entered standby."""
        self.halt_motors_client(EmptyRequest()) #Halt motors immediately
        results = [self.standby_power(circuit) for circuit in CIRCUITS]
        return all(results)
    def start(self):
        """Reset power board, reset motors. Un-does 'stop'.

        Returns True only if all three circuits reset successfully."""
        results = [self.reset_power(circuit) for circuit in CIRCUITS]
        if all(results):
            # Give the power board time to come back up before resetting motors.
            rospy.sleep(2.0)
            self.reset_motors_client(EmptyRequest())
            return True
        else:
            return False
    def standby_power(self, circuit):
        """Place one PR2 power board circuit into standby."""
        stdby_cmd = PowerBoardCommand2Request()
        stdby_cmd.circuit = circuit
        stdby_cmd.command = "stop"
        return self.power_board_client(stdby_cmd)
    def reset_power(self,circuit):
        """Reset one PR2 power board circuit to active from standby."""
        reset_cmd = PowerBoardCommand2Request()
        reset_cmd.circuit = circuit
        reset_cmd.command = "start"
        return self.power_board_client(reset_cmd)
class RunStopServer(object):
    """ROS service server mediating wouse run-stop requests.

    In dead-man configuration it also tracks the wouse's last ping and
    plays a warning tone if the wouse goes silent.
    """
    def __init__(self):
        """Provide dead-man-switch like server for handling wouse run-stops."""
        rospy.Service("wouse_run_stop", WouseRunStop, self.service_cb)
        self.run_stop = RunStop()
        if DEAD_MAN_CONFIGURATION:
            self.sound_client = SoundClient()
            # Max silence allowed from the wouse before warning (param, seconds).
            self.timeout = rospy.Duration(rospy.get_param('wouse_timeout', 1.5))
            # Minimum spacing between warning tones.
            self.tone_period = rospy.Duration(10)
            self.last_active_time = rospy.Time.now()
            self.last_sound = rospy.Time.now()
            rospy.Timer(self.timeout, self.check_receiving)
    def check_receiving(self, event):
        """After timeout, check to ensure that activity is seen from wouse."""
        silence = rospy.Time.now() - self.last_active_time
        #print silence, " / ", self.timeout
        if silence < self.timeout:
           # print "Receiving"
            return
        #else:
        #    print "NOT receiving"
        if (silence > self.timeout and (rospy.Time.now() - self.last_sound) > self.tone_period):
            rospy.logwarn("RunStopServer has not heard from wouse recently.")
            self.sound_client.play(3)#1
            self.last_sound = rospy.Time.now()
    def service_cb(self, req):
        """Handle service requests to start/stop run-stop. Used to reset.

        Every request refreshes the last-activity timestamp; stop/start
        flags additionally trigger the corresponding RunStop action."""
        #print "Separation: ", req.time-self.last_active_time
        self.last_active_time = req.time
        #print "Delay: ", rospy.Time.now() - self.last_active_time
        if req.stop:
            return self.run_stop.stop()
        elif req.start:
            return self.run_stop.start()
        else:
            return True #only update timestamp
if __name__=='__main__':
    # Start the run-stop service server and process requests until shutdown.
    rospy.init_node('run_stop_server')
    rss = RunStopServer()
    rospy.spin()
| [
[
1,
0,
0.0242,
0.0081,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0242,
0.0081,
0,
0.66,
0.0833,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0323,
0.0081,
0,
0.... | [
"import roslib; roslib.load_manifest('wouse')",
"import roslib; roslib.load_manifest('wouse')",
"import rospy",
"from std_msgs.msg import Header, Bool, String",
"from pr2_power_board.srv import PowerBoardCommand2, PowerBoardCommand2Request",
"from std_srvs.srv import Empty, EmptyRequest",
"from sound_pl... |
#!/usr/bin/env python
import sys
import numpy as np
from threading import Condition
import pickle
import csv
import pygame
from sklearn import preprocessing as pps, svm
import roslib; roslib.load_manifest('wouse')
from Object import ROSObject
import rospy
from std_msgs.msg import Header
from geometry_msgs.msg import Vector3Stamped
from wouse.srv import WouseRunStop, WouseRunStopRequest
class Wouse(object):
    """
    Subscribes to mouse movements, detects wincing, and signals e-stop.
    """
    def __init__(self, svm_datafile):
        """Initialize svm classifier and needed services."""
        try:
            rospy.wait_for_service('wouse_run_stop', 5)
            self.runstop_client=rospy.ServiceProxy('wouse_run_stop',
                                                    WouseRunStop, True)
            rospy.loginfo("Found wouse run-stop service")
        except Exception:
            # BUGFIX: was a bare "except:", which also swallowed
            # SystemExit/KeyboardInterrupt; narrowed to Exception.
            rospy.logerr("Cannot find wouse run-stop service")
            sys.exit()
        (self.scaler, self.classifier) = self.init_classifier(svm_datafile)
        rospy.loginfo("SVM Trained on data from %s" %svm_datafile)
        rospy.Subscriber('wouse_movement', Vector3Stamped, self.movement_cb)
        # Ping the run-stop server at 10 Hz so it knows the wouse is alive.
        rospy.Timer(rospy.Duration(0.1), self.ping_server)
        self.window = []
        self.data = []
        pygame.init()
        self.sound_new = pygame.mixer.Sound('../../sounds/new_item.wav')
        #self.csv_writer = csv.writer(open(out_file, 'ab'))
        rospy.loginfo("[%s]: Ready" %rospy.get_name())
    def init_classifier(self, filename):
        """Unpickle svm training data, train classifier.

        Returns (scaler, classifier) fitted on the standardized data."""
        with open(filename, 'rb') as f:
            svm_data = pickle.load(f)
        labels = svm_data['labels']
        data = svm_data['data']
        scaler = pps.Scaler().fit(data)
        data_scaled = scaler.transform(data)
        classifier = svm.SVC()
        classifier.fit(data_scaled, labels)
        return (scaler, classifier)
    def ping_server(self, event):
        """Send updated timestamp to Runstop server."""
        req = WouseRunStopRequest(False, False, rospy.Time.now())
        self.runstop_client(req)
    def movement_cb(self, v3):
        """Filter out small movements, check classifier, call stop if needed."""
        #line = [v3.header.stamp.to_sec(), v3.vector.x, v3.vector.y]
        #self.csv_writer.writerow(line)
        # Ignore movements with magnitude below 2.5 (sensor noise).
        if (v3.vector.x**2+v3.vector.y**2)**(0.5) < 2.5:
            return
        if self.classify_svm(v3.vector.x, v3.vector.y, v3.header.stamp):
            self.runstop_client(WouseRunStopRequest(True, False, rospy.Time.now()))
            self.sound_new.play()
            rospy.loginfo("Wince Detected, stopping robot!")
        else:
            rospy.loginfo("Not a wince")
    def classify_svm(self, x, y, time):
        """Build the descriptor vector for the incoming mouse event, and classify with the svm.

        Features: event magnitude/angle, plus count, mean magnitude, and net
        angle over a sliding 0.25 s window.  Returns True for a wince."""
        # NOTE: parameter 'time' is a rospy timestamp, not the time module.
        datum = []
        mag = (x**2+y**2)**(0.5)
        angle = np.arctan2(y,x)
        datum.append(mag)
        datum.append(angle)
        self.window.append([x,y,time])
        # Trim events older than 0.25 s from the sliding window.
        while (self.window[-1][-1] - self.window[0][-1]) > rospy.Duration(0.25):
            self.window.pop(0)
        win = np.array(self.window)
        datum.append(len(self.window))
        win_x = np.sum(win[:,0])
        win_y = np.sum(win[:,1])
        win_mag = (win_x**2+win_y**2)**(0.5)
        datum.append(win_mag/len(self.window))
        datum.append(np.arctan2(win_y, win_x))
        datum_scaled = self.scaler.transform(datum)
        prediction = self.classifier.predict(datum_scaled)
        if prediction[0] == 1.:
            return True
        # BUGFIX: previously fell off the end returning implicit None;
        # return an explicit False (equivalent truthiness, clearer contract).
        return False
if __name__=='__main__':
    # Usage: wouse.py <svm_datafile.pkl>
    rospy.init_node('wouse_node')
    wouse = Wouse(sys.argv[1])
    rospy.spin()
| [
[
1,
0,
0.0481,
0.0096,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0577,
0.0096,
0,
0.66,
0.0667,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0673,
0.0096,
0,
... | [
"import sys",
"import numpy as np",
"from threading import Condition",
"import pickle",
"import csv",
"import pygame",
"from sklearn import preprocessing as pps, svm",
"import roslib; roslib.load_manifest('wouse')",
"import roslib; roslib.load_manifest('wouse')",
"from Object import ROSObject",
... |
#!/usr/bin/env python
import sys
import os
import random
import csv
import math
import pygame
import roslib; roslib.load_manifest('wouse')
import rospy
from geometry_msgs.msg import Vector3Stamped
from PySide.QtCore import *
from PySide.QtGui import *
from PySide.QtUiTools import QUiLoader
#DEGREES = ['WEAK', 'AVERAGE', 'STRONG']
# Intensity prefixes for prompts (currently disabled: single empty entry).
DEGREES = ['']
# Facial expressions the trainer prompts the user to perform.
ACTIONS = ['WINCE', 'NOD', 'SHAKE', 'JOY', 'SUPRISE', 'FEAR', 'ANGER',
           'DISGUST', 'SADNESS']
# Decorative separator bars randomly chosen to make each prompt stand out.
SYMBOLS = ["**"*25, "%%"*25, "^v"*25, '##'*25, '&&'*25, '$$'*25]
class WouseSetupDialog(object):
    """A dialog box for setting session parameters for training the wouse."""
    def __init__(self):
        """ Load .ui file from QtDesigner, add callbacks as necessary"""
        # WOUSE_PKG is a module-level global set in the __main__ guard.
        ui_file = WOUSE_PKG+'/src/wouse/wouse_train_options.ui'
        self.dialog = QUiLoader().load(ui_file)
        # Recompute the estimated session duration whenever a spin changes.
        self.dialog.rounds_spin.valueChanged.connect(self.update_time)
        self.dialog.recording_spin.valueChanged.connect(self.update_time)
        self.dialog.recovery_spin.valueChanged.connect(self.update_time)
        self.dialog.file_button.clicked.connect(self.file_button_cb)
        self.dialog.file_field_edit.setText(WOUSE_PKG+'/data/')
        self.dialog.buttonBox.accepted.connect(self.ok_cb)
        self.dialog.buttonBox.rejected.connect(self.cancel_cb)
        self.update_time()
    def file_button_cb(self):
        """Use file dialog to get .csv file. Check for csv, update Lineedit"""
        direc = self.dialog.file_field_edit.text()
        filename = QFileDialog.getOpenFileName(self.dialog,
                                        caption="File to stop wouse training data",
                                        dir=direc,
                                        filter="*.csv")
        if len(filename[0]) != 0:
            if filename[0][-4:] != '.csv':
                QMessageBox.warning(self.dialog, "Warning: Invalid File",
                "Warning: Selected File does not appear to be a CSV (.csv)\
                data file.")
            self.dialog.file_field_edit.setText(filename[0])
    def calc_time(self):
        """Calculate the time (s) required for full run with current settings"""
        tot_time = (self.dialog.recording_spin.value()+
                    self.dialog.recovery_spin.value())
        return len(ACTIONS)*self.dialog.rounds_spin.value()*tot_time
    def update_time(self):
        """Parse time to minutes:seconds format, update interface"""
        time = self.calc_time()
        # Python 2 integer division yields whole minutes here.
        mins = str(int(time)/60)
        secs = str(int(round(time%60.)))
        if len(secs)==1:
            # Zero-pad single-digit seconds for mm:ss display.
            secs = "".join(['0',secs])
        self.dialog.duration.setText('%s:%s' %(mins,secs))
    def ok_cb(self):
        """Check for acceptable file. Warn if bad, if good, close, return 1"""
        if self.dialog.file_field_edit.text()[-4:] != '.csv':
            return QMessageBox.warning(self.dialog, "Warning: Invalid File",
                            "Please choose a valid CSV (.csv) data file.")
        self.dialog.accept()
    def cancel_cb(self):
        """ Close dialog, return 0/Rejected"""
        self.dialog.reject()
class WouseTrainer(object):
    """Print random facial-expression prompts and record wouse movement
    data (from /wouse_movement) into a CSV file while each prompt is active."""
    def __init__(self):
        #QtDialog for setting parameters for session
        app = QApplication([])
        self.setup_gui = WouseSetupDialog()
        self.setup_gui.dialog.show()
        if self.setup_gui.dialog.exec_() == 0:
            # User cancelled the setup dialog: abort the session.
            sys.exit()
        self.rounds = self.setup_gui.dialog.rounds_spin.value()
        self.record_dur = self.setup_gui.dialog.recording_spin.value()
        self.recovery_dur = self.setup_gui.dialog.recovery_spin.value()
        rospy.Subscriber('/wouse_movement', Vector3Stamped, self.movement_cb)
        #Open file for recoding data
        output_file = self.setup_gui.dialog.file_field_edit.text()
        self.csv_writer = csv.writer(open(output_file, 'ab'))
        #Init pygame, used for playing sounds
        pygame.init()
        self.sound_new = pygame.mixer.Sound(WOUSE_PKG+'/sounds/new_item.wav')
        self.sound_done = pygame.mixer.Sound(WOUSE_PKG+'/sounds/item_done.wav')
        self.degree='AVERAGE'
        # Only log movement while a prompt is being recorded.
        self.recording = False
    def movement_cb(self, v3s):
        """Write a new line to the csv file for incoming data."""
        if self.recording:
            # Row: degree, behavior label, timestamp, dx, dy.
            line = [self.degree, self.behavior, v3s.header.stamp.to_sec(),
                    v3s.vector.x, v3s.vector.y]
            self.csv_writer.writerow(line)
    def run(self, rounds, record_dur, recovery_dur):
        """Perform training given parameters and actions."""
        # Shuffled list containing each action `rounds` times.
        act_list = rounds*ACTIONS
        random.shuffle(act_list)
        count = 1
        print "Starting in: "
        countdown = range(1,10)
        countdown.reverse()
        for number in countdown:
            print "%s" %number
            rospy.sleep(1)
        while len(act_list) > 0 and not rospy.is_shutdown():
            self.behavior = act_list.pop()
            bar = random.choice(SYMBOLS)
            self.recording = True
            self.sound_new.play()
            print "\r\n"*15
            print bar
            print "%s: %s" %(count, self.behavior)
            print bar
            print "\r\n"*15
            rospy.sleep(record_dur)
            self.recording = False
            self.sound_done.play()
            count += 1
            # Rest period between prompts (no data recorded).
            rospy.sleep(recovery_dur)
        if not act_list:
            print "Training Session Completed"
        else:
            print "Training Session Ended Early"
if __name__=='__main__':
    rospy.init_node('wouse_trainer')
    # Module-level global read by WouseTrainer.__init__ to locate sound files.
    WOUSE_PKG = roslib.packages.get_pkg_dir('wouse')
    wt = WouseTrainer()
    wt.run(wt.rounds, wt.record_dur, wt.recovery_dur)
| [
[
1,
0,
0.0135,
0.0068,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0203,
0.0068,
0,
0.66,
0.0556,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.027,
0.0068,
0,
0... | [
"import sys",
"import os",
"import random",
"import csv",
"import math",
"import pygame",
"import roslib; roslib.load_manifest('wouse')",
"import roslib; roslib.load_manifest('wouse')",
"import rospy",
"from geometry_msgs.msg import Vector3Stamped",
"from PySide.QtCore import *",
"from PySide.... |
#!/usr/bin/env python
import sys
import numpy as np
from threading import Condition
import pickle
import csv
import pygame
from sklearn import preprocessing as pps, svm
import roslib; roslib.load_manifest('wouse')
import rospy
from std_msgs.msg import Header
from geometry_msgs.msg import Vector3Stamped
from wouse.srv import WouseRunStop, WouseRunStopRequest
class Wouse(object):
    """
    Subscribes to mouse movements, detects wincing, and signals e-stop.
    """
    def __init__(self, svm_datafile):
        """Initialize svm classifier and needed services."""
        (self.scaler, self.classifier) = self.init_classifier(svm_datafile)
        rospy.loginfo("SVM Trained on data from %s" %svm_datafile)
        rospy.Subscriber('wouse_movement', Vector3Stamped, self.movement_cb)
        # Sliding window of recent [x, y, stamp] samples used by classify_svm.
        self.window = []
        self.data = []
        pygame.init()
        # NOTE(review): relative path — only works when run from the scripts dir.
        self.sound_new = pygame.mixer.Sound('../../sounds/new_item.wav')
        #self.csv_writer = csv.writer(open(out_file, 'ab'))
        rospy.loginfo("[%s]: Ready" %rospy.get_name())
    def init_classifier(self, filename):
        """Unpickle svm training data, train classifier.

        Returns (scaler, classifier) so incoming data can be scaled the
        same way the training data was.
        """
        with open(filename, 'rb') as f:
            svm_data = pickle.load(f)
        labels = svm_data['labels']
        data = svm_data['data']
        scaler = pps.Scaler().fit(data)
        data_scaled = scaler.transform(data)
        classifier = svm.SVC()
        classifier.fit(data_scaled, labels)
        return (scaler, classifier)
    def movement_cb(self, v3):
        """Filter out small movements, check classifier, call stop if needed."""
        #line = [v3.header.stamp.to_sec(), v3.vector.x, v3.vector.y]
        #self.csv_writer.writerow(line)
        # Ignore motions with magnitude below 2.5 (noise threshold).
        if (v3.vector.x**2+v3.vector.y**2)**(0.5) < 2.5:
            return
        if self.classify_svm(v3.vector.x, v3.vector.y, v3.header.stamp):
            self.sound_new.play()
            rospy.loginfo("Wince Detected, stopping robot!")
        else:
            rospy.loginfo("Not a wince")
    def classify_svm(self, x, y, time):
        """Build the descriptor vector for the incoming mouse event, and classify with the svm."""
        datum = []
        # Features 1-2: magnitude and direction of this single movement.
        mag = (x**2+y**2)**(0.5)
        angle = np.arctan2(y,x)
        datum.append(mag)
        datum.append(angle)
        # Keep only samples from the last 0.25 s in the sliding window.
        self.window.append([x,y,time])
        while (self.window[-1][-1] - self.window[0][-1]) > rospy.Duration(0.25):
            self.window.pop(0)
        win = np.array(self.window)
        # Features 3-5: window size, mean magnitude, and net direction.
        datum.append(len(self.window))
        win_x = np.sum(win[:,0])
        win_y = np.sum(win[:,1])
        win_mag = (win_x**2+win_y**2)**(0.5)
        datum.append(win_mag/len(self.window))
        datum.append(np.arctan2(win_y, win_x))
        datum_scaled = self.scaler.transform(datum)
        prediction = self.classifier.predict(datum_scaled)
        # Returns True for a wince (label 1.0), implicitly None otherwise.
        if prediction[0] == 1.:
            return True
if __name__=='__main__':
    rospy.init_node('wouse_node')
    # argv[1]: path to the pickled SVM training-data file.
    wouse = Wouse(sys.argv[1])
    rospy.spin()
| [
[
1,
0,
0.0353,
0.0118,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0471,
0.0118,
0,
0.66,
0.0714,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0588,
0.0118,
0,
... | [
"import sys",
"import numpy as np",
"from threading import Condition",
"import pickle",
"import csv",
"import pygame",
"from sklearn import preprocessing as pps, svm",
"import roslib; roslib.load_manifest('wouse')",
"import roslib; roslib.load_manifest('wouse')",
"import rospy",
"from std_msgs.m... |
#!/usr/bin/env python
import camera_setup_lib as csl
import camera_config as cc
##
# Createss a dictionary mapping camera UUID to OpenCV IDs
def camera_ids():
    """Build a mapping from camera UUID to its OpenCV capture index."""
    num_cameras = csl.init_bus1394()
    uid_to_index = dict((csl.getCameraUID(idx), idx) for idx in range(num_cameras))
    csl.endCameraSetup()
    return uid_to_index
##
# Returns a dictionary mapping from camera names to OpenCV ID for all hooked up
# cameras
def camera_names():
    """Map configured camera names to OpenCV IDs for all attached cameras."""
    uid_to_opencv = camera_ids()
    # Invert the configuration table: camera UID -> configured camera name.
    uid_to_name = dict((params['uid'], name)
                       for name, params in cc.camera_parameters.items())
    # A KeyError here means an attached camera has no camera_config entry.
    return dict((uid_to_name[uid], dev) for uid, dev in uid_to_opencv.items())
##
# Returns the OpenCV ID of a named camera
# @param camera_name
def lookup_by_name(camera_name):
    """Return the OpenCV capture index for the camera named in camera_config."""
    ids = camera_ids()
    print 'ids:', ids
    return ids[cc.camera_parameters[camera_name]['uid']]
if __name__ == '__main__':
    # Smoke test: enumerate attached cameras and their configured names.
    print 'Camera UUIDs', camera_ids()
    print 'Available cameras:', camera_names()
| [
[
1,
0,
0.05,
0.025,
0,
0.66,
0,
303,
0,
1,
0,
0,
303,
0,
0
],
[
1,
0,
0.075,
0.025,
0,
0.66,
0.2,
94,
0,
1,
0,
0,
94,
0,
0
],
[
2,
0,
0.25,
0.175,
0,
0.66,
0.4... | [
"import camera_setup_lib as csl",
"import camera_config as cc",
"def camera_ids():\n\tuidIndexDict = {}\n\tnumCameras = csl.init_bus1394()\n\tfor i in range(numCameras):\n\t\tuidIndexDict[csl.getCameraUID(i)] = i\n\tcsl.endCameraSetup()\n\treturn uidIndexDict",
"\tuidIndexDict = {}",
"\tnumCameras = csl.ini... |
import roslib
roslib.load_manifest('hrl_camera')
import cv
# Per-camera configuration table, keyed by human-readable camera name.
# Each entry holds the intrinsic calibration (focal lengths, optical center,
# distortion coefficients), Bayer/debayering info, and the Python class +
# firewire UID used to open the device.
camera_parameters = {
    #A sample configuration
    'default' :
    {
        'calibration_image_width' : 320.0,
        'calibration_image_height' : 240.0,
        'focal_length_x_in_pixels' : 161.80593,
        'focal_length_y_in_pixels' : 163.49099,
        'optical_center_x_in_pixels' : 159.78997,
        'optical_center_y_in_pixels' : 136.73113,
        'lens_distortion_radial_1' : -0.26334,
        'lens_distortion_radial_2' : 0.05096,
        'lens_distortion_tangential_1' : 0.00105,
        'lens_distortion_tangential_2' : -0.00016,
        'opencv_bayer_pattern' : None,
        #whether this camera was mounted upside down
        'upside_down': True,
        'color': False,
        #the class to load in a normal python import statement
        'class': 'firefly',
        #UUID obtained by calling 'python camera_uuid.py'
        'uid': None
    },
    'dummy_deepthought':
    {
        'calibration_image_width' : 640.0,
        'calibration_image_height' : 480.0,
        'focal_length_x_in_pixels' : 362.381,
        'focal_length_y_in_pixels' : 362.260,
        'optical_center_x_in_pixels' : 275.630,
        'optical_center_y_in_pixels' : 267.914,
        'lens_distortion_radial_1' : -0.270544,
        'lens_distortion_radial_2' : 0.0530850,
        'lens_distortion_tangential_1' : 0,
        'lens_distortion_tangential_2' : 0,
        'opencv_bayer_pattern' : cv.CV_BayerBG2BGR,
        'upside_down': False,
        'color': True,
        'class': 'firefly',
        'uid': 7281161
    },
    #########################################################################
    # Start of cameras on Cody
    #########################################################################
    'mekabotUTM':
    {
        'calibration_image_width' : 640.0,
        'calibration_image_height' : 480.0,
        'focal_length_x_in_pixels' : 362.381,
        'focal_length_y_in_pixels' : 362.260,
        'optical_center_x_in_pixels' : 275.630,
        'optical_center_y_in_pixels' : 267.914,
        'lens_distortion_radial_1' : -0.270544,
        'lens_distortion_radial_2' : 0.0530850,
        'lens_distortion_tangential_1' : 0,
        'lens_distortion_tangential_2' : 0,
        'opencv_bayer_pattern' : cv.CV_BayerGB2BGR,
        #'opencv_bayer_pattern' : cv.CV_BayerBG2BGR,
        'type': 'Point Grey Firefly',
        'class': 'firefly',
        'color': True,
        'uid': 8520228
    },
    #########################################################################
    # Start of camera on ELE
    #########################################################################
    'catadioptric' :
    {
        'calibration_image_width' : 640.0,
        'calibration_image_height' : 480.0,
        'focal_length_x_in_pixels' : 360,
        'focal_length_y_in_pixels' : 360,
        'optical_center_x_in_pixels' : 320,
        'optical_center_y_in_pixels' : 240,
        'lens_distortion_radial_1' : 0,
        'lens_distortion_radial_2' : 0,
        'lens_distortion_tangential_1' : 0,
        'lens_distortion_tangential_2' : 0,
        'opencv_bayer_pattern' : None,
        'type': 'Point Grey Firefly',
        'class': 'firefly',
        'color': False,
        'uid': 7281154
    },
    'stereo_left':
    {
        'calibration_image_width' : 640.0,
        'calibration_image_height' : 480.0,
        'focal_length_x_in_pixels' : 1017.33,
        'focal_length_y_in_pixels' : 1018.47,
        'optical_center_x_in_pixels' : 306.264,
        'optical_center_y_in_pixels' : 226.465,
        'lens_distortion_radial_1' : -0.480961,
        'lens_distortion_radial_2' : 0.341886,
        'lens_distortion_tangential_1' : 0,
        'lens_distortion_tangential_2' : 0,
        'opencv_bayer_pattern' : cv.CV_BayerGR2BGR,
        'type': 'Point Grey Firefly',
        'class': 'firefly',
        'color': True,
        'frame_rate': 7.5,
        'ros_topic': '/stereohead/left/color_image',
        'uid': 7140923
    },
    'stereo_right':
    {
        'calibration_image_width' : 640.0,
        'calibration_image_height' : 480.0,
        'focal_length_x_in_pixels' : 1013.70,
        'focal_length_y_in_pixels' : 1015.33,
        'optical_center_x_in_pixels' : 303.834,
        'optical_center_y_in_pixels' : 219.792,
        'lens_distortion_radial_1' : -0.530238,
        'lens_distortion_radial_2' : 0.766580,
        'lens_distortion_tangential_1' : 0,
        'lens_distortion_tangential_2' : 0,
        'opencv_bayer_pattern' : cv.CV_BayerGR2BGR,
        'type': 'Point Grey Firefly',
        'class': 'firefly',
        'color': True,
        'frame_rate': 7.5,
        'ros_topic': '/stereohead/right/color_image',
        'uid': 7041054
    },
    'snozzberry_hand' :
    {
        'calibration_image_width' : 640.0,
        'calibration_image_height' : 480.0,
        'focal_length_x_in_pixels' : 351.38 ,
        'focal_length_y_in_pixels' : 351.01,
        'optical_center_x_in_pixels' : 301.92,
        'optical_center_y_in_pixels' : 203.98,
        'lens_distortion_radial_1' : -0.258504,
        'lens_distortion_radial_2' : 0.0482161,
        'lens_distortion_tangential_1' : 0.0,
        'lens_distortion_tangential_2' : 0.0,
        'opencv_bayer_pattern' : cv.CV_BayerGR2BGR,
        'color': True,
        'type': 'Point Grey Firefly',
        'class': 'firefly',
        'uid': 7140879,
        'fovy': 62.
    },
    'ele_carriage' :
    {
        'calibration_image_width' : 1024.0,
        'calibration_image_height' : 768.0,
        'focal_length_x_in_pixels' : 624.043,
        'focal_length_y_in_pixels' : 625.488,
        'optical_center_x_in_pixels' : 531.805 ,
        'optical_center_y_in_pixels' : 404.651,
        'lens_distortion_radial_1' : -0.314033,
        'lens_distortion_radial_2' : 0.0973255,
        'lens_distortion_tangential_1' : 0.,
        'lens_distortion_tangential_2' : 0,
        'opencv_bayer_pattern' : cv.CV_BayerGR2RGB,
        'cv_cap_prop_mode' : 101,
        'upside_down': False,
        'color': True,
        'type': 'Point Grey DragonFly2',
        'class': 'dragonfly2',
        'uid': 9030523
    },
    'ele_utm_old' :
    {
        'calibration_image_width' : 640.0,
        'calibration_image_height' : 480.0,
        'focal_length_x_in_pixels' : 358.804,
        'focal_length_y_in_pixels' : 359.702,
        'optical_center_x_in_pixels' : 309.151,
        'optical_center_y_in_pixels' : 226.581,
        'lens_distortion_radial_1' : -0.273398,
        'lens_distortion_radial_2' : 0.0546037,
        'lens_distortion_tangential_1' : 0,
        'lens_distortion_tangential_2' : 0,
        'cv_cap_prop_mode' : 101,
        'opencv_bayer_pattern' : cv.CV_BayerBG2BGR,
        'color': True,
        'type': 'Point Grey Firefly',
        'class': 'firefly',
        'uid': 8520136
    },
    #########################################################################
    # End of cameras on ELE.
    #########################################################################
    'remote_head' :
    {
        'calibration_image_width': 1024.0,
        'calibration_image_height': 768.0,
        'focal_length_x_in_pixels': 863.136719,
        'focal_length_y_in_pixels': 863.513672,
        'optical_center_x_in_pixels': 546.340088,
        'optical_center_y_in_pixels': 403.253998,
        'lens_distortion_radial_1': -0.417464,
        'lens_distortion_radial_2': 0.217398 ,
        'lens_distortion_tangential_1': 0.002538 ,
        'lens_distortion_tangential_2': 0.000321 ,
        'cv_cap_prop_mode': 101,
        'opencv_bayer_pattern': cv.CV_BayerGR2RGB,
        'color': True,
        'type': 'Point Grey DragonFly2',
        'class': 'dragonfly2',
        'uid': 9030543
    },
    'lab_overhead' :
    {
        'calibration_image_width' : 1024.0,
        'calibration_image_height' : 768.0,
        'focal_length_x_in_pixels' : 462.794,
        'focal_length_y_in_pixels' : 462.041,
        'optical_center_x_in_pixels' : 488.590,
        'optical_center_y_in_pixels' : 428.419,
        'lens_distortion_radial_1' : -0.240018,
        'lens_distortion_radial_2' : 0.0372740,
        'lens_distortion_tangential_1' : 0,
        'lens_distortion_tangential_2' : 0,
        'opencv_bayer_pattern' : None,
        #whether this camera was mounted upside down
        'upside_down': True,
        'color': True,
        'type': 'Point Grey DragonFly2',
        #the class to load in a normal python import statement
        'class': 'dragonfly2',
        #UUID obtained by calling 'python camera_uuid.py'
        'uid': None
    }
}
| [
[
1,
0,
0.004,
0.004,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0081,
0.004,
0,
0.66,
0.3333,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0121,
0.004,
0,
0.66,
... | [
"import roslib",
"roslib.load_manifest('hrl_camera')",
"import cv",
"camera_parameters = {\n #A sample configuration\n 'default' :\n {\n 'calibration_image_width' : 320.0,\n 'calibration_image_height' : 240.0, \n 'focal_length_x_in_pixels' : 161.80593,\n ... |
import camera
class firefly(camera.camera):
    """Point Grey Firefly camera; all behavior comes from the camera base class."""
    def __init__(self, camera_configuration, opencv_id):
        # No Firefly-specific register setup needed; defer to the base class.
        camera.camera.__init__(self, camera_configuration, opencv_id)
| [
[
1,
0,
0.2,
0.2,
0,
0.66,
0,
848,
0,
1,
0,
0,
848,
0,
0
],
[
3,
0,
0.8,
0.6,
0,
0.66,
1,
753,
0,
1,
0,
0,
2,
0,
1
],
[
2,
1,
0.9,
0.4,
1,
0.47,
0,
555,
... | [
"import camera",
"class firefly(camera.camera):\n def __init__(self, camera_configuration, opencv_id):\n camera.camera.__init__(self, camera_configuration, opencv_id)",
" def __init__(self, camera_configuration, opencv_id):\n camera.camera.__init__(self, camera_configuration, opencv_id)",
... |
import roslib # Needed to load opencv
roslib.load_manifest('hrl_camera') #
import cv
import camera_setup_lib as csl
import camera
class dragonfly2(camera.camera):
    """Point Grey DragonFly2 camera: sets firewire registers before opening capture."""
    def __init__(self, camera_configuration, opencv_id):
        self.config = camera_configuration
        self.device = opencv_id
        # Program mode/format registers BEFORE the base class opens the capture.
        self._set_registers()
        camera.camera.__init__(self, camera_configuration, opencv_id)
        #create capture and related attributes
        #self.capture = cv.CaptureFromCAM(self.device)
        #if not self.capture:
        #    raise RuntimeError("Cannot open camera!\n")
        cur_codec = cv.GetCaptureProperty(self.capture, cv.CV_CAP_PROP_MODE)
        print "dragonfly2: current codec interpretation is : ", cur_codec
        # Switch OpenCV's codec interpretation to the mode from the config
        # (e.g. 101 for raw Bayer); return value is unused.
        integ = cv.SetCaptureProperty(self.capture,cv.CV_CAP_PROP_MODE,
                                     self.config['cv_cap_prop_mode'])
        #self.set_frame_rate(3.75) # set it really low to start out.
                                   # increase later.
        fps = cv.GetCaptureProperty(self.capture, cv.CV_CAP_PROP_FPS)
        print "dragonfly2: fps : ", fps
        next_codec = cv.GetCaptureProperty(self.capture, cv.CV_CAP_PROP_MODE)
        print "dragonfly2: current codec interpretation is : ", next_codec
    def _set_registers(self):
        """Write camera mode/format registers over the 1394 bus, then read them back."""
        csl.init_bus1394()
        # Mode
        csl.setRegister(self.device,0x604,0xA0000000)
        # Format
        csl.setRegister(self.device,0x608,0x20000000)
        #sets raw bayer image format for mono image format modes
        csl.setRegister(self.device, 0x1048, 0x80000081)
        # Read back for diagnostic printout below.
        mode = csl.getRegister(self.device, 0x604)
        format= csl.getRegister(self.device, 0x608)
        rate = csl.getRegister(self.device, 0x600)
        #software_trigger
        print "dragonfly2: mode", hex(mode)
        print "dragonfly2: format", hex(format)
        print "dragonfly2: rate", hex(rate)
| [
[
1,
0,
0.0208,
0.0208,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0417,
0.0208,
0,
0.66,
0.2,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0625,
0.0208,
0,
0.66,... | [
"import roslib # Needed to load opencv",
"roslib.load_manifest('hrl_camera') #",
"import cv",
"import camera_setup_lib as csl",
"import camera",
"class dragonfly2(camera.camera):\n def __init__(self, camera_configuration, opencv_id):\n self.config = camera_configuration\n ... |
import camera_config as cc
import camera_uuid as cu
def find_camera(name):
    """Instantiate the camera object configured under `name` in camera_config.

    Looks up the camera's parameter dict and OpenCV device index, then loads
    the module named by the 'class' parameter and instantiates the
    identically-named class within it (e.g. firefly.firefly(params, id)).

    Replaces the previous exec/eval-based loading: __import__ + getattr is
    equivalent here and avoids evaluating strings as code.
    """
    parameters = cc.camera_parameters[name]
    opencv_id = cu.lookup_by_name(name)
    classname = parameters['class']
    # Convention: module 'foo' defines class 'foo'.
    camera_module = __import__(classname)
    camera_class = getattr(camera_module, classname)
    return camera_class(parameters, opencv_id)
if __name__ == '__main__':
    import sys
    import roslib
    roslib.load_manifest('hrl_camera')
    import cv
    # Simple viewer: open the camera named on the command line and display
    # undistorted frames until the process is killed.
    name = sys.argv[1]
    cv.NamedWindow(name, cv.CV_WINDOW_AUTOSIZE)
    c = find_camera(name)
    while True:
        f = c.get_frame()
        cv.ShowImage(name, f)
        cv.WaitKey(33)
| [
[
1,
0,
0.0385,
0.0385,
0,
0.66,
0,
94,
0,
1,
0,
0,
94,
0,
0
],
[
1,
0,
0.0769,
0.0385,
0,
0.66,
0.3333,
707,
0,
1,
0,
0,
707,
0,
0
],
[
2,
0,
0.2885,
0.3077,
0,
0.... | [
"import camera_config as cc",
"import camera_uuid as cu",
"def find_camera(name):\n parameters = cc.camera_parameters[name]\n opencv_id = cu.lookup_by_name(name)\n classname = parameters['class']\n import_statement = 'import ' + classname\n instantiation = classname + '.' + classname + '(paramete... |
import _camera_setup_lib
def _swig_repr(self):
    """SWIG boilerplate repr: include the wrapped 'this' pointer when available.

    The original used a bare `except:`, which also swallows SystemExit and
    KeyboardInterrupt; narrowed to `except Exception`.
    """
    try:
        strthis = "proxy of " + self.this.__repr__()
    except Exception:
        # Object has no SWIG 'this' attribute (or its repr failed); omit it.
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Python 2/SWIG compatibility shim: old SWIG wrappers expect _object/_newclass.
import types
try:
    _object = types.ObjectType
    _newclass = 1
except AttributeError:
    # types.ObjectType is absent (newer Pythons); fall back to a dummy base.
    class _object : pass
    _newclass = 0
del types
# Re-export the C extension's entry points at module level.
init_bus1394 = _camera_setup_lib.init_bus1394
getCameraUID = _camera_setup_lib.getCameraUID
endCameraSetup = _camera_setup_lib.endCameraSetup
setFrameRate = _camera_setup_lib.setFrameRate
getRegister = _camera_setup_lib.getRegister
setRegister = _camera_setup_lib.setRegister
#All hex setting to the cameras confined to this code
# Register address tables (name -> firewire register offset).
brightness_register_dict = { 'brightness': 0x800, 'exposure': 0x804,
                             'shutter_time': 0x81c, 'gain': 0x820,
                             'white_balance': 0x80c, 'gamma': 0x818}
register_dict = { 'frame_rate': 0x600, 'trigger_mode': 0x830,
                  'trigger_delay': 0x834, 'pio_direction': 0x11f8,
                  'strobe_0_cnt': 0x1500 }
# fps -> frame-rate register value (rate encoded in the top bits).
fps_dict = { 60:0xa0000000,
             30:0x80000000,
             # 20:0x70000000, 20 does not seem to be an
             # option
             15:0x60000000,
             7.5:0x40000000,
             3.75:0x20000000,
             1.875:0x10000000 }
# Dictionary from registers to fps. It's named spf because spf is fps backwards ;)
spf_dict = { '0xa':60,
             '0x8':30,
             '0x6':15,
             '0x4':7.5 }
# After this, register_dict covers the brightness registers too.
register_dict.update( brightness_register_dict )
def get_registers(index):
    """Read every known camera register.

    index: the .device property of a Camera object.
    Returns a list of (register name, raw register value) pairs.
    """
    values = []
    for name in register_dict.keys():
        values.append((name, getRegister(index, register_dict[name])))
    return values
def get_frame_rate( index ):
    """
    Get the frame rate (fps) of camera `index`.
    """
    # The rate code lives in the top bits of the frame-rate register
    # (0xe0000000 mask, shifted down 28 bits); spf_dict maps the hex
    # code (e.g. '0x8') back to frames per second.
    val = (0xe0000000 & getRegister( index, register_dict['frame_rate'])) >> 7*4
    return spf_dict[hex(int(val))]
def set_frame_rate(index, rate):
    """Set camera `index` to `rate` fps; rate must be a key of fps_dict (e.g. 30, 7.5)."""
    setRegister(index, register_dict['frame_rate'], fps_dict[rate])
def set_stereo_slaving( master, slave ):
    '''
    master and slave would be the .device property of Camera objects
    This function assumes that the right camera is slave off of the left'''
    # Configure the master to drive its strobe pin and the slave to trigger
    # off it, so both cameras expose in lockstep.
    setRegister( master, register_dict['pio_direction'], 0x80000000)
    setRegister( slave, register_dict['strobe_0_cnt'], 0x82000000)
    setRegister( slave, register_dict['trigger_mode'], 0x83110000 )
    setRegister( master, register_dict['trigger_mode'], 0x80100000 )
    # Copy the master's brightness-related settings onto the slave so both
    # images are exposed identically.
    for key in brightness_register_dict.keys():
        rdval = getRegister( master, register_dict[key])
        setRegister( slave, register_dict[key] ,rdval)
def get_brightness_settings(index):
    """Read the brightness-related registers of camera `index`.

    index: the .device property of a Camera object.
    Returns (register name, low-12-bit value) pairs.
    """
    settings = []
    for name in brightness_register_dict.keys():
        raw = getRegister(index, brightness_register_dict[name])
        settings.append((name, 0xfff & raw))
    return settings
def set_auto( index ):
    """Set a particular camera to automatically adjust brightness and exposure"""
    # 0x83... values presumably enable the register's auto-control mode
    # (same prefix used throughout set_brightness) -- TODO confirm against
    # the camera register documentation.
    setRegister( index, register_dict['brightness'], 0x83000000)
    setRegister( index, register_dict['exposure'], 0x83000000)
    setRegister( index, register_dict['shutter_time'], 0x8300000e)
    setRegister( index, register_dict['gain'], 0x8300000f)
    print 'set auto being called'
def get_gamma(index):
    """Return the raw gamma register value for camera `index`."""
    return getRegister(index, brightness_register_dict['gamma'])
def set_gamma(index,gamma):
    ''' Set the gamma register of camera `index`; gamma: 0 or 1
    '''
    # 0x82000000 is the same manual-write prefix used elsewhere in this
    # module; the low bits carry the gamma value.
    setRegister(index, brightness_register_dict['gamma'], 0x82000000+gamma)
def get_whitebalance(index):
    """Return the raw white-balance register value for camera `index`."""
    return getRegister(index, brightness_register_dict['white_balance'])
def set_whitebalance(index,r_val,b_val):
    """Manually set white balance; r_val and b_val share one register (b in the next 12-bit field, 4096 = 2**12)."""
    setRegister(index, brightness_register_dict['white_balance'], 0x82000000+r_val+b_val*4096)
def set_brightness( index, brightness=None, exposure=None, shutter_time=None, gain=None ):
    """If brightness is not specified auto mode is used for all settings. If shutter_time
    and gain are specified, exposure does nothing. All values should be set between 0-4095.

    Register value conventions used below (per the rest of this module):
    0x83... = auto mode, 0x82...+v = manual with 12-bit value v, 0x80... = off/default.
    Branches:
      * brightness None, exposure given -> manual exposure, everything else auto
      * brightness None, exposure None  -> full auto (set_auto)
      * brightness given + shutter_time/gain -> all manual, exposure disabled
      * brightness given only           -> manual brightness, rest auto
      * brightness + exposure given     -> manual brightness and exposure, rest auto
    """
    def limit_fff( parameter ):
        # Clamp to the 12-bit range (0-0xfff) accepted by the camera registers.
        if parameter > 0xfff:
            parameter = 0xfff
        elif parameter < 0:
            parameter = 0
        return parameter
    if brightness is None and exposure is not None:
        setRegister( index, register_dict['brightness'], 0x83000000 )
        setRegister( index, register_dict['exposure'], 0x82000000+limit_fff(exposure))
        setRegister( index, register_dict['shutter_time'], 0x83000000)
        setRegister( index, register_dict['gain'], 0x83000000)
    elif brightness is None:
        set_auto( index )
    else:
        if shutter_time is not None or gain is not None:
            # Fully manual: exposure register is turned off.
            setRegister( index, register_dict['brightness'], 0x82000000+limit_fff(brightness))
            setRegister( index, register_dict['exposure'], 0x80000000)
            setRegister( index, register_dict['shutter_time'], 0x82000000+limit_fff(shutter_time))
            setRegister( index, register_dict['gain'], 0x82000000+limit_fff(gain))
        elif exposure is None:
            setRegister( index, register_dict['brightness'], 0x82000000+limit_fff(brightness))
            setRegister( index, register_dict['exposure'], 0x83000000)
            setRegister( index, register_dict['shutter_time'], 0x83000000)
            setRegister( index, register_dict['gain'], 0x83000000)
        else:
            setRegister( index, register_dict['brightness'], 0x82000000+limit_fff(brightness))
            setRegister( index, register_dict['exposure'], 0x82000000+limit_fff(exposure))
            setRegister( index, register_dict['shutter_time'], 0x83000000)
            setRegister( index, register_dict['gain'], 0x83000000)
| [
[
1,
0,
0.0063,
0.0063,
0,
0.66,
0,
740,
0,
1,
0,
0,
740,
0,
0
],
[
2,
0,
0.0281,
0.025,
0,
0.66,
0.04,
276,
0,
1,
1,
0,
0,
0,
1
],
[
7,
1,
0.0281,
0.0125,
1,
0.99,... | [
"import _camera_setup_lib",
"def _swig_repr(self):\n try: strthis = \"proxy of \" + self.this.__repr__()\n except: strthis = \"\"\n return \"<%s.%s; %s >\" % (self.__class__.__module__, self.__class__.__name__, strthis,)",
" try: strthis = \"proxy of \" + self.this.__repr__()\n except: strthis = ... |
import roslib # Needed to load opencv
roslib.load_manifest('hrl_camera') #
import cv
import hrl_opencv.adaptors as ad
import camera_setup_lib as csl
import numpy as np
class NoFrameException(Exception):
    """Raised when the capture device fails to return an image frame."""

    def __init__(self, value):
        # Keep the payload accessible programmatically as well as via str().
        self.value = value

    def __str__(self):
        return '%r' % (self.value,)
class camera:
    """Base class for a firewire camera opened through OpenCV's capture API:
    handles capture setup, debayering, undistortion, and orientation fixes."""
    ##
    # @param camera_configuration a dictionary of parameters needed for this camera
    def __init__(self, camera_configuration, opencv_id):
        self.config = camera_configuration
        self.device = opencv_id
        #self._set_registers()
        #create capture and related attributes
        self.capture = cv.CaptureFromCAM(self.device)
        if not self.capture:
            raise RuntimeError("Cannot open camera!\n")
        self._make_undistort_matrices()
    def _make_undistort_matrices(self):
        """Build intrinsic/distortion matrices and allocate all image buffers."""
        p = self.config
        # 3x3 pinhole intrinsic matrix from the calibration parameters.
        some_arr = np.array([[p['focal_length_x_in_pixels'], 0, p['optical_center_x_in_pixels']],
                             [0, p['focal_length_y_in_pixels'], p['optical_center_y_in_pixels']],
                             [0, 0, 1.0]])
        self.intrinsic_cvmat = ad.array2cvmat(some_arr)
        self.distortion_cvmat = ad.array2cvmat(np.array([[p['lens_distortion_radial_1'],
                                                          p['lens_distortion_radial_2'],
                                                          p['lens_distortion_tangential_1'],
                                                          p['lens_distortion_tangential_2']]]))
        self.size = (int(p['calibration_image_width']), int(p['calibration_image_height']))
        #Sanity check
        # Grab one frame to verify the device resolution matches the config.
        size_image = cv.QueryFrame(self.capture)
        camera_image_size = cv.GetSize(size_image)
        if not ((camera_image_size[0] == self.size[0]) and (camera_image_size[1] == self.size[1])):
            raise RuntimeError('Size of image returned by camera and size declared in config. file do not match.'
                               + ' Config:' + str(self.size) + ' Camera: ' + str(camera_image_size))
        #Set up buffers for undistortion
        self.raw_image = cv.CreateImage(self.size, cv.IPL_DEPTH_8U, 1)
        self.gray_image = cv.CreateImage(self.size, cv.IPL_DEPTH_8U, 1)
        self.undistort_mapx = cv.CreateImage(self.size, cv.IPL_DEPTH_32F, 1)
        self.undistort_mapy = cv.CreateImage(self.size, cv.IPL_DEPTH_32F, 1)
        self.unbayer_image = cv.CreateImage(self.size, cv.IPL_DEPTH_8U, 3)
        self.color = p['color']
        if self.color == True:
            self.cvbayer_pattern = p['opencv_bayer_pattern']
        # 3-channel output for color cameras, single channel otherwise.
        if self.color == True:
            self.undistort_image = cv.CreateImage(self.size, cv.IPL_DEPTH_8U, 3)
        else:
            self.undistort_image = cv.CreateImage(self.size, cv.IPL_DEPTH_8U, 1)
        # Precompute the undistortion remap tables once.
        cv.InitUndistortMap(self.intrinsic_cvmat, self.distortion_cvmat,
                            self.undistort_mapx, self.undistort_mapy)
        # Lazily-allocated buffer for flipping upside-down cameras.
        self.corrected_orientation = None
    def get_frame(self):
        """Return a debayered, undistorted (and, if configured, flipped) frame."""
        self.raw_image = self.get_raw_frame()
        im = self.undistort_frame()
        if self.config.has_key('upside_down'):
            if self.config['upside_down']:
                if self.corrected_orientation == None:
                    self.corrected_orientation = cv.CloneImage(im)
                # -1 flips around both axes (180-degree rotation).
                cv.Flip(im, self.corrected_orientation, -1)
                im = self.corrected_orientation
        return im
    ## returns color image. does NOT undistort the image.
    def get_frame_debayered(self):
        self.raw_image = self.get_raw_frame()
        return self.convert_color()
    def get_raw_frame(self):
        # Assumes that we are going to debayer the image later, so
        # returns a single channel image.
        im = cv.QueryFrame(self.capture)
        if im == None:
            raise NoFrameException('')
        cv.Split(im, self.gray_image, None, None, None)
        return self.gray_image
    def undistort_frame(self):
        """Debayer (if color) then remap through the precomputed undistortion tables."""
        img = self.convert_color()
        cv.Remap(img, self.undistort_image, self.undistort_mapx, self.undistort_mapy,
                 cv.CV_INTER_LINEAR, cv.ScalarAll(0))
        return self.undistort_image
    def convert_color(self):
        """Debayer raw_image into a color image, or pass it through for mono cameras."""
        if self.color == True:
            cv.CvtColor(self.raw_image, self.unbayer_image, self.cvbayer_pattern)
            return self.unbayer_image
        else:
            return self.raw_image
    ##
    # Set frame rate: 7.5, 15, 30, or 60Hz
    # by default we set this to a low value
    # to not have to deal with firewire bandwidth
    # issues
    def set_frame_rate(self, rate=7.5):
        cv.SetCaptureProperty(self.capture, cv.CV_CAP_PROP_FPS, rate)
        #csl.set_frame_rate(self.device, rate)
    def get_frame_rate(self):
        fps = cv.GetCaptureProperty(self.capture, cv.CV_CAP_PROP_FPS)
        return fps
    def set_brightness(self, brightness=150, shutter_time=97,
                       gain=450, exposure=None):
        # Delegates to the low-level register interface in camera_setup_lib.
        csl.set_brightness(self.device, brightness, exposure, shutter_time, gain)
    ## auto - probably only used for displaying images.
    def set_auto(self):
        csl.set_auto(self.device)
#if __name__ == '__main__':
# cva = ad.array2cv(np.ones((2,3)))
| [
[
1,
0,
0.0075,
0.0075,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0149,
0.0075,
0,
0.66,
0.1429,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0299,
0.0075,
0,
0.... | [
"import roslib # Needed to load opencv",
"roslib.load_manifest('hrl_camera') #",
"import cv",
"import hrl_opencv.adaptors as ad",
"import camera_setup_lib as csl",
"import numpy as np",
"class NoFrameException(Exception):\n \n def __init__(self, value):\n self.value = v... |
#!/usr/bin/env python
import roslib
roslib.load_manifest('hrl_camera')
import ros_camera as rc
import sys
import cv
import rospy
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print 'Views images published through ROS.'
        print 'Usage: ./image_listener.py ROS_TOPIC_NAME'
    else:
        # Subscribe to the named image topic and display frames until shutdown.
        ros_topic_name = sys.argv[1]
        camera = rc.ROSImageClient(ros_topic_name)
        cv.NamedWindow(ros_topic_name, cv.CV_WINDOW_AUTOSIZE)
        while not rospy.is_shutdown():
            f = camera.get_frame()
            cv.ShowImage(ros_topic_name, f)
            cv.WaitKey(10)
| [
[
1,
0,
0.0909,
0.0455,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1364,
0.0455,
0,
0.66,
0.1667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1818,
0.0455,
0,
0.... | [
"import roslib",
"roslib.load_manifest('hrl_camera')",
"import ros_camera as rc",
"import sys",
"import cv",
"import rospy",
"if __name__ == '__main__':\n if len(sys.argv) < 2:\n print('Views images published through ROS.')\n print('Usage: ./image_listener.py ROS_TOPIC_NAME')\n else:... |
## Copyright (c) 2004-2007, Andrew D. Straw. All rights reserved.
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above
## copyright notice, this list of conditions and the following
## disclaimer in the documentation and/or other materials provided
## with the distribution.
## * Neither the name of the Andrew D. Straw nor the names of its
## contributors may be used to endorse or promote products derived
## from this software without specific prior written permission.
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
##Original code was modified, PlaneLeastSquaresModel() added:
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import numpy as np
import scipy # use numpy if scipy unavailable
import scipy.linalg # use numpy if scipy unavailable
### Optional imports below
# import processor [Both only for testing obsolete dataset, testPlanePointcloud() ]
# import configuration
# import pylab; import sys
TEST_FOLDER ='/home/jokerman/svn/robot1_data/usr/martin/laser_camera_segmentation/labeling'
def ransac(data,model,n,k,t,d,debug=False,return_all=False):
print 'INFO: running RANSAC for k=',k,'iterations'
"""fit model parameters to data using the RANSAC algorithm
This implementation written from pseudocode found at
http://en.wikipedia.org/w/index.php?title=RANSAC&oldid=116358182
{{{
Given:
data - a set of observed data points
model - a model that can be fitted to data points
n - the minimum number of data values required to fit the model
k - the maximum number of iterations allowed in the algorithm
t - a threshold value for determining when a data point fits a model
d - the number of close data values required to assert that a model fits well to data
Return:
bestfit - model parameters which best fit the data (or nil if no good model is found)
iterations = 0
bestfit = nil
besterr = something really large
while iterations < k {
maybeinliers = n randomly selected values from data
maybemodel = model parameters fitted to maybeinliers
alsoinliers = empty set
for every point in data not in maybeinliers {
if point fits maybemodel with an error smaller than t
add point to alsoinliers
}
if the number of elements in alsoinliers is > d {
% this implies that we may have found a good model
% now test how good it is
bettermodel = model parameters fitted to all points in maybeinliers and alsoinliers
thiserr = a measure of how well model fits these points
if thiserr < besterr {
bestfit = bettermodel
besterr = thiserr
}
}
increment iterations
}
return bestfit
}}}
"""
# iterations = 0
# bestfit = None
# besterr = np.inf
# best_inlier_idxs = None
# while iterations < k:
# maybe_idxs, test_idxs = random_partition(n,data.shape[0])
# print n
# maybeinliers = data[maybe_idxs,:]
# #print 'z',maybeinliers
# test_points = data[test_idxs]
# maybemodel = model.fit(maybeinliers)
# test_err = model.get_error( test_points, maybemodel)
# also_idxs = test_idxs[test_err < t] # select indices of rows with accepted points
# alsoinliers = data[also_idxs,:]
# if debug:
# print 'test_err.min()',test_err.min()
# print 'test_err.max()',test_err.max()
# print 'np.mean(test_err)',np.mean(test_err)
# print 'iteration %d:len(alsoinliers) = %d'%(
# iterations,len(alsoinliers))
# if len(alsoinliers) > d:
# print np.asmatrix(maybeinliers), np.asmatrix(alsoinliers)
# betterdata = np.concatenate( (maybeinliers, np.asmatrix(alsoinliers)) )
# bettermodel = model.fit(np.asarray(betterdata))
# better_errs = model.get_error( betterdata, bettermodel)
# thiserr = np.mean( better_errs )
# if thiserr < besterr:
# bestfit = bettermodel
# besterr = thiserr
# print maybe_idxs, also_idxs
# best_inlier_idxs = np.concatenate( (maybe_idxs, [also_idxs]) )
# iterations+=1
# if bestfit is None:
# raise ValueError("did not meet fit acceptance criteria")
# if return_all:
# return bestfit, {'inliers':best_inlier_idxs}
# else:
# return bestfit
iterations = 0
bestfit = None
besterr = np.inf
best_inlier_idxs = None
while iterations < k:
#print data
maybe_idxs, test_idxs = random_partition(n,data.shape[0])
maybeinliers = data[maybe_idxs,:]
test_points = data[test_idxs]
maybemodel = model.fit(maybeinliers)
test_err = model.get_error( test_points, maybemodel)
also_idxs = test_idxs[test_err < t] # select indices of rows with accepted points
alsoinliers = data[also_idxs,:]
if debug:
print 'test_err.min()',test_err.min()
print 'test_err.max()',test_err.max()
print 'np.mean(test_err)',np.mean(test_err)
print 'iteration %d:len(alsoinliers) = %d'%(
iterations,len(alsoinliers))
if len(alsoinliers) > d:
betterdata = np.concatenate( (maybeinliers, alsoinliers) )
bettermodel = model.fit(betterdata)
better_errs = model.get_error( betterdata, bettermodel)
thiserr = np.mean( better_errs )
if thiserr < besterr:
bestfit = bettermodel
besterr = thiserr
best_inlier_idxs = np.concatenate( (maybe_idxs, also_idxs) )
iterations+=1
if bestfit is None:
print "\n\n[ransac.py - line 152]"
print "Ransac plane fitting did not meet fit accaptance criteria at current settings."
print "Consider editing Ransac Parameters to be more generous or trying scan again."
print "This error often happens when no table is present in front of the robot.\n\n"
import sys;
sys.exit()
#Lets NOT raise an error. raise ValueError("did not meet fit acceptance criteria")
if return_all:
return bestfit, {'inliers':best_inlier_idxs}
else:
return bestfit
def random_partition(n,n_data):
    """Randomly split the index range [0, n_data) into two disjoint groups.

    Returns a tuple (first, rest): ``first`` holds n randomly chosen row
    indices and ``rest`` holds the remaining n_data - n indices.
    """
    shuffled = np.arange(n_data)
    np.random.shuffle(shuffled)
    return shuffled[:n], shuffled[n:]
class LinearLeastSquaresModel:
    """Linear system solved using linear least squares.

    This class serves as an example that fulfills the model interface
    needed by the ransac() function.

    Parameters
    ----------
    input_columns : indices of the independent-variable columns of the data
    output_columns : indices of the dependent-variable columns of the data
    debug : verbosity flag (kept for interface compatibility; unused here)
    """
    def __init__(self,input_columns,output_columns,debug=False):
        self.input_columns = input_columns
        self.output_columns = output_columns
        self.debug = debug
    def fit(self, data):
        """Return x minimizing ||A*x - B|| where A/B are the input/output columns."""
        A = np.vstack([data[:,i] for i in self.input_columns]).T
        B = np.vstack([data[:,i] for i in self.output_columns]).T
        x,resids,rank,s = scipy.linalg.lstsq(A,B)
        return x
    def get_error( self, data, model):
        """Return the summed squared residual of each row of data under model."""
        A = np.vstack([data[:,i] for i in self.input_columns]).T
        B = np.vstack([data[:,i] for i in self.output_columns]).T
        # np.dot: the scipy.dot alias was deprecated and removed from modern SciPy.
        B_fit = np.dot(A,model)
        # NOTE: a stray Python-2 `print err_per_point` debug statement was removed
        # here; it broke Python 3 and spammed stdout on every RANSAC iteration.
        err_per_point = np.sum((B-B_fit)**2,axis=1) # sum squared error per row
        return err_per_point
class PlaneLeastSquaresModel:
    """Plane model for ransac(): fit a plane to 3-D points.

    A model is the pair [point_on_plane, unit_normal].  Candidate planes
    tilted more than MAX_TILT_RAD from horizontal are rejected by assigning
    every point a huge error, so RANSAC never accepts them.
    """
    # Reject planes tilted more than this from the z-axis (30 degrees).
    MAX_TILT_RAD = 30.0 * np.pi / 180.0
    # Error assigned to every point when the candidate plane is rejected
    # (same literal value as the original code, so thresholds behave identically).
    REJECT_ERROR = 999999999999999999999999999999

    def __init__(self, debug=False):
        self.debug = debug
    def fit(self, data):
        """Fit a plane through the first three rows of data.

        Returns [point, normal] with normal normalized to unit length.
        """
        model = [data[0],np.cross(data[1] - data[0], data[2] - data[1])] #point, normal
        model[1] = model[1] / np.linalg.norm(model[1]) #normalize
        return model
    def get_error( self, data, model):
        """Per-point absolute distance from the plane (Hesse normal form).

        Returns REJECT_ERROR for every point if the plane is too far from
        horizontal.  np.dot replaces the removed scipy.dot alias throughout.
        """
        # reject model if it's not horizontal (normal is already unit length)
        angle = np.arccos(np.dot(np.array([0,0,1]),model[1].T))
        if abs(angle) > self.MAX_TILT_RAD:
            return np.ones(np.shape(data)[0]) * self.REJECT_ERROR
        # http://de.wikipedia.org/wiki/Hessesche_Normalform
        d = np.dot(model[0],model[1].T)
        s = np.dot(data, model[1].T) - d
        return abs(s)
def test():
    """Demo of ransac() on a noisy 1-D linear system with injected outliers.

    Generates y = m*x data, corrupts it with Gaussian noise plus 100 gross
    outliers, then compares a plain least-squares fit against the RANSAC
    fit in a pylab plot.  Kept byte-identical apart from comments: the
    figure's reproducibility depends on the exact order of np.random calls.
    """
    # generate perfect input data
    n_samples = 500
    n_inputs = 1
    n_outputs = 1
    A_exact = 20*np.random.random((n_samples,n_inputs) )
    perfect_fit = 60*np.random.normal(size=(n_inputs,n_outputs) ) # the model
    B_exact = scipy.dot(A_exact,perfect_fit)
    assert B_exact.shape == (n_samples,n_outputs)
    # add a little gaussian noise (linear least squares alone should handle this well)
    A_noisy = A_exact + np.random.normal(size=A_exact.shape )
    B_noisy = B_exact + np.random.normal(size=B_exact.shape )
    if 1:
        # add some outliers
        n_outliers = 100
        all_idxs = np.arange( A_noisy.shape[0] )
        np.random.shuffle(all_idxs)
        outlier_idxs = all_idxs[:n_outliers]
        non_outlier_idxs = all_idxs[n_outliers:]
        # overwrite the chosen rows with uncorrelated junk
        A_noisy[outlier_idxs] = 20*np.random.random((n_outliers,n_inputs) )
        B_noisy[outlier_idxs] = 50*np.random.normal(size=(n_outliers,n_outputs) )
    # setup model
    all_data = np.hstack( (A_noisy,B_noisy) )
    input_columns = range(n_inputs) # the first columns of the array
    output_columns = [n_inputs+i for i in range(n_outputs)] # the last columns of the array
    debug = False
    model = LinearLeastSquaresModel(input_columns,output_columns,debug=debug)
    # reference: ordinary least squares over ALL points (outliers included)
    linear_fit,resids,rank,s = scipy.linalg.lstsq(all_data[:,input_columns],
                                                  all_data[:,output_columns])
    # run RANSAC algorithm
    # (n=50 seed points, k=1000 iterations, t=7e3 error threshold,
    #  d=300 extra inliers required)
    ransac_fit, ransac_data = ransac(all_data,model,
                                     50, 1000, 7e3, 300, # misc. parameters
                                     debug=debug,return_all=True)
    if 1:
        import pylab
        sort_idxs = np.argsort(A_exact[:,0])
        A_col0_sorted = A_exact[sort_idxs] # maintain as rank-2 array
        if 1:
            pylab.plot( A_noisy[:,0], B_noisy[:,0], 'k.', label='data' )
            pylab.plot( A_noisy[ransac_data['inliers'],0], B_noisy[ransac_data['inliers'],0], 'bx', label='RANSAC data' )
        else:
            # alternative view: color outliers separately (dead branch, kept as-is)
            pylab.plot( A_noisy[non_outlier_idxs,0], B_noisy[non_outlier_idxs,0], 'k.', label='noisy data' )
            pylab.plot( A_noisy[outlier_idxs,0], B_noisy[outlier_idxs,0], 'r.', label='outlier data' )
        pylab.plot( A_col0_sorted[:,0],
                    np.dot(A_col0_sorted,ransac_fit)[:,0],
                    label='RANSAC fit' )
        pylab.plot( A_col0_sorted[:,0],
                    np.dot(A_col0_sorted,perfect_fit)[:,0],
                    label='exact system' )
        pylab.plot( A_col0_sorted[:,0],
                    np.dot(A_col0_sorted,linear_fit)[:,0],
                    label='linear fit' )
        pylab.legend()
        pylab.show()
def testPlane():
    """Smoke-test ransac() with the plane model on six hand-picked points.

    Most of the points lie near the z=0 plane; RANSAC should recover that
    plane and leave the far-off points (z ~ 12) out of the inlier set.
    Prints the fitted model and the inlier bookkeeping.
    """
    debug = True
    model = PlaneLeastSquaresModel(debug)
    data = np.array([[0,0,0],[0,1,0],[0.1,12,0.1],[0,0,12],[1,0,0],[1,2,13]])
    # run RANSAC algorithm: 3-point samples, 1000 iterations,
    # inlier threshold 1, at least 2 additional inliers required
    ransac_fit, ransac_data = ransac(data,model,
                                     3, 1000, 1, 2, # misc. parameters
                                     debug=debug,return_all=True)
    # single-argument print() is valid in both Python 2 and 3; the old
    # `print x` statement form was a syntax error under Python 3
    print(ransac_fit)
    print(ransac_data)
def testPlanePointcloud():
    """Fit a horizontal plane to a recorded laser scan and display the inliers.

    Requires the project's processor/configuration modules and the recorded
    dataset '2009Nov04_141226' under TEST_FOLDER; not runnable standalone.
    """
    import processor
    import configuration
    cfg = configuration.configuration(TEST_FOLDER)
    #sc = scanner.scanner(cfg)
    pc = processor.processor(cfg)
    #pc.load_data('2009Oct30_162400')
    pc.load_data('2009Nov04_141226')
    pc.process_raw_data()
    debug = False
    model = PlaneLeastSquaresModel(debug)
    # NOTE(review): pts3d_bound appears to be 3xN (transposed here to N x 3
    # rows for RANSAC, and column-indexed by inliers below) -- confirm.
    data = np.asarray(pc.pts3d_bound).T
    # run RANSAC algorithm
    # (3-point samples, 1000 iterations, 0.02 error threshold,
    #  at least 300 additional inliers required)
    ransac_fit, ransac_data = ransac(data,model,
                                     3, 1000, 0.02, 300, # misc. parameters
                                     debug=debug,return_all=True)
    print ransac_fit
    print ransac_data
    print 'len inlier',len(ransac_data['inliers']),'shape pts',np.shape(pc.pts3d_bound)
    # keep only the plane inliers, then render them colored by height
    pc.pts3d_bound = pc.pts3d_bound[:,ransac_data['inliers']]
    pc.display_3d('height')
# Script entry point: run the point-cloud plane-fitting demo (requires the
# project's processor/configuration modules and recorded scan data).
if __name__=='__main__':
    #testPlane()
    testPlanePointcloud()
| [
[
1,
0,
0.1071,
0.003,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.1101,
0.003,
0,
0.66,
0.0909,
265,
0,
1,
0,
0,
265,
0,
0
],
[
1,
0,
0.1131,
0.003,
0,
0.6... | [
"import numpy as np",
"import scipy # use numpy if scipy unavailable",
"import scipy.linalg # use numpy if scipy unavailable",
"TEST_FOLDER ='/home/jokerman/svn/robot1_data/usr/martin/laser_camera_segmentation/labeling'",
"def ransac(data,model,n,k,t,d,debug=False,return_all=False):\n print('INFO: runnin... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
class label_object(object):
    """A labelled annotation: an ordered list of points plus a label string.

    Edge-style labels render as lines, everything else as a polygon.
    """

    def __init__(self):
        """Start with no points and the default label 'object'."""
        self.points = []
        self.label = 'object'

    def get_points(self):
        """Return the list of points collected so far."""
        return self.points

    def add_point(self, x):
        """Append one point to the annotation."""
        self.points.append(x)

    def set_label(self, label):
        """Replace the label string."""
        self.label = label

    def get_label(self):
        """Return the current label string."""
        return self.label

    def get_type(self):
        """Return 'line' for edge-family labels, 'polygon' otherwise."""
        if self.label in ('edge', 'edge_up', 'edge_down'):
            return 'line'
        return 'polygon'

    def set_type(self, type):
        """Store an explicit type attribute on the instance."""
        self.type = type

    def delete_last_point(self):
        """Drop the most recently added point."""
        self.points.pop()

    def is_empty(self):
        """True when the annotation holds no points."""
        return not self.points
[
3,
0,
0.7164,
0.5821,
0,
0.66,
0,
589,
0,
9,
0,
0,
186,
0,
3
],
[
2,
1,
0.5149,
0.0896,
1,
0.07,
0,
555,
0,
1,
0,
0,
0,
0,
0
],
[
8,
2,
0.5075,
0.0448,
2,
0.05,
... | [
"class label_object(object):\n\n\n def __init__(self):\n '''\n Constructor\n '''\n self.points = []",
" def __init__(self):\n '''\n Constructor\n '''\n self.points = []\n self.label = 'object'",
" '''\n Constructor\n '''",... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import label_object, scan_dataset
import util as ut #Uses: load_pickle, save_pickle, formatted_time
import shutil #file operations
class scans_database(object):
    '''
    Pickled collection of scan_dataset objects with a movable cursor.

    The database lives in a single pickle file; save() writes a timestamped
    backup copy before overwriting.  Navigation methods (get_next_dataset,
    get_prev_dataset, ...) move self.current_index and return False -- not
    None -- when no dataset is available, so callers compare against False.
    '''
    def __init__(self):
        '''
        Start with an empty dataset list and the cursor at index 0.
        '''
        self.datasets = []
        self.current_index = 0
    def load(self, path, filename):
        # Load the pickled database from path/filename.
        # NOTE(review): `dict` shadows the builtin; left unchanged here.
        self.filename = filename
        self.path = path
        #try:
        dict = ut.load_pickle(self.path+'/'+self.filename)
        #except:
        #    print 'loading of '+self.path+'/'+filename+' failed. WARNING: it will be overwritten on save()!'
        #    return
        self.datasets = dict['datasets']
    def save(self):
        # Persist the dataset list, backing up the old file first.
        dict = {'datasets': self.datasets,'version': 0.1}
        #for now: make a backup first:
        database_filename = self.path+'/'+self.filename
        backup_filename = self.path+'/'+self.filename+'_backup_'+ut.formatted_time()
        print 'Backing up old database to ' + backup_filename
        shutil.copy(database_filename, backup_filename)
        print "Saving: "+database_filename
        ut.save_pickle(dict,database_filename)
    def get_path(self):
        # Directory the database file was loaded from.
        return self.path
    def get_dataset(self, index):
        # Move the cursor to `index` and return that dataset (IndexError if out of range).
        self.current_index = index
        return self.datasets[index]
    def get_dataset_by_id(self, id):
        # Linear search by dataset id; returns False when not found.
        #TODO: faster lookup, probably using a dictionary instead of a list?
        for dataset in self.datasets:
            if dataset.id == id:
                return dataset
        return False
    def set_internal_pointer_to_dataset(self, id):
        # Point the cursor at the dataset with this id; False if absent.
        self.current_index = 0
        for dataset in self.datasets:
            if dataset.id == id:
                return True
            self.current_index += 1
        return False
    def get_next_dataset(self):
        # Advance the cursor; False when already at the last dataset.
        if self.current_index < len(self.datasets) - 1:
            self.current_index = self.current_index + 1
            return self.datasets[self.current_index]
        else:
            return False
    def get_prev_dataset(self):
        # Step the cursor back; False when already at the first dataset.
        if self.current_index > 0:
            self.current_index = self.current_index - 1
            return self.datasets[self.current_index]
        else:
            return False
    def get_first_dataset(self):
        # Cursor to index 0; False when the database is empty.
        if len(self.datasets) > 0:
            self.current_index = 0
            return self.datasets[self.current_index]
        else:
            return False
    def get_last_dataset(self):
        # Cursor to the final index; False when the database is empty.
        if len(self.datasets) > 0:
            self.current_index = len(self.datasets) - 1
            return self.datasets[self.current_index]
        else:
            return False
    def get_count(self):
        # Number of datasets currently held.
        return len(self.datasets)
    def add_dataset(self, dataset):
        # Append; does NOT move the cursor.
        self.datasets.append(dataset)
    def delete_current_dataset(self):
        # Remove the dataset under the cursor and return a neighbor
        # (previous if possible, otherwise next).
        del self.datasets[self.current_index]
        dataset = self.get_prev_dataset()
        if False != dataset:
            return dataset
        else:
            dataset = self.get_next_dataset()
            return dataset #TODO: still fails if there is only one dataset!
    def add_attribute_to_every_dataset(self, name):
        # Seed a new empty string attribute in every dataset's dict.
        for dataset in self.datasets:
            dataset.dict[name]=''
| [
[
1,
0,
0.2083,
0.0069,
0,
0.66,
0,
589,
0,
2,
0,
0,
589,
0,
0
],
[
1,
0,
0.2153,
0.0069,
0,
0.66,
0.3333,
811,
0,
1,
0,
0,
811,
0,
0
],
[
1,
0,
0.2222,
0.0069,
0,
... | [
"import label_object, scan_dataset",
"import util as ut #Uses: load_pickle, save_pickle, formatted_time",
"import shutil #file operations",
"class scans_database(object):\n '''\n classdocs\n '''\n\n\n def __init__(self):\n '''",
" '''\n classdocs\n '''",
" def __init__(self... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from classifier import classifier
import util as ut #Uses: load_pickle, getTime
import numpy as np
import processor ###
# Other Imports (classify_baseline_code):
# x import hrl_tilting_hokuyo.processing_3d as p3d
# import hrl_tilting_hokuyo.occupancy_grid_3d as og3d [for occupancy_grid_3d()]
# x import hrl_tilting_hokuyo.display_3d_mayavi as d3m
class baseline_classifier(classifier):
'''
classdocs
'''
#def __init__(selfparams):
# '''
# Constructor
# '''
def test(self, feature_data = None):
#test on current scan:
print ut.getTime(), 'test on:', self.processor.scan_dataset.id
if feature_data == None:
filename = self.processor.get_features_filename()
dict = ut.load_pickle(filename)
else:
dict = feature_data
baseline_labels = self.classify_baseline_code()
return baseline_labels, self.test_results(dict, baseline_labels)
def classify_baseline_code(self):
###import hrl_tilting_hokuyo.processing_3d as p3d
###import hrl_tilting_hokuyo.display_3d_mayavi as d3m
import hrl_tilting_hokuyo.occupancy_grid_3d as og3d
pt = np.matrix(self.processor.point_of_interest).T
#define VOI
width_half = self.processor.voi_width / 2.0
brf = pt+np.matrix([-width_half,-width_half,-width_half]).T
tlb = pt+np.matrix([width_half, width_half, width_half]).T
resolution = np.matrix([0.1,0.1,0.0025]).T
max_dist = 15
min_dist = -15
gr = og3d.occupancy_grid_3d(brf,tlb,resolution)
print 'filling grid...'
gr.fill_grid(self.processor.pts3d_bound)
print '...filled.'
gr.to_binary(1)
l = gr.find_plane_indices(assume_plane=True,hmin=0.3,hmax=2)
z_min = min(l)*gr.resolution[2,0]+gr.brf[2,0]
z_max = max(l)*gr.resolution[2,0]+gr.brf[2,0]
pts = np.asarray(self.processor.pts3d_bound)
conditions_surface = np.multiply(pts[2,:] > z_min, pts[2,:] < z_max)
print 'cf',conditions_surface
conditions_clutter = np.invert(conditions_surface)
conditions_surface = np.multiply(conditions_surface, np.array(self.processor.map_polys) > 0)
print 'cf',conditions_surface
idx_surface = np.where(conditions_surface)
conditions_clutter = np.multiply(conditions_clutter, np.array(self.processor.map_polys) > 0)
idx_clutter = np.where(conditions_clutter)
n, m = np.shape(self.processor.pts3d_bound)
print n,m
labels = np.zeros(m)
print np.shape(labels), labels
print np.shape(idx_surface), idx_surface
labels[idx_surface] = processor.LABEL_SURFACE
labels[idx_clutter] = processor.LABEL_CLUTTER
print labels
return labels
| [
[
1,
0,
0.2632,
0.0088,
0,
0.66,
0,
71,
0,
1,
0,
0,
71,
0,
0
],
[
1,
0,
0.2807,
0.0088,
0,
0.66,
0.25,
811,
0,
1,
0,
0,
811,
0,
0
],
[
1,
0,
0.2895,
0.0088,
0,
0.66... | [
"from classifier import classifier",
"import util as ut #Uses: load_pickle, getTime",
"import numpy as np",
"import processor ###",
"class baseline_classifier(classifier):\n '''\n classdocs\n '''\n\n\n #def __init__(selfparams):\n # '''",
" '''\n classdocs\n '''",
" def tes... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
class features(object):
    """Base class for feature extractors; subclasses override the hook methods.

    The base implementation stores the processor and returns None from
    every hook.
    """

    # Class-level default, overwritten per instance by __init__.
    processor = None

    def __init__(self,processor):
        """Remember the processor that supplies the scan data."""
        self.processor = processor

    def get_indexvector(self):
        """Hook: return the feature index vector (base class has none)."""
        return None

    def get_featurevector(self):
        """Hook: return the feature vector for a specific point."""
        return None

    def prepare(self, features_k_nearest_neighbors):
        """Hook: precompute whatever the extractor needs (no-op here)."""
        return None
| [
[
3,
0,
0.7593,
0.463,
0,
0.66,
0,
479,
0,
4,
0,
0,
186,
0,
0
],
[
8,
1,
0.5741,
0.0556,
1,
0.6,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
1,
0.6481,
0.0185,
1,
0.6,
0... | [
"class features(object):\n '''\n classdocs\n '''\n\n\n processor = None",
" '''\n classdocs\n '''",
" processor = None",
" def __init__(self,processor):\n '''\n Constructor\n '''\n self.processor = processor",
" '''\n Constructor\n ... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import numpy as np, math
import roslib; roslib.load_manifest('hrl_lib')
import hrl_lib.transforms as tr
def residualXform( residuals ):
    '''Split a 6-vector of calibration residuals into rotation and translation.

    residuals are np.array([ Rz2, Rx, Rz1, dx, dy, dz ])
    Returns (rotResid, dispResid): a Z-X-Z rotation matrix product and a
    3x1 displacement column vector.
    '''
    rz2, rx, rz1, dx, dy, dz = residuals
    rot = tr.Rz(rz2) * tr.Rx(rx) * tr.Rz(rz1)
    disp = np.matrix([dx, dy, dz]).T
    return rot, disp
def camTlaser( res = np.zeros(7) ):
    '''Homogeneous transform between the camera and laser frames.

    res: 7 calibration values [Ry, Rz, Rx, Rz offsets in degrees, dx, dy, dz].
    NOTE(review): np.zeros(7) is a mutable default argument; it is only read
    here so behavior is unaffected, but avoid mutating it.
    '''
    # @ Duke, res = np.array([0.8, 0.9, -1.7, 3.1, 0.061, 0.032, -0.035 ])
    rot = tr.Ry( math.radians( 0.0 + res[0] )) * tr.Rz( math.radians( 0.0 + res[1] )) * tr.Rx( math.radians( -90.0 + res[2] )) * tr.Rz( math.radians( -90.0 + res[3]))
    disp = np.matrix([ res[4], res[5], res[6] ]).T + np.matrix([ 0.0, 0.0, 0.0 ]).T
    return tr.composeHomogeneousTransform(rot, disp)
def rollTtool_pointer( residuals = np.zeros(6) ):
    '''Fixed transform between the laser-pointer tool frame and the roll joint frame.

    residuals: optional 6-vector of calibration corrections (see residualXform).
    NOTE(review): direction inferred from the name ("roll_T_tool") -- confirm.
    '''
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Rz( math.radians( -10.0 ))
    disp = dispResid + np.matrix([ 0.008, 0.0, 0.0 ]).T  # fixed offset (presumably meters)
    return tr.composeHomogeneousTransform(rot, disp)
def rollTtool_MA( residuals = np.zeros(6) ):
    '''Fixed transform between the multi-antenna (MA) tool frame and the roll joint frame.

    residuals: optional 6-vector of calibration corrections (see residualXform).
    '''
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Ry( math.radians( -90.0 ))
    disp = dispResid + np.matrix([ 0.0476, 0.0, 0.0 ]).T  # fixed offset (presumably meters)
    return tr.composeHomogeneousTransform(rot, disp)
def panTroll(rollAng, residuals = np.zeros(6) ):
    '''Transform between the roll and pan joint frames at the given roll angle.

    rollAng: current roll angle in radians (negated into an Rx rotation).
    residuals: optional 6-vector of calibration corrections (see residualXform).
    '''
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Rx( -1.0 * rollAng )
    disp = dispResid + np.matrix([0.02021, 0.0, 0.04236 ]).T  # fixed link offset (presumably meters)
    return tr.composeHomogeneousTransform(rot, disp)
def tiltTpan(panAng, residuals = np.zeros(6) ):
    '''Transform between the pan and tilt joint frames at the given pan angle.

    panAng: current pan angle in radians (negated into an Rz rotation).
    residuals: optional 6-vector of calibration corrections (see residualXform).
    '''
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Rz( -1.0 * panAng )
    disp = dispResid + np.matrix([ 0.07124, 0.0, 0.02243 ]).T  # fixed link offset (presumably meters)
    return tr.composeHomogeneousTransform(rot, disp)
def laserTtilt(tiltAng, residuals = np.zeros(6) ):
    '''Transform between the tilt joint frame and the laser frame at the given tilt angle.

    tiltAng: current tilt angle in radians (applied as a +Ry rotation).
    residuals: optional 6-vector of calibration corrections (see residualXform).
    '''
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Ry( +1.0 * tiltAng )
    disp = dispResid + np.matrix([ 0.03354, 0.0, 0.23669 ]).T  # fixed link offset (presumably meters)
    return tr.composeHomogeneousTransform(rot, disp)
def laserTtool_pointer(rollAng, panAng, tiltAng, residuals = np.zeros([4,6])):
    '''
    Full kinematic chain from the off-axis laser-pointer tool to the laser frame.

    This is specifically for the off-axis laser pointer!  Tool coordinate frame will change for each tool.
    rollAng, panAng, tiltAng: current joint angles in radians.
    Here, residuals are 4x6 array where:
        res[0] = rollTtool
        res[1] = panTroll
        res[2] = tiltTpan
        res[3] = laserTtilt
    '''
    res = residuals
    # Chain the per-joint transforms from the tool out to the laser.
    return laserTtilt(tiltAng, res[3] ) * tiltTpan(panAng, res[2] ) * panTroll(rollAng, res[1] ) * rollTtool_pointer(res[0])
def tool_pointerTlaser(rollAng, panAng, tiltAng, residuals = np.zeros([4,6])):
    '''Inverse of laserTtool_pointer: laser frame expressed in the pointer-tool frame.'''
    return tr.invertHomogeneousTransform( laserTtool_pointer(rollAng, panAng, tiltAng, residuals) )
def laserTtool_MA(rollAng, panAng, tiltAng, residuals = np.zeros([4,6])):
    '''
    Full kinematic chain from the multi-antenna tool to the laser frame.

    This is specifically for the multi-antenna (MA) tool attachment!  Tool coordinate frame will change for each tool.
    rollAng, panAng, tiltAng: current joint angles in radians.
    Here, residuals are 4x6 array where:
        res[0] = rollTtool
        res[1] = panTroll
        res[2] = tiltTpan
        res[3] = laserTtilt
    '''
    res = residuals
    # Chain the per-joint transforms from the tool out to the laser.
    return laserTtilt(tiltAng, res[3] ) * tiltTpan(panAng, res[2] ) * panTroll(rollAng, res[1] ) * rollTtool_MA(res[0])
def tool_MATlaser(rollAng, panAng, tiltAng, residuals = np.zeros([4,6])):
    '''Inverse of laserTtool_MA: laser frame expressed in the multi-antenna tool frame.'''
    return tr.invertHomogeneousTransform( laserTtool_MA(rollAng, panAng, tiltAng, residuals) )
| [
[
1,
0,
0.2545,
0.0091,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.2636,
0.0091,
0,
0.66,
0.0714,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.2636,
0.0091,
0,
... | [
"import numpy as np, math",
"import roslib; roslib.load_manifest('hrl_lib')",
"import roslib; roslib.load_manifest('hrl_lib')",
"import hrl_lib.transforms as tr",
"def residualXform( residuals ):\n '''\n residuals are np.array([ Rz2, Rx, Rz1, dx, dy, dz ])\n returns rotResid, dispResid\n '''\n ... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import numpy as np,math
import roslib; roslib.load_manifest('tf')
from tf import transformations
# Optional imports below:
# import scanr_transforms as trs [for camTlaser(cam_vec) , when "DesktopScanner"]
# x import codyRobot_camera_config as cc [for camera properties when "CodyRobot"]
# WARNING: THE "PR2" configurations are for a fixed demo setup. Actual values are
# populated by "acquire_pr2_data.py", or something similar
class configuration(object):
'''
Define camera properties matrix: fx, fy, and optical center in x,y
Define rotation/translation matrix between camera origin and laser cloud origin
Three types of robot/device accepted:
"desktopScanner" -- used to collect dataset of tablescans
"codyRobot" -- demonstrated object placement in clutter first on this platform
"dummyScanner" -- esentially same as codyRobot, with no dependancies.
"Other" -- quits with warning
"PR2" or -- requires tf_msg (geometry_msgs.msg.StampedTransform) to be passed
also. This is because the Transform is dynamic, unlike our other robots which have had a
fixed relative placement between the camera and tilting hokuyo.
"PR2example" -- initialized with a fixed transform between pointcloud
and camera, good for example data only.
'''
def __init__(self, path = '../data/', device = 'desktopScanner', tf_msg = None):
'''
set default values
'''
self.path = path
self.pointcloud_max_dist = 5.0
self.pointcloud_min_dist = 0.1
self.device = device
if device == 'PR2' or device == 'PR2example':
self.cam_name = 'wide_stereo/right/image_rect_color'
fx = 428.48 #focal lengths in pixels
fy = 428.35
self.cam_proj_mat = np.matrix([[fx, 0, 0, 0],
[0, fy, 0, 0],
[0, 0, 1, 0]])
cx = 323.4 #in pixels
cy = 242.9 #in pixels
self.cam_centers = (cx, cy)
self.cam_image_height= 480 #px
self.cam_image_width= 640 #px
#Transform properties will depend on wide_stereo_optical_frame
# -to-base_footprint TF since the PR2
#can move its head relative to the tilting hokuyo.
#
######### EXAMPLE result from TF on PR2 ##############
# header: --
# frame_id: /base_footprint
# child_frame_id: /wide_stereo_optical_frame
# transform:
# translation:
x = 0.111181322026
y= 0.0201393251186 #-0.09 #Using right camera is shifted over by 9cm.
z= 1.39969502374 #+0.051 #- 1.32 #****MY data was in BASE_LINK FRAME??
# rotation: (rotation same as optical frame of Gazebo_R and Gazebo_L_ optical)
rx= -0.625821685412
ry= 0.66370971141
rz= -0.30689909515
rw= 0.271384565597
#euler_angles = transformations.euler_from_quaternion([rx,ry,rz,rw])
#In euler: (-132, -1.4, -94) in degrees.
#####################################################
#kill soon
# Initialize THOK
self.thok_l1 = 0
self.thok_l2 = -0.055
self.thok_tilt_angles = (math.radians(40.0),math.radians(-40.0))
self.thok_devname = '/dev/robot/servo0'
self.thok_servonum = 5
self.thok_hoknum = 0
self.thok_scan_speed = math.radians(10.0) #speed=10 in lpi
        # Device-specific camera model + tilting-Hokuyo (THOK) configuration.
        # Each branch fills in: camera projection matrix, optical centers,
        # camera-from-laser transform (self.camTlaser), and servo parameters.
        if device == 'PR2' and tf_msg:
            #requires an appropriate TF message (type=TransformStamped) called tf_msg.
            #Condition: header.frame_id = '/base_footprint', child_frame_id = '/wide_stereo_optical_frame'
            t = tf_msg.transform.translation
            r = tf_msg.transform.rotation
            (x,y,z) = (t.x, t.y, t.z)
            (rx, ry, rz, rw) = (r.x, r.y, r.z, r.w)
            # Build a 4x4 homogeneous transform from the TF translation + quaternion.
            T = transformations.translation_matrix([x,y,z])
            R = transformations.quaternion_matrix([rx,ry,rz,rw])
            print 'R=',R
            print 'T=',T
            M = np.matrix(R);
            M[:3,3] = np.matrix(T)[:3,3]
            print 'M=',M
            #hack
            # Invert: TF gave base->camera, but camTlaser is used in the
            # opposite direction downstream.  NOTE(review): direction
            # convention inferred from the inversion only — confirm against callers.
            M = np.linalg.inv(M)
            self.camTlaser = M
            # (wrong) TRmatrix = [[-0.06939527, -0.66415251, 0.74436936, 0.11118132],
            #                     [-0.99730322, 0.02832033, -0.06770713, -0.06986067],
            #                     [ 0.02388707, -0.74706051, -0.66432673, 1.39969502],
            #                     [ 0.        , 0.        , 0.        , 1.        ]]
            #Result is a 4x4 array: [ R | t ]
            #                       [ 0 | 1 ]
            # np.array([[ 0.74436936, 0.06939527, 0.66415251, 0. ],
            #           [-0.06770713, 0.99730322, -0.02832033, 0. ],
            #           [-0.66432673, -0.02388707, 0.74706051, 0. ],
            #           [ 0.        , 0.        , 0.        , 1. ]])
        elif device == 'desktopScanner':
            import scanr_transforms as trs
            self.webcam_id = 1
            #From webcam_config definition formerly in robot1-->hrl_lib
            # Parameter definitions for camera used on desktopScanner
            webcam_parameters = {
                'DesktopWebcam':
                {
                    'calibration_image_width' : 960.0,
                    'calibration_image_height' : 720.0,
                    'focal_length_x_in_pixels' : 794.985,
                    'focal_length_y_in_pixels' : 797.122,
                    'optical_center_x_in_pixels' : 491.555,
                    'optical_center_y_in_pixels' : 344.289,
                    'lens_distortion_radial_1' : 0.0487641,
                    'lens_distortion_radial_2' : -0.128722,
                    'lens_distortion_tangential_1' : 0,
                    'lens_distortion_tangential_2' : 0,
                    'opencv_bayer_pattern' : None,
                    'color': True,
                }
            }
            #most code from travis scanr-class:
            # Initialize webcam
            self.cam_name = 'DesktopWebcam'
            cp = webcam_parameters[self.cam_name]
            fx = cp['focal_length_x_in_pixels']
            fy = cp['focal_length_y_in_pixels']
            # 3x4 pinhole projection matrix (no skew, principal point handled
            # separately via self.cam_centers).
            self.cam_proj_mat = np.matrix([[fx, 0, 0, 0],
                                           [0, fy, 0, 0],
                                           [0, 0, 1, 0]])
            self.cam_centers = ( cp['optical_center_x_in_pixels'], cp['optical_center_y_in_pixels'] )
            # Hand-tuned extrinsics: rotation angles + translation offsets fed
            # to scanr_transforms.camTlaser; cam_deltas/cam_names describe the
            # per-component tweak step and meaning for calibration.
            self.cam_deltas = np.array([0.1, 0.1, 0.1, 0.1, 0.001, 0.001, 0.001 ])
            self.cam_names = ['Ry_0', 'Rz_0', 'Rx_-90', 'Rz_-90', 'dx', 'dy', 'dz']
            self.cam_vec = np.array([ 1.8000 , 1.7000 , -2.6000 , 4.7500 , 0.0620 , 0.0320 , -0.0270 ])
            self.camTlaser = trs.camTlaser(self.cam_vec)
            self.scanner_metal_plate_offset = 0.05 #TODO
            # Initialize THOK
            self.thok_l1 = 0
            self.thok_l2 = 0.035
            self.thok_tilt_angles = (math.radians(40.0),math.radians(-40.0))
            self.thok_devname = '/dev/robot/desktopServos'
            self.thok_servonum = 19
            self.thok_hoknum = 0
            self.thok_scan_speed = math.radians(5.0)
        elif device == 'codyRobot' or device == 'dummyScanner':
            #just for testing/demonstration without dependencies outside of gt-ros-pkg
            self.webcam_id = 0
            #values from equilibrium_point_control/lpi.py
            self.cam_name = 'mekabotUTM' #also: 'dummyUTM'
            #Values copied from Cody
            #Please update with current values if they are expected to have changed.
            cp = {'calibration_image_width' : 640.0,
                  'calibration_image_height' : 480.0,
                  'focal_length_x_in_pixels' : 362.381,
                  'focal_length_y_in_pixels' : 362.260,
                  'optical_center_x_in_pixels' : 275.630,
                  'optical_center_y_in_pixels' : 267.914,
                  'lens_distortion_radial_1' : -0.270544,
                  'lens_distortion_radial_2' : 0.0530850,
                  'lens_distortion_tangential_1' : 0,
                  'lens_distortion_tangential_2' : 0,
                  'opencv_bayer_pattern' : 48, #same as cv.CV_BayerBG2BGR
                  'color': True,
                  'uid': 8520228
                  }
            fx = cp['focal_length_x_in_pixels']
            fy = cp['focal_length_y_in_pixels']
            self.cam_proj_mat = np.matrix([[fx, 0, 0, 0],
                                           [0, fy, 0, 0],
                                           [0, 0, 1, 0]])
            self.cam_centers = ( cp['optical_center_x_in_pixels'], cp['optical_center_y_in_pixels'] )
            #self.camTlaser = mcf.utmcam0Tglobal(mcf.globalTthok0(m),self.image_angle)
            # NOTE(review): self.camTlaser is NOT set in this branch (the line
            # above is commented out) — downstream code that reads it will fail
            # for codyRobot/dummyScanner; confirm whether that is intended.
            # Initialize THOK
            self.thok_l1 = 0
            self.thok_l2 = -0.055
            self.thok_tilt_angles = (math.radians(40.0),math.radians(-40.0))
            self.thok_devname = '/dev/robot/servo0'
            self.thok_servonum = 5
            self.thok_hoknum = 0
            self.thok_scan_speed = math.radians(10.0) #speed=10 in lpi
        else:
            # NOTE(review): despite the message text, this branch neither raises
            # nor exits — the object is left partially unconfigured.
            print '[configuration] ERROR: configuration.py: Device "%s" not recognized:' %( device )
            print 'Exiting. Cannot fetch transformation and camera properties for this device.'
| [
[
1,
0,
0.127,
0.0041,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.1311,
0.0041,
0,
0.66,
0.25,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1311,
0.0041,
0,
0.6... | [
"import numpy as np,math",
"import roslib; roslib.load_manifest('tf')",
"import roslib; roslib.load_manifest('tf')",
"from tf import transformations",
"class configuration(object):\n '''\n Define camera properties matrix: fx, fy, and optical center in x,y\n Define rotation/translation matrix betwee... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import label_object
class scan_dataset(object):
    '''
    Metadata record for a single recorded scan.

    All fields live in the ``dict`` attribute; ``__setattr__`` and
    ``__getattr__`` forward plain attribute access to that dictionary,
    so ``ds.title`` and ``ds.dict['title']`` name the same value.
    '''

    def __init__(self):
        '''Initialize every known field to its empty default value.'''
        defaults = {
            'title': '',
            'id': '',
            'polygons': [label_object.label_object()],
            'scan_filename': '',
            'image_filename': '',
            'image_artag_filename': '',
            'surface_id': '',
            'surface_height': '',
            'camera_height': '',
            'camera_angle': '',
            'surface_type': '',
            'ground_plane_normal': '',
            'ground_plane_three_points': '',
            'is_training_set': False,
            'is_test_set': False,
            'is_labeled': False,
            'ground_plane_rotation': '',
        }
        self.dict = defaults
        # Further keys are created later by processing code, e.g.
        # 'table_plane_translation' = np.matrix([0,0,0]).T,
        # 'ground_plane_translation' = np.matrix([0,0,1.25]).T,
        # and [id] = <unique scan name>.

    def __setattr__(self, name, value):
        # 'dict' is the one real instance attribute; every other name is
        # stored as an entry inside it.
        if name == 'dict':
            object.__setattr__(self, name, value)
        else:
            self.dict[name] = value

    def __getattr__(self, name):
        # Only reached when normal lookup fails: serve known fields from the
        # backing dictionary, otherwise defer to the default machinery
        # (which raises AttributeError).
        if name != 'dict' and name in self.dict:
            return self.dict[name]
        return object.__getattribute__(self, name)
| [
[
1,
0,
0.3718,
0.0128,
0,
0.66,
0,
589,
0,
1,
0,
0,
589,
0,
0
],
[
3,
0,
0.6923,
0.6026,
0,
0.66,
1,
727,
0,
3,
0,
0,
186,
0,
3
],
[
8,
1,
0.4231,
0.0385,
1,
0.1,
... | [
"import label_object",
"class scan_dataset(object):\n '''\n classdocs\n '''\n\n\n def __init__(self):\n '''",
" '''\n classdocs\n '''",
" def __init__(self):\n '''\n Constructor\n '''\n self.dict = {}\n self.dict['title'] = ''\n self.dic... |
#!/usr/bin/python
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
# \author (new edits) Jason Okerman (Healthcare Robotics Lab, Georgia Tech.)
'''
This source file is not currently supported.
It was made to help generate classifiers and label the clutter table datasets.
'''
#---------------
# Define Import Location Variables
LOC_DATA_LABELING = '/home/jokerman/svn/robot1_data/usr/martin/laser_camera_segmentation/labeling'
#
import roslib; roslib.load_manifest('clutter_segmentation')
from opencv.highgui import cvLoadImage #unneeded?
from PyQt4 import QtGui, QtCore
import opencv.cv as cv
import opencv.highgui as hg
import sys
import shutil #file operations
import os
import label_object, scan_dataset, scans_database
#take scans:
import canner
import processor
import configuration
import util as ut
#Formerly: import hrl_lib.util as ut
class labeling_tool(QtGui.QWidget):
draw_widget = None
display_mode = 'image'
display_3d_type = 'height'
def __init__(self, path, parent=None):
self.init_in_progress = True
self.path = path
# load configs for taking scans, etc:
self.config = configuration.configuration(path)
#create scanner and processor when needed:
self.scanner = False
self.processor = False
#
# load database:
self.scans_database = scans_database.scans_database()
self.scans_database.load(path,'database.pkl')
#get first dataset:
self.current_dataset = self.scans_database.get_dataset(0)
QtGui.QWidget.__init__(self, parent)
self.setWindowTitle('labeling tool')
left_layout = QtGui.QVBoxLayout()
self.draw_widget = draw_widget(self.current_dataset.polygons, self.scans_database.get_path() + '/' + self.current_dataset.image_filename, self)
title_layout = QtGui.QHBoxLayout()
take_scan_button = QtGui.QPushButton('Scan')
take_scan_button.setMaximumWidth(50)
title_layout.addWidget(take_scan_button)
self.connect(take_scan_button, QtCore.SIGNAL('clicked()'), self.slot_take_scan )
take_artag_image_button = QtGui.QPushButton('ARTag')
take_artag_image_button.setMaximumWidth(50)
title_layout.addWidget(take_artag_image_button)
self.connect(take_artag_image_button, QtCore.SIGNAL('clicked()'), self.slot_take_artag_image )
button = QtGui.QPushButton('Import Img')
title_layout.addWidget(button)
self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_import_image )
label = QtGui.QLabel("View: ")
title_layout.addWidget(label)
self.display_3d_button = QtGui.QPushButton('3D')
self.display_3d_button.setMaximumWidth(40)
title_layout.addWidget(self.display_3d_button)
self.connect(self.display_3d_button, QtCore.SIGNAL('clicked()'), self.slot_display_3d )
combobox = QtGui.QComboBox()
combobox.addItem("Height", QtCore.QVariant("height"))
combobox.addItem("Intensities", QtCore.QVariant("intensities"))
#combobox.addItem("objects", QtCore.QVariant("objects"))
combobox.addItem("Labels", QtCore.QVariant("labels"))
combobox.addItem("Classifier range", QtCore.QVariant("range"))
combobox.addItem("Classifier color", QtCore.QVariant("color"))
combobox.addItem("Classifier all", QtCore.QVariant("all"))
combobox.addItem("Classifier all+post", QtCore.QVariant("all_post"))
combobox.addItem("Baseline algo", QtCore.QVariant("baseline"))
combobox.addItem("h", QtCore.QVariant("h"))
combobox.addItem("s", QtCore.QVariant("s"))
combobox.addItem("v", QtCore.QVariant("v"))
self.connect(combobox, QtCore.SIGNAL('currentIndexChanged(int)'), self.slot_update_display_3d_type)
title_layout.addWidget(combobox)
self.display_3d_type_combobox = combobox;
self.display_3d_spheres_button = QtGui.QPushButton('3D_Spheres')
title_layout.addWidget(self.display_3d_spheres_button)
self.connect(self.display_3d_spheres_button, QtCore.SIGNAL('clicked()'), self.slot_display_3d_spheres )
self.display_intensity_button = QtGui.QPushButton('Intensity')
self.display_intensity_button.setMaximumWidth(50)
title_layout.addWidget(self.display_intensity_button)
self.connect(self.display_intensity_button, QtCore.SIGNAL('clicked()'), self.slot_display_intensity )
self.display_features_button = QtGui.QPushButton('Features')
title_layout.addWidget(self.display_features_button)
self.display_features_button.setMaximumWidth(50)
self.connect(self.display_features_button, QtCore.SIGNAL('clicked()'), self.slot_display_features )
self.display_labels_button = QtGui.QPushButton('Labels')
title_layout.addWidget(self.display_labels_button)
self.display_labels_button.setMaximumWidth(50)
self.connect(self.display_labels_button, QtCore.SIGNAL('clicked()'), self.slot_display_labels )
###
self.display_masks_button = QtGui.QPushButton('Masks')
title_layout.addWidget(self.display_masks_button)
self.display_masks_button.setMaximumWidth(50)
self.connect(self.display_masks_button, QtCore.SIGNAL('clicked()'), self.slot_display_masks )
###
self.display_stats_button = QtGui.QPushButton('Stats')
title_layout.addWidget(self.display_stats_button)
self.display_stats_button.setMaximumWidth(50)
self.connect(self.display_stats_button, QtCore.SIGNAL('clicked()'), self.slot_display_stats )
self.display_global_stats_button = QtGui.QPushButton('Global Stats')
title_layout.addWidget(self.display_global_stats_button)
self.display_global_stats_button.setMaximumWidth(50)
self.connect(self.display_global_stats_button, QtCore.SIGNAL('clicked()'), self.slot_display_global_stats )
self.line_edits = []
self.add_line_edit('Title:',title_layout,'title')
first_dataset_button = QtGui.QPushButton('<<')
first_dataset_button.setMaximumWidth(30)
title_layout.addWidget(first_dataset_button)
self.connect(first_dataset_button, QtCore.SIGNAL('clicked()'), self.slot_first_dataset )
prev_dataset_button = QtGui.QPushButton('<')
prev_dataset_button.setMaximumWidth(30)
title_layout.addWidget(prev_dataset_button)
self.connect(prev_dataset_button, QtCore.SIGNAL('clicked()'), self.slot_prev_dataset )
next_dataset_button = QtGui.QPushButton('>')
next_dataset_button.setMaximumWidth(30)
title_layout.addWidget(next_dataset_button)
self.connect(next_dataset_button, QtCore.SIGNAL('clicked()'), self.slot_next_dataset )
last_dataset_button = QtGui.QPushButton('>>')
last_dataset_button.setMaximumWidth(30)
title_layout.addWidget(last_dataset_button)
self.connect(last_dataset_button, QtCore.SIGNAL('clicked()'), self.slot_last_dataset )
save_button = QtGui.QPushButton('Save')
title_layout.addWidget(save_button)
save_button.setMaximumWidth(50)
self.connect(save_button, QtCore.SIGNAL('clicked()'), self.slot_save )
delete_button = QtGui.QPushButton('Delete')
title_layout.addWidget(delete_button)
delete_button.setMaximumWidth(50)
self.connect(delete_button, QtCore.SIGNAL('clicked()'), self.slot_delete )
self.connect(self.draw_widget, QtCore.SIGNAL('sigPolyChanged'), self.slot_update_polygons)
self.connect(self.draw_widget, QtCore.SIGNAL('sigPolyLabelChanged'), self.slot_update_polygon_label)
self.connect(self.draw_widget, QtCore.SIGNAL('sigDefineGroundPlane'), self.slot_define_ground_plane)
left_layout.addLayout(title_layout)
#second row:
row2_layout = QtGui.QHBoxLayout()
left_layout.addLayout(row2_layout)
label = QtGui.QLabel("Id:")
row2_layout.addWidget(label)
self.id_label = QtGui.QLabel("")
row2_layout.addWidget(self.id_label)
self.add_line_edit('Surface: ID:',row2_layout,'surface_id')
self.add_line_edit('Height',row2_layout,'surface_height')
label = QtGui.QLabel("Type: ")
row2_layout.addWidget(label)
combobox = QtGui.QComboBox()
combobox.addItem("Table Office", QtCore.QVariant("table_office"))
combobox.addItem("Table Dorm", QtCore.QVariant("table_dorm"))
combobox.addItem("Table House", QtCore.QVariant("table_house"))
combobox.addItem("Shelf Office", QtCore.QVariant("shelf_office"))
combobox.addItem("Shelf Dorm", QtCore.QVariant("shelf_dorm"))
combobox.addItem("Shelf House", QtCore.QVariant("shelf_house"))
self.connect(combobox, QtCore.SIGNAL('currentIndexChanged(int)'), self.slot_update_surface_type)
row2_layout.addWidget(combobox)
self.surface_type_combobox = combobox;
self.add_line_edit('Camera: Height:',row2_layout,'camera_height')
self.add_line_edit('Camera: Angle:',row2_layout,'camera_angle')
#####################################
#thrid row:
row3_layout = QtGui.QHBoxLayout()
left_layout.addLayout(row3_layout)
#checkboxes:
button = QtGui.QPushButton("&gen'n'save features")
row3_layout.addWidget(button)
self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_generate_save_features )
checkbox = QtGui.QCheckBox('&Training Set')
row3_layout.addWidget(checkbox)
self.connect(checkbox, QtCore.SIGNAL('stateChanged(int)'), self.slot_update_training_set)
self.checkbox_training_set = checkbox
checkbox = QtGui.QCheckBox('Te&st Set')
row3_layout.addWidget(checkbox)
self.connect(checkbox, QtCore.SIGNAL('stateChanged(int)'), self.slot_update_test_set)
self.checkbox_test_set = checkbox
checkbox = QtGui.QCheckBox('Labels, Groundp. checked')
row3_layout.addWidget(checkbox)
self.connect(checkbox, QtCore.SIGNAL('stateChanged(int)'), self.slot_update_is_labeled)
self.checkbox_is_labeled = checkbox
button = QtGui.QPushButton("Train'n'save Classifiers (training set)")
row3_layout.addWidget(button)
self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_train_and_save_Classifiers )
button = QtGui.QPushButton('Test Classifiers (on current)')
row3_layout.addWidget(button)
self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_test_Classifiers )
button = QtGui.QPushButton('Test Classifiers (on testset)')
row3_layout.addWidget(button)
self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_test_Classifiers_on_testset )
button = QtGui.QPushButton('Load Classifiers')
row3_layout.addWidget(button)
self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_load_Classifiers )
# button = QtGui.QPushButton('Save Classifier')
# row3_layout.addWidget(button)
# self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_save_Classifier )
#####################################
left_layout.addWidget(self.draw_widget)
self.right_layout = QtGui.QVBoxLayout()
self.right_layout.setAlignment(QtCore.Qt.AlignTop)
self.outer_layout = QtGui.QHBoxLayout()
self.outer_layout.addLayout(left_layout)
self.outer_layout.addLayout(self.right_layout)
self.polygon_comboboxes = []
self.add_polygon_combobox()
self.slot_update_polygons(self.current_dataset.polygons,0)
self.setLayout(self.outer_layout)
self.resize(900, 700)
self.load_values_from_dataset()
self.init_in_progress = False
#at startup, display newest:
self.slot_last_dataset()
def slot_update_training_set(self, checkState):
if checkState:
self.current_dataset.is_training_set = True
else:
self.current_dataset.is_training_set = False
def slot_update_test_set(self, checkState):
if checkState:
self.current_dataset.is_test_set = True
else:
self.current_dataset.is_test_set = False
def slot_update_is_labeled(self, checkState):
if checkState:
self.current_dataset.is_labeled = True
else:
self.current_dataset.is_labeled = False
def closeEvent(self, x):
print "Exit: saving database..."
self.slot_save()
def slot_import_image(self):
fileName = QtGui.QFileDialog.getOpenFileName(self,"Open Image", self.path, "Image Files (*.png)")
print "Import image into new dataset:" + fileName
name = ut.formatted_time()
new_dataset = scan_dataset.scan_dataset()
new_dataset.id = name
new_dataset.image_filename = 'data/'+name+'_image.png'
shutil.copy(fileName,self.path+'/'+new_dataset.image_filename)
self.scans_database.add_dataset(new_dataset)
#proceed to new dataset:
while True == self.slot_next_dataset():
pass
def add_line_edit(self,label, layout, variable):
label = QtGui.QLabel(label)
line_edit = QtGui.QLineEdit()
line_edit.setMinimumWidth(80)
self.line_edits.append((line_edit,variable))
layout.addWidget(label)
layout.addWidget(line_edit)
self.connect(line_edit, QtCore.SIGNAL('textEdited (const QString&)'), self.slot_line_edit_changed )
return line_edit
def slot_line_edit_changed(self,text):
if True == self.init_in_progress:
return
for (line_edit, variable) in self.line_edits:
self.current_dataset.dict[variable] = str(line_edit.text())
def slot_next_dataset(self):
dataset = self.scans_database.get_next_dataset()
if False != dataset:
self.current_dataset = dataset
self.load_values_from_dataset()
return True
return False
def slot_prev_dataset(self):
dataset = self.scans_database.get_prev_dataset()
if False != dataset:
self.current_dataset = dataset
self.load_values_from_dataset()
return True
return False
def slot_first_dataset(self):
dataset = self.scans_database.get_first_dataset()
if False != dataset:
self.current_dataset = dataset
self.load_values_from_dataset()
return True
return False
def slot_last_dataset(self):
dataset = self.scans_database.get_last_dataset()
if False != dataset:
self.current_dataset = dataset
self.load_values_from_dataset()
return True
return False
def load_values_from_dataset(self):
self.init_in_progress = True
self.id_label.setText(self.current_dataset.id)
for (line_edit, variable) in self.line_edits:
line_edit.setText(self.current_dataset.dict[variable])
for index, box in enumerate(self.polygon_comboboxes):
if index < len(self.current_dataset.polygons):
print str(index) + " load label:" + self.current_dataset.polygons[index].get_label()
boxindex = box.findData(QtCore.QVariant(self.current_dataset.polygons[index].get_label()))
box.setCurrentIndex(boxindex)
else: #set default to first:
box.setCurrentIndex(0)
box = self.surface_type_combobox
boxindex = box.findData(QtCore.QVariant(self.current_dataset.surface_type))
box.setCurrentIndex(boxindex)
print self.current_dataset.is_training_set
if self.current_dataset.is_training_set:
self.checkbox_training_set.setCheckState(QtCore.Qt.Checked)
else:
self.checkbox_training_set.setCheckState(QtCore.Qt.Unchecked)
if self.current_dataset.is_test_set:
self.checkbox_test_set.setCheckState(QtCore.Qt.Checked)
else:
self.checkbox_test_set.setCheckState(QtCore.Qt.Unchecked)
if self.current_dataset.is_labeled:
self.checkbox_is_labeled.setCheckState(QtCore.Qt.Checked)
else:
self.checkbox_is_labeled.setCheckState(QtCore.Qt.Unchecked)
#hide button if there is no 3d data:
print self.current_dataset.scan_filename
if '' == self.current_dataset.scan_filename:
self.display_3d_button.setEnabled(False)
self.display_3d_spheres_button.setEnabled(False)
self.display_intensity_button.setEnabled(False)
else:
self.display_3d_button.setEnabled(True)
self.display_3d_spheres_button.setEnabled(True)
self.display_intensity_button.setEnabled(True)
self.display_mode = 'image'
self.draw_widget.set_polygons(self.current_dataset.polygons)
self.draw_widget.set_image(self.scans_database.get_path() + '/' + self.current_dataset.image_filename)
self.init_in_progress = False
def slot_take_artag_image(self):
if False == self.scanner:
self.scanner = scanner.scanner(self.config)
if False == self.processor:
self.processor = processor.processor(self.config)
img = self.scanner.take_artag_image()
self.current_dataset.image_artag_filename = self.scanner.save_artag_image(self.current_dataset.id)
self.slot_save() #save for consistency with files
if self.processor.read_artag(img).any():
print "SUCCESS in reading ARTag"
else:
print "FAILURE in reading ARTag - try again!"
def slot_take_scan(self):
#save database, let scanner add dataset, reload it then
self.slot_save()
if False == self.scanner:
self.scanner = scanner.scanner(self.config)
if False == self.processor:
self.processor = processor.processor(self.config)
name = ut.formatted_time()
self.scanner.capture_and_save(name)
#self.processor.load_raw_data(name)
#self.processor.load_metadata(name)
#self.processor.process_raw_data()
#self.processor.save_mapped_image(name)
#self.processor.display_all_data()
print 'scan ' + name + ' taken'
self.scans_database.load(self.path,'database.pkl')
#proceed to new scan:
while True == self.slot_next_dataset():
pass
def slot_display_intensity(self):
if self.display_mode != 'intensities':
if False == self.processor:
self.processor = processor.processor(self.config)
#reset ground plane:
self.current_dataset.ground_plane_normal = ''
self.current_dataset.ground_plane_three_points = ''
self.slot_save()
self.processor.load_data(self.current_dataset.id)
self.processor.process_intensities()
filename = self.processor.save_intensity_image(self.current_dataset.id)
#self.processor.display_intensities()
self.display_mode = 'intensities'
self.draw_widget.set_image(filename)
else:
#display normal image
self.display_mode = 'image'
self.draw_widget.set_image(self.scans_database.get_path() + '/' + self.current_dataset.image_filename)
def slot_display_features(self):
if self.display_mode != 'features':
if False == self.processor:
self.processor = processor.processor(self.config)
self.processor.load_data(self.current_dataset.id)
self.processor.process_intensities()
filename = self.processor.save_intensity_image(self.current_dataset.id)
self.display_mode = 'features'
self.draw_widget.set_image(filename)
else:
#display normal image
self.display_mode = 'image'
self.draw_widget.set_image(self.scans_database.get_path() + '/' + self.current_dataset.image_filename)
def slot_display_labels(self):
if self.display_mode != 'labels':
if False == self.processor:
self.processor = processor.processor(self.config)
self.processor.load_data(self.current_dataset.id)
self.processor.process_labels(self.display_3d_type)
filename = self.processor.save_labels_image(self.display_3d_type)
self.draw_widget.set_image(filename)
self.display_mode = 'labels'
else:
#display normal image
self.draw_widget.set_image(self.scans_database.get_path() + '/' + self.current_dataset.image_filename)
self.display_mode = 'image'
###
def slot_display_masks(self):
if False == self.processor:
self.processor = processor.processor(self.config)
self.processor.load_data(self.current_dataset.id)
if self.display_mode != 'labels':
self.processor.process_masks(self.display_3d_type)
self.display_mode = 'labels'
filename = self.processor.save_masks_image(self.display_3d_type) #saves pic in results
else:
self.processor.process_masks(self.display_3d_type, True) #show clutter mask NOT placement mask
self.display_mode = 'image'
filename = self.processor.save_masks_image(self.display_3d_type, True) #saves pic in results
self.draw_widget.set_image(filename) #loads picture saved previously
###
def slot_display_stats(self):
if False == self.processor:
self.processor = processor.processor(self.config)
self.processor.load_data(self.current_dataset.id)
self.processor.display_stats()
def slot_display_global_stats(self):
if False == self.processor:
self.processor = processor.processor(self.config)
self.processor.load_data(self.current_dataset.id)
self.processor.display_stats(True)
def slot_display_3d_spheres(self):
self.slot_display_3d(True)
def slot_display_3d(self, spheres = False):
print 'Inside slot_display_3d'
if False == self.processor:
self.processor = processor.processor(self.config)
#save data first so the processor can load it:
print 'Before slot_save'
self.slot_save()
print 'Before load_data'
self.processor.load_data(self.current_dataset.id)
#self.processor.create_polygon_images()
print 'Before process_raw_data'
self.processor.process_raw_data()
#pc.save_mapped_image(name)
print 'Before display_3d'
self.processor.display_3d(self.display_3d_type, spheres)
print 'After display_3d'
def slot_train_and_save_Classifiers(self):
if False == self.processor:
self.processor = processor.processor(self.config)
#save data first so the processor can load it:
self.slot_save()
self.processor.load_data(self.current_dataset.id)
self.processor.train_and_save_Classifiers()
def slot_generate_save_features(self):
if False == self.processor:
self.processor = processor.processor(self.config)
#save data first so the processor can load it:
self.slot_save()
self.processor.load_data(self.current_dataset.id)
self.processor.generate_save_features()
def slot_test_Classifiers(self):
if False == self.processor:
self.processor = processor.processor(self.config)
self.slot_save() #save data first so the processor can load it:
self.processor.load_data(self.current_dataset.id)
self.processor.train_and_save_Classifiers()
self.processor.test_Classifiers()
def slot_test_Classifiers_on_testset(self):
if False == self.processor:
self.processor = processor.processor(self.config)
self.slot_save() #save data first so the processor can load it:
self.processor.load_data(self.current_dataset.id)
self.processor.train_and_save_Classifiers()
self.processor.test_classifiers_on_testset()
def slot_load_Classifiers(self):
if False == self.processor:
self.processor = processor.processor(self.config)
self.processor.load_Classifiers()
def slot_save_Classifier(self):
if False == self.processor:
print 'ERROR: no processor object exists -> no Classifier to save!'
return
self.processor.save_Classifier()
def add_polygon_combobox(self):
combobox = QtGui.QComboBox()
combobox.addItem("Object", QtCore.QVariant("object"))
combobox.addItem("Surface", QtCore.QVariant("surface"))
combobox.addItem("Region of Interest (ROI)", QtCore.QVariant("roi"))
combobox.addItem("Background", QtCore.QVariant("background"))
combobox.addItem("Visible Surface-Edge", QtCore.QVariant("edge"))
combobox.addItem("Wall-Surface-Edge", QtCore.QVariant("edge_up"))
combobox.addItem("Downward-Surface-Edge", QtCore.QVariant("edge_down"))
combobox.setCurrentIndex(0)
self.connect(combobox, QtCore.SIGNAL('currentIndexChanged(int)'), self.slot_update_polygon_labels)
self.polygon_comboboxes.append(combobox)
self.right_layout.addWidget(combobox, QtCore.Qt.AlignTop)
self.slot_update_polygon_labels()
def slot_delete(self):
#delete scan-files:
if os.path.isfile(self.current_dataset.scan_filename):
os.remove(self.path + '/' + self.current_dataset.scan_filename);
if os.path.isfile(self.current_dataset.image_filename):
os.remove(self.path + '/' + self.current_dataset.image_filename);
if os.path.isfile(self.current_dataset.image_artag_filename):
os.remove(self.path + '/' + self.current_dataset.image_artag_filename);
#delete metadata
self.current_dataset = self.scans_database.delete_current_dataset()
self.load_values_from_dataset()
self.slot_save() #save for consistency with files
def slot_save(self):
self.scans_database.save()
def slot_update_surface_type(self):
if True == self.init_in_progress:
return
box = self.surface_type_combobox
self.current_dataset.surface_type = str(box.itemData(box.currentIndex()).toString())
def slot_update_display_3d_type(self):
if True == self.init_in_progress:
return
box = self.display_3d_type_combobox
self.display_3d_type = str(box.itemData(box.currentIndex()).toString())
def slot_update_polygon_label(self, index, label):
if True == self.init_in_progress:
return
box = self.polygon_comboboxes[index]
boxindex = box.findData(QtCore.QVariant(label))
box.setCurrentIndex(boxindex)
self.draw_widget.update()
def slot_update_polygon_labels(self):
if True == self.init_in_progress:
return
for index, box in enumerate(self.polygon_comboboxes):
if index < len(self.current_dataset.polygons):
self.current_dataset.polygons[index].set_label(str(box.itemData(box.currentIndex()).toString()))
print str(index) + " xx " + str(box.itemData(box.currentIndex()).toString())
self.draw_widget.update()
def slot_update_polygons(self, polygons, current_index):
while len(self.polygon_comboboxes) < len(polygons):
self.add_polygon_combobox()
#self.polygon_comboboxes[self.current_polygon_index].x()
for index, box in enumerate(self.polygon_comboboxes):
if index < len(polygons):
self.polygon_comboboxes[index].show()
else:
self.polygon_comboboxes[index].hide()
self.update()
def paintEvent(self, event):
painter = QtGui.QPainter()
painter.begin(self)
x = self.polygon_comboboxes[self.draw_widget.get_current_polygon_index()].x()
y = self.polygon_comboboxes[self.draw_widget.get_current_polygon_index()].y()
color = QtGui.QColor(255,0,0)
painter.setPen(color)
painter.setBrush(color)
painter.drawEllipse(QtCore.QRectF(x-8,y+8,6,6))
painter.end()
    def get_display_mode(self):
        # Current mode string; the child draw_widget queries this to decide
        # what to paint and how to interpret mouse clicks.
        return self.display_mode
    def slot_define_ground_plane(self, ground_plane_points):
        """Fit the ground plane from the clicked image points, then leave intensity mode."""
        #assumes that intensity image is loaded in processor!
        (self.current_dataset.ground_plane_normal, self.current_dataset.ground_plane_three_points) = self.processor.get_3d_plane_normal(ground_plane_points)
        self.slot_display_intensity() #switch back to image mode
class draw_widget(QtGui.QLabel):
    """Image canvas of the labeling tool.

    Shows the scan image as a scaled pixmap and overlays the labeled
    polygons.  Mouse clicks add/remove polygon points (image mode), collect
    ground-plane points (intensities mode) or probe feature vectors
    (features mode); key presses switch polygons, assign labels and zoom.
    Communicates with the parent labeling tool through the Qt signals
    "sigPolyChanged", "sigPolyLabelChanged" and "sigDefineGroundPlane".
    """
    # NOTE(review): mutable class-level attribute, shared by all instances
    # until mousePressEvent rebinds it per instance; harmless while only one
    # draw_widget exists, but safer initialized in __init__.
    ground_plane_points = []
    def __init__(self,polygons, image_filename, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.scaleFactor = False #init is done later
        self.setBackgroundRole(QtGui.QPalette.Base)
        #self.setSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
        self.setSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        self.setScaledContents(True)
        self.set_polygons(polygons)
        self.set_image(image_filename)
        self.setScaleFactor(0.8)
    def setScaleFactor(self, f):
        # Store the zoom factor and resize the pixmap accordingly.
        self.scaleFactor = f
        self.updateImageSize()
    def updateImageSize(self):
        """Recompute widget and pixmap size for the current display mode.

        NOTE(review): overrides whatever factor setScaleFactor stored with a
        mode-dependent constant (1 or 0.8), so +/- zooming is transient.
        """
        if self.parent().get_display_mode() == 'intensities' or self.parent().get_display_mode() == 'features':
            self.scaleFactor = 1
        else:
            self.scaleFactor = 0.8
        self.parent().resize(900, 700)
        # Pin min == max so the layout cannot stretch the image.
        self.setMinimumHeight(self.image.height() * self.scaleFactor)
        self.setMinimumWidth(self.image.width() * self.scaleFactor)
        self.setMaximumHeight(self.image.height() * self.scaleFactor)
        self.setMaximumWidth(self.image.width() * self.scaleFactor)
        pixmap = QtGui.QPixmap.fromImage(self.image)
        self.resize(self.scaleFactor * pixmap.size());
        self.setPixmap(pixmap);
    def set_polygons(self, polygons):
        """Replace the polygon list and reset the selection to the first polygon."""
        self.polygons = polygons
        self.current_polygon_index = 0
        self.update()
        self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
    def set_image(self, filename):
        """Load *filename* as the background image (placeholder image if missing)."""
        print filename
        if os.path.isfile(filename):
            self.image = QtGui.QImage(filename)
        else:
            self.image = QtGui.QImage('noimage.png')
        self.updateImageSize()
        self.update()
    def paintEvent(self, event):
        """Paint the base pixmap, then overlay polygons or ground-plane points."""
        # draw image as label-pixmap
        QtGui.QLabel.paintEvent(self,event)
        painter = QtGui.QPainter()
        painter.begin(self)
        if self.parent().get_display_mode() == 'image' or self.parent().get_display_mode() == 'labels':
            # One pen color per polygon label; red marks the active polygon.
            color = QtGui.QColor(0,0,255)
            color_surface = QtGui.QColor(0,255,0)
            color_roi = QtGui.QColor(255,255,255)
            color_edge = QtGui.QColor(255,255,0)
            color_edge_up = QtGui.QColor(255,255,255)
            color_edge_down = QtGui.QColor(255,150,255)
            color_background = QtGui.QColor(255,0,255)
            color_current = QtGui.QColor(255,0,0)
            for index, polygon in enumerate(self.polygons):
                last_point = (-1,-1)
                first = True;
                if self.current_polygon_index != index or self.parent().get_display_mode() != 'image':
                    if polygon.get_label() == 'surface':
                        painter.setPen(color_surface)
                    elif polygon.get_label() == 'roi':
                        painter.setPen(color_roi)
                    elif polygon.get_label() == 'edge':
                        painter.setPen(color_edge)
                    elif polygon.get_label() == 'edge_up':
                        painter.setPen(color_edge_up)
                    elif polygon.get_label() == 'edge_down':
                        painter.setPen(color_edge_down)
                    elif polygon.get_label() == 'background':
                        painter.setPen(color_background)
                    else:
                        painter.setPen(color)
                else:
                    painter.setPen(color_current)
                # Connect consecutive vertices; image coordinates are scaled
                # to widget coordinates by scaleFactor.
                for point in polygon.get_points():
                    if False == first:
                        painter.drawLine(QtCore.QPointF(point[0],point[1]) * self.scaleFactor, QtCore.QPointF(last_point[0],last_point[1]) * self.scaleFactor)
                    last_point = point
                    first = False
                # Close the outline for finished polygons; for the polygon
                # being edited in image mode, draw its vertices as dots.
                if (self.parent().get_display_mode() != 'image' or self.current_polygon_index != index ) and polygon.get_type() == 'polygon' and len(polygon.get_points()) :
                    painter.drawLine(QtCore.QPointF(last_point[0],last_point[1]) * self.scaleFactor, QtCore.QPointF(polygon.get_points()[0][0],polygon.get_points()[0][1]) * self.scaleFactor)
                else:
                    for point in polygon.get_points():
                        painter.drawEllipse(QtCore.QRectF(point[0] * self.scaleFactor-3,point[1] * self.scaleFactor-3,6,6))
        elif self.parent().get_display_mode() == 'intensities':
            # Mark the ground-plane points clicked so far.
            color = QtGui.QColor(255,0,255)
            painter.setPen(color)
            for point in self.ground_plane_points:
                painter.drawEllipse(QtCore.QRectF(point[0] * self.scaleFactor-3,point[1] * self.scaleFactor-3,6,6))
        painter.end()
    def mousePressEvent(self,event):
        """Dispatch clicks by display mode (see class docstring)."""
        if self.hasFocus():
            if self.parent().get_display_mode() == 'image':
                # Left click: append a vertex to the active polygon.
                if event.button() == QtCore.Qt.LeftButton:
                    #print 'coords:', x,' ',y
                    point = (event.x() / self.scaleFactor, event.y() / self.scaleFactor)
                    self.polygons[self.current_polygon_index].add_point(point)
                    self.update()
                    self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
                # Right click: remove the most recently added vertex.
                if event.button() == QtCore.Qt.RightButton:
                    if False == self.polygons[self.current_polygon_index].is_empty():
                        self.polygons[self.current_polygon_index].delete_last_point()
                        self.update()
                        self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
            elif self.parent().get_display_mode() == 'intensities':
                # Collect three valid points, then signal the ground-plane fit.
                point = (event.x() / self.scaleFactor, event.y() / self.scaleFactor)
                print 'point:', point
                if True == self.parent().processor.check_3d_plane_point(point):
                    self.ground_plane_points.append(point)
                    if len(self.ground_plane_points) < 3:
                        self.update()
                    else:
                        self.emit(QtCore.SIGNAL("sigDefineGroundPlane"), self.ground_plane_points)
                        self.ground_plane_points = []
            elif self.parent().get_display_mode() == 'features':
                # Debug aid: compute and display the feature vector of the
                # 3D point under the cursor.
                point = (event.x() / self.scaleFactor, event.y() / self.scaleFactor)
                if True == self.parent().processor.check_3d_plane_point(point):
                    print 'point:', point
                    point3d = self.parent().processor.get_3d_point(point)
                    print 'point3d',point3d
                    index = self.parent().processor.get_3d_point_index_in_unrotated(point3d)
                    self.parent().processor.load_data(self.parent().current_dataset.id)
                    self.parent().processor.process_raw_data()
                    self.parent().processor.features.prepare([index])
                    self.parent().processor.feature_type = 'gaussian_histograms'
                    fv = self.parent().processor.features.get_featurevector(index,0)
                    print 'fv',fv
                    self.parent().processor.display_featurevector(fv)
                    #reload intensity data for next click
                    self.parent().processor.load_data(self.parent().current_dataset.id)
                    self.parent().processor.process_intensities()
                    #print 'fv:', self.parent().processor.get_point_featurevector(index, self.parent().processor.pts3d_int)
                    #print 'WARNING: THIS IS NOT WORKING YET BECAUSE OF MISSING INTENSITY INDEX MAPPING FOR GRAZEEFFCT REMOVED PTS'
        else:
            # First click merely grabs keyboard focus.
            self.setFocus()
    def mouseDoubleClickEvent(self,event):
        """Double left click finishes the current polygon and starts a new one."""
        if self.parent().get_display_mode() == 'image':
            if event.button() == QtCore.Qt.LeftButton:
                self.start_new_polygon()
                self.update()
                self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
    def start_new_polygon(self):
        # Append a fresh polygon and select it -- but only if the current one
        # has points (avoids stacking up empty polygons).
        if False == self.polygons[self.current_polygon_index].is_empty():
            # if self.current_polygon_index == len(self.polygons) - 1:
            self.polygons.append(label_object.label_object()) #last one, append new
            self.current_polygon_index = len(self.polygons) - 1
            print "new poly index: ", self.current_polygon_index
    def delete_empty_polygon(self):
        """Drop the active polygon if it is empty (never the only one); True if deleted."""
        if True == self.polygons[self.current_polygon_index].is_empty():
            #and it isn't the only one:
            if 1 != len(self.polygons):
                del self.polygons[self.current_polygon_index]
                if 0 != self.current_polygon_index:
                    self.current_polygon_index -= 1
                print "new poly index: ", self.current_polygon_index
                return True
        return False
    def keyPressEvent(self, event):
        """Shortcuts: arrows switch polygons, letter keys assign labels, +/- zoom."""
        key = event.key()
        if key == QtCore.Qt.Key_Right:
            print 'right'
            if self.current_polygon_index < len(self.polygons) - 1:
                self.delete_empty_polygon()
                self.current_polygon_index += 1
                print "czurrent poly index: ", self.current_polygon_index
            else:
                self.start_new_polygon()
            self.parent().slot_update_polygon_labels()
            self.update()
            self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
        elif key == QtCore.Qt.Key_Left:
            print 'left'
            if self.current_polygon_index > 0:
                # If the empty polygon was deleted, the index already points
                # at the previous polygon; only decrement otherwise.
                if False == self.delete_empty_polygon():
                    self.current_polygon_index -= 1
                print "current poly index: ", self.current_polygon_index
                self.update()
                self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
        elif key == QtCore.Qt.Key_O:
            print 'o'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'object')
        elif key == QtCore.Qt.Key_S:
            print 's'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'surface')
        elif key == QtCore.Qt.Key_R:
            print 'r'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'roi')
        elif key == QtCore.Qt.Key_B:
            print 'b'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'background')
        elif key == QtCore.Qt.Key_E:
            print 'e'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'edge')
        elif key == QtCore.Qt.Key_U:
            print 'u'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'edge_up')
        elif key == QtCore.Qt.Key_D:
            print 'd'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'edge_down')
        elif key == QtCore.Qt.Key_Plus:
            print '+'
            self.setScaleFactor(self.scaleFactor * 1.25)
            self.update()
        elif key == QtCore.Qt.Key_Minus:
            print '-'
            self.setScaleFactor(self.scaleFactor * 0.8)
            self.update()
        else:
            QtGui.QWidget.keyPressEvent(self, event)
    def get_polygons(self):
        # Accessor used by the parent tool when saving the dataset.
        return self.polygons
    def get_current_polygon_index(self):
        # Index of the polygon currently being edited.
        return self.current_polygon_index
if __name__ == "__main__":
    # Launch the labeling GUI on the configured data directory.
    app = QtGui.QApplication(sys.argv)
    # Bind the window to a name distinct from the class: the original
    # rebound `labeling_tool` to its own instance, shadowing the class.
    tool = labeling_tool(LOC_DATA_LABELING)
    tool.show()
    sys.exit(app.exec_())
| [
[
8,
0,
0.0331,
0.0049,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0399,
0.001,
0,
0.66,
0.0556,
800,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0438,
0.001,
0,
0.66,
... | [
"'''\n This source file is not currently supported.\n It was made to help generate classifiers and label the clutter table datasets.\n\n'''",
"LOC_DATA_LABELING = '/home/jokerman/svn/robot1_data/usr/martin/laser_camera_segmentation/labeling'",
"import roslib; roslib.load_manifest('clutter_segmentation')",... |
#Used for first set of util functions
import os
import numpy as np
import pickle as pk
import time
# ** Do we need this? **
###from hrl_lib.msg import NumpyArray
# ** removed dependancy for laser_camera_segmentation use **
#Used for second set of util functions
from opencv.cv import *
from opencv.highgui import *
import numpy as np
import Image as Image
## Returns a string that can be used as a timestamp (hours and minutes) in logfiles
# @return timestamp-string
def getTime():
    """Return a '[HH:MM:SS]' timestamp string for logfile lines."""
    return '[%s]' % time.strftime("%H:%M:%S", time.localtime())
def standard_rad(t):
    """Wrap the angle *t* (radians) into the canonical [-pi, pi) range."""
    # Positive and negative inputs use mirrored modulo expressions so the
    # wrapping is symmetric around zero.
    if t > 0:
        return np.mod(t + np.pi, 2.0 * np.pi) - np.pi
    return np.mod(t - np.pi, -2.0 * np.pi) + np.pi
##
# Converts a list of numpy matrices to one large matrix
# @param list_mat the list of little matrices
# @param axis axis to concatenate little matrices
# @return one large numpy matrix
def list_mat_to_mat(list_mat, axis=0):
    """Stack the matrices in *list_mat* along *axis* into one large matrix."""
    pieces = tuple(list_mat)
    return np.concatenate(pieces, axis=axis)
## returns current time as a string: year|month|date_hours|min|sec.
## @return current time as a string: year|month|date_hours|min|sec.
def formatted_time():
    """Current local time formatted as year|month|day_hours|min|sec."""
    return time.strftime('%Y%h%d_%H%M%S', time.localtime())
## read a pickle and return the object.
# @param filename - name of the pkl
# @return - object that had been pickled.
def load_pickle(filename):
    """Read the pickle file *filename* and return the stored object.

    Fix: opens the file in binary mode ('rb') -- pickle streams are binary
    data, and text mode corrupts them on some platforms / Python versions.
    The 'with' block guarantees the handle is closed even if unpickling
    raises.
    """
    with open(filename, 'rb') as p:
        picklelicious = pk.load(p)
    return picklelicious
## Pickle an object.
# @param object - object to be pickled
# @param filename - name of the pkl file
def save_pickle(object, filename):
    """Pickle *object* into the file *filename*.

    Fix: opens the file in binary mode ('wb') -- pickle streams are binary
    data, and text mode corrupts them.  (The parameter name `object`
    shadows the builtin; kept for interface compatibility with callers.)
    """
    with open(filename, 'wb') as pickle_file:
        pk.dump(object, pickle_file)
## Calculate L2 norm for column vectors in a matrix
# @param mat - numpy matrix
def norm(mat):
    """L2 norm of each column vector in *mat*."""
    squared = np.multiply(mat, mat)
    return np.sqrt(squared.sum(axis=0))
def approx_equal(a, b, epsilon=.001):
    """True when a and b differ by strictly less than *epsilon*."""
    return abs(a - b) < epsilon
def unipolar_limit( x, upper ):
    """Clamp x so that 0 <= x <= upper.

    The upper cap is applied first, then the floor at zero (so a negative
    *upper* yields 0, matching the original two-step clamp).
    """
    return max(0, min(x, upper))
def cart_of_pol(p):
    """Finds cartesian coordinates of polar points [r, t]'.

    Bug fix: the original referenced the name `numpy`, but this module only
    imports `numpy as np`, so every call raised NameError.
    """
    r = p[0, :]
    t = p[1, :]
    x = np.multiply(np.cos(t), r)
    y = np.multiply(np.sin(t), r)
    return np.vstack((x, y))
def pol_of_cart(p):
    """Find polar coordinates [r, theta]' of the cartesian point [x, y]'.

    Bug fix: the original used the unimported names `numpy` and `math`
    (NameError on every call); rewritten with the module's `np` alias.
    Returns a 2x1 numpy matrix, as before.
    """
    magnitude = np.linalg.norm(p)
    ang = np.arctan2(p[1, 0], p[0, 0])
    return np.matrix([magnitude, ang]).T
##
# Bound the value of a number to be above lower, and lower than upper
# @return a number
def bound(value, lower, upper):
    """Clamp *value* to [lower, upper]; the limits may be given in either order."""
    if lower >= upper:
        lower, upper = upper, lower
    return min(max(value, lower), upper)
## wraps a numpy array into hrl's datatype for sending np arrays
# over ros.
# @param np array
# @return NumpyArray object (hrl_lib/msg/NumpyArray.msg)
def wrap_np_array(nparr):
    """Wrap a numpy array into hrl's NumpyArray ROS message.

    NOTE(review): the NumpyArray import was removed above (see the commented
    "from hrl_lib.msg import NumpyArray"), so calling this currently raises
    NameError -- restore that import before use.
    """
    shp = nparr.shape
    npstr = nparr.tostring()
    npdtype = str(nparr.dtype)
    nparr_ros = NumpyArray(None,npstr,shp,npdtype)
    return nparr_ros
## convert hrl's ros wrapped numpy array to a numpy array
# @param NumpyArray object (hrl_lib/msg/NumpyArray.msg)
# @return np array
def unwrap_np_array(nparr_ros):
    """Convert hrl's ROS-wrapped numpy array message back to a numpy array.

    Fix: np.fromstring is deprecated/removed in modern NumPy; frombuffer is
    the supported equivalent.  The trailing .copy() restores fromstring's
    semantics -- a writable array that owns its data, instead of a
    read-only view into the message buffer.
    """
    npstr, shp, npdtype = nparr_ros.data, nparr_ros.shape, nparr_ros.dtype
    nparr = np.frombuffer(npstr, dtype=npdtype)
    return nparr.reshape(shp).copy()
## cartesian product of list of lists.
# code copied from: http://automatthias.wordpress.com/2007/04/28/cartesian-product-of-multiple-sets/
# @return generator. can loop over it, or list(generator) will give
# the entire list.
# NOTE - itertools in python 2.6 provides this functionality. We
# should switch over to it soon.
def cartesian_product(lists, previous_elements = []):
    """Yield the cartesian product of *lists*, one list per combination.

    Combinations are produced in lexicographic order over the input lists
    (rightmost list varies fastest).  *previous_elements* is the prefix
    accumulated by the recursion.
    """
    head, tail = lists[0], lists[1:]
    if not tail:
        for item in head:
            yield previous_elements + [item]
    else:
        for item in head:
            for combo in cartesian_product(tail, previous_elements + [item]):
                yield combo
## choose n elements from list without replacement.
# adapted code from cartesian_product
# @return generator.
def choose_without_replacement(list, n):
    """Return a generator over all n-element combinations of *list* (no repeats).

    (The parameter name `list` shadows the builtin; kept for interface
    compatibility with existing callers.)
    """
    return _choose_without_replacement([list] * n)

def _choose_without_replacement(lists, previous_elements=[], ignore_count=0):
    # Recursive helper: each level picks one element strictly to the right
    # of the previous pick (tracked via ignore_count), which guarantees no
    # element is reused.
    head, tail = lists[0], lists[1:]
    if not tail:
        for item in head[ignore_count:]:
            yield previous_elements + [item]
    else:
        for offset, item in enumerate(head[ignore_count:]):
            deeper = _choose_without_replacement(
                tail, previous_elements + [item], ignore_count + offset + 1)
            for combo in deeper:
                yield combo
##
# use festival text to speech to make a soud.
# @param text - string to be said.
def say(text):
    """Speak *text* aloud via the festival text-to-speech shell command.

    NOTE(review): *text* is interpolated unquoted into a shell pipeline, so
    characters like '"', '$', '`' or ';' reach the shell -- only call this
    with trusted strings (or switch to subprocess with an argument list).
    """
    os.system( 'echo "' + text + '" | festival --tts' )
## compute rank of a matrix.
# code copied from:
# http://mail.scipy.org/pipermail/numpy-discussion/2008-February/031218.html
def matrixrank(A, tol=1e-8):
    """Numerical rank of A: the number of singular values exceeding *tol*."""
    singular_values = np.linalg.svd(A, compute_uv=0)
    return (singular_values > tol).sum()
##################################################
#!usr/bin/python
#
#util_additional.py
#
#The following definitions are utility and conversion definitions that used
# to be part of the gt-ros-pkg scripts in hrl_lib/util.py
#Since they have been taken out sometime during summer 2010, they are added
# as explicit dependancies to the laser_camera_segmentation project.
# Maps each OpenCV image/matrix type constant to its (numpy dtype, number of
# channels) equivalent.  The un-suffixed single-channel aliases (CV_16S vs
# CV_16SC1, etc.) make this mapping non-invertible; see
# cv2np_type_dict_invertible below for the invertible subset.
cv2np_type_dict = {CV_16S : (np.int16, 1),
                   CV_16SC1 : (np.int16, 1),
                   CV_16SC2 : (np.int16, 2),
                   CV_16SC3 : (np.int16, 3),
                   CV_16SC4 : (np.int16, 4),
                   CV_16U : (np.uint16, 1),
                   CV_16UC1 : (np.uint16, 1),
                   CV_16UC2 : (np.uint16, 2),
                   CV_16UC3 : (np.uint16, 3),
                   CV_16UC4 : (np.uint16, 4),
                   CV_32F : (np.float32, 1),
                   CV_32FC1 : (np.float32, 1),
                   CV_32FC2 : (np.float32, 2),
                   CV_32FC3 : (np.float32, 3),
                   CV_32FC4 : (np.float32, 4),
                   CV_32S : (np.int32, 1),
                   CV_32SC1 : (np.int32, 1),
                   CV_32SC2 : (np.int32, 2),
                   CV_32SC3 : (np.int32, 3),
                   CV_32SC4 : (np.int32, 4),
                   CV_64F : (np.float64, 1),
                   CV_64FC1 : (np.float64, 1),
                   CV_64FC2 : (np.float64, 2),
                   CV_64FC3 : (np.float64, 3),
                   CV_64FC4 : (np.float64, 4),
                   CV_8S : (np.int8, 1),
                   CV_8SC1 : (np.int8, 1),
                   CV_8SC2 : (np.int8, 2),
                   CV_8SC3 : (np.int8, 3),
                   CV_8SC4 : (np.int8, 4),
                   CV_8U : (np.uint8, 1),
                   CV_8UC1 : (np.uint8, 1),
                   CV_8UC2 : (np.uint8, 2),
                   CV_8UC3 : (np.uint8, 3),
                   CV_8UC4 : (np.uint8, 4)}
# Subset of cv2np_type_dict with the ambiguous un-suffixed aliases removed,
# so the mapping can be inverted to go from (numpy dtype, channels) back to
# an OpenCV type constant.
cv2np_type_dict_invertible = {CV_16SC1 : (np.int16, 1),
                              CV_16SC2 : (np.int16, 2),
                              CV_16SC3 : (np.int16, 3),
                              CV_16SC4 : (np.int16, 4),
                              CV_16UC1 : (np.uint16, 1),
                              CV_16UC2 : (np.uint16, 2),
                              CV_16UC3 : (np.uint16, 3),
                              CV_16UC4 : (np.uint16, 4),
                              CV_32FC1 : (np.float32, 1),
                              CV_32FC2 : (np.float32, 2),
                              CV_32FC3 : (np.float32, 3),
                              CV_32FC4 : (np.float32, 4),
                              CV_32SC1 : (np.int32, 1),
                              CV_32SC2 : (np.int32, 2),
                              CV_32SC3 : (np.int32, 3),
                              CV_32SC4 : (np.int32, 4),
                              CV_64FC1 : (np.float64, 1),
                              CV_64FC2 : (np.float64, 2),
                              CV_64FC3 : (np.float64, 3),
                              CV_64FC4 : (np.float64, 4),
                              CV_8SC1 : (np.int8, 1),
                              CV_8SC2 : (np.int8, 2),
                              CV_8SC3 : (np.int8, 3),
                              CV_8SC4 : (np.int8, 4),
                              CV_8UC1 : (np.uint8, 1),
                              CV_8UC2 : (np.uint8, 2),
                              CV_8UC3 : (np.uint8, 3),
                              CV_8UC4 : (np.uint8, 4)}
#def cv2np(im):
# numpy_type, nchannels = cv2np_type_dict[cv.cvGetElemType(im)]
# array_size = [im.height, im.width, nchannels]
# np_im = np.frombuffer(im.imageData, dtype=numpy_type)
# return np.reshape(np_im, array_size)
def cv2np(im, format='RGB'):
    """This function converts an image from openCV format to a numpy array.
    This utility needs both NUMPY and OPENCV to accomplish the conversion.
    cv2np(im, format='RGB')
    """
    # Source image is BGR but RGB output was requested: swap channels.
    # NOTE(review): cvCvtColor operates in place here, so this mutates the
    # caller's image.
    if format == 'BGR':
        cvCvtColor( im, im, CV_BGR2RGB )
    numpy_type, nchannels = cv2np_type_dict[cvGetElemType(im)]
    array_size = [im.height, im.width, nchannels]
    #Removed multiplication of size by (im.depth/8) as numpy takes
    #into account of that in numpy_type
    # The ctypes-opencv binding exposes a direct converter; it is detected
    # here via the missing docstring on the image object.
    if im.__doc__ == None:
        # ctypes-opencv
        return im.as_numpy_array()
    else:
        # Copy the raw pixel buffer and reshape to (height, width, channels).
        np_im = np.array( np.frombuffer(im.imageData, dtype=numpy_type,
                                        count=im.height*im.width*nchannels))
        return np.reshape(np_im, array_size)
def np2pil( im ):
    """ for grayscale - all values must be between 0 and 255.
        not sure about color yet.
        np2pil(im)
    """
    #TODO: print 'util.np2cv: works for texseg.py'
    #TODO: print 'util.np2cv: more extensive tests would be useful'
    # Derive (height, width, channels) from the array shape.
    if len(im.shape) == 3:
        shp = im.shape
        channels = shp[2]
        height, width = shp[0], shp[1]
    elif len(im.shape) == 2:
        height, width = im.shape
        channels = 1
    else:
        raise AssertionError("unrecognized shape for the input image. should be 3 or 2, but was %d." % len(im.shape))
    # NOTE(review): only channels == 3 and channels == 1 assign `image`; any
    # other channel count (e.g. 4 for RGBA) falls through and raises
    # NameError on the return below.  Image.fromstring is the legacy PIL
    # constructor (frombytes in modern Pillow).
    if channels == 3:
        image = Image.fromstring( "RGB", (width, height), im.tostring() )
    if channels == 1:
        im = np.array(im, dtype=np.uint8)
        #image = Image.fromarray(im)
        image = Image.fromstring( "L", (width, height), im.tostring() )
    return image
# Inverse mapping for np2cv: (numpy dtype name, channel count) -> OpenCV
# image type constant, derived from cv2np_type_dict_invertible.
# The original wrapped this assignment in a no-op `if True:` guard, which is
# removed here; behavior is unchanged.
np2cv_type_dict = dict([(str(np.dtype(v[0])), v[1]), k] for
                       k, v in cv2np_type_dict_invertible.items())
def np2cv(im, force_color=False):
    ''' Note: force_color -- force grayscale np image into a color cv image
    np2cv(im, force_color=False)
    '''
    # Round-trips through a PIL image and a BMP file on disk because this
    # OpenCV binding has no direct numpy -> cv constructor.
    # NOTE(review): writes 'test.bmp' into the current working directory --
    # not safe for concurrent use, and the file is left behind.
    image = np2pil( im )
    image.save('test.bmp', 'BMP')
    if len(im.shape) == 3:
        cvim = cvLoadImage('test.bmp')
    elif len(im.shape) == 2:
        # Grayscale input: reload as gray unless the caller forces color.
        if force_color == False:
            cvim = cvLoadImage('test.bmp', CV_LOAD_IMAGE_GRAYSCALE)
        else:
            cvim = cvLoadImage('test.bmp')
    else:
        raise AssertionError("unrecognized shape for the input image. should be 3 or 2, but was %d." % len(im.shape))
    return cvim
| [
[
1,
0,
0.0089,
0.003,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0118,
0.003,
0,
0.66,
0.0323,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0148,
0.003,
0,
0.6... | [
"import os",
"import numpy as np",
"import pickle as pk",
"import time",
"from opencv.cv import *",
"from opencv.highgui import *",
"import numpy as np",
"import Image as Image",
"def getTime():\n return '['+time.strftime(\"%H:%M:%S\", time.localtime())+']'",
" return '['+time.strftime(\"%H:... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import util as ut #Local function. Uses only: getTime
import processor #Used for LABEL_CLUTTER, LABEL_SURFACE, config.path, and feature_type
class classifier(object):
    '''
    Base class for surface-vs-clutter point classifiers.

    Subclasses override train/test/save/load; test_results scores a
    labeling produced by test() against the ground truth carried in the
    loaded feature dictionary.
    '''
    # Assigned in __init__: the processor supplying data and config, and the
    # name of the feature subset this classifier operates on.
    processor = None
    features = 'all'
    def __init__(self, processor, features):
        '''
        Constructor

        processor -- processor.Processor instance supplying data and config
        features  -- name of the feature subset to use (e.g. 'all')
        '''
        self.processor = processor
        self.features = features
    def train(self):
        # Abstract: subclasses train their model here.
        return None
    #abstract
    def test(self, feature_data = None):
        # Abstract: subclasses classify and return per-point labels here.
        return None
    #dict are the loaded features including the ground truth, labels the algorithm output
    def test_results(self, dict, labels):
        '''Compare *labels* with the ground truth in *dict*; print and return stats.

        Returns (count_surface, count_clutter, count_surface_correct,
        count_clutter_correct, percent_surface_correct,
        percent_clutter_correct).

        NOTE(review): the parameter name shadows the builtin dict(); also
        raises ZeroDivisionError if dict['set_size'] is 0.
        '''
        current_set_size = dict['set_size']
        count_correct = 0
        count_clutter_correct = 0
        count_surface_correct = 0
        count_clutter = 0
        count_surface = 0
        count = 0
        # 'count' walks dict['labels'] in lockstep with the point indices.
        for index in dict['point_indices']:
            label = labels[index]
            if label == dict['labels'][count]:
                count_correct += 1
            if dict['labels'][count] == processor.LABEL_CLUTTER:
                count_clutter += 1
                if label == dict['labels'][count]:
                    count_clutter_correct += 1
            if dict['labels'][count] == processor.LABEL_SURFACE:
                count_surface += 1
                if label == dict['labels'][count]:
                    count_surface_correct += 1
            count += 1
        print ut.getTime(), '##########################################'
        print ut.getTime(), '####tested on ', self.features, '###########################'
        print ut.getTime(), '==================================='
        print ut.getTime(), 'percent in total: surface:',(float(count_surface)/float(current_set_size)*100), '%, clutter:',(float(count_clutter)/float(current_set_size)*100),'%'
        print ut.getTime(), '#points surface:',count_surface,'clutter:',count_clutter
        print ut.getTime(), '#points correct: surface:',count_surface_correct,'clutter:',count_clutter_correct
        # A class that is absent from the test set counts as 100% correct,
        # which also avoids dividing by zero below.
        if count_surface > 0:
            percent_surface_correct = float(count_surface_correct)/float(count_surface) * 100
        else:
            percent_surface_correct = 100
        if count_clutter > 0:
            percent_clutter_correct = float(count_clutter_correct)/float(count_clutter) * 100
        else:
            percent_clutter_correct = 100
        print ut.getTime(), '#percent correct: surface:',percent_surface_correct,'clutter:',percent_clutter_correct
        print ut.getTime(), '==================================='
        print ut.getTime(), '##########################################'
        testresults = (count_surface, count_clutter,count_surface_correct, count_clutter_correct, percent_surface_correct, percent_clutter_correct)
        return testresults
    def get_filename(self):
        # Canonical path for persisting this classifier, derived from the
        # feature set name, feature type and neighborhood parameters.
        return self.processor.config.path+'/classifier_'+self.features+'_'+self.processor.feature_type+'_k'+str(self.processor.feature_neighborhood)+'_r'+str(self.processor.feature_radius)+'.XML'
    def save(self):
        # Abstract: subclasses persist their trained model here.
        return None
    def load(self):
        # Abstract: subclasses restore their trained model here.
        return None
| [
[
1,
0,
0.2586,
0.0086,
0,
0.66,
0,
811,
0,
1,
0,
0,
811,
0,
0
],
[
1,
0,
0.2672,
0.0086,
0,
0.66,
0.5,
177,
0,
1,
0,
0,
177,
0,
0
],
[
3,
0,
0.6379,
0.6983,
0,
0.6... | [
"import util as ut #Local function. Uses only: getTime",
"import processor #Used for LABEL_CLUTTER, LABEL_SURFACE, config.path, and feature_type",
"class classifier(object):\n '''\n classdocs\n '''\n\n processor = None\n features = 'all'",
" '''\n classdocs\n '''",
" processor = N... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Hai Nguyen (Healthcare Robotics Lab, Georgia Tech.)
import util as ut #Usage: cv2np, np2cv
import numpy as np
import opencv as cv
import opencv.highgui as hg
##
# calculates eigen values of covariance matrix accumulating statistics of sobel filter responses in an image block
#
# @param cv_image opencv image to calculate texture over
# @param blocksize size of block to accumulate statistics (in pixels)
# @param filtersize size of sobel filter to use (in pixels)
# @return numpy matrix of size (width, height, 2) where [:,:,0] is the set of first eigen values and [:,:,1] is the second set
def eigen_texture(cv_image, blocksize=8, filtersize=3):
    """Eigenvalues of the gradient covariance matrix per image block.

    Converts *cv_image* to grayscale, runs cvCornerEigenValsAndVecs with the
    given block and Sobel filter sizes, and returns a
    (height, width, 2) numpy array holding the two eigenvalues.
    """
    gray_image = cv.cvCreateImage(cv.cvSize(cv_image.width, cv_image.height), cv.IPL_DEPTH_8U, 1)
    # cvCornerEigenValsAndVecs packs 6 floats per pixel
    # (l1, l2, x1, y1, x2, y2), hence the 6x-wide output image.
    eig_tex = cv.cvCreateImage(cv.cvSize(cv_image.width*6, cv_image.height), cv.IPL_DEPTH_32F, 1)
    cv.cvCvtColor(cv_image, gray_image, cv.CV_BGR2GRAY)
    cv.cvCornerEigenValsAndVecs(gray_image, eig_tex, blocksize, filtersize)
    eig_tex_np = ut.cv2np(eig_tex)
    eig_tex_np = np.reshape(eig_tex_np, [cv_image.height, cv_image.width, 6])
    # Keep only the two eigenvalues; drop the eigenvector components.
    return eig_tex_np[:,:,0:2]
def visualize(eigens):
    """Display both eigenvalue planes as inverted, range-normalized grayscale.

    Blocks until a key is pressed in the OpenCV windows: space returns,
    'x' exits the whole process.
    """
    l1 = eigens[:,:,0]
    l2 = eigens[:,:,1]
    # Normalize each plane to its own [min, max] range.
    m1 = np.min(l1)
    m2 = np.min(l2)
    r1 = np.max(l1) - m1
    r2 = np.max(l2) - m2
    # Guard against a constant plane (range 0) to avoid divide-by-zero.
    if r1 == 0:
        r1 = 1
    if r2 == 0:
        r2 = 1
    # Invert (1 - normalized) so strong responses render dark.
    l1cv = ut.np2cv(np.array( (1 - ((l1-m1) / r1)) * 255, dtype='uint8'))
    l2cv = ut.np2cv(np.array( (1 - ((l2-m2) / r2)) * 255, dtype='uint8'))
    hg.cvNamedWindow('eigen value 1', 1)
    hg.cvNamedWindow('eigen value 2', 1)
    hg.cvShowImage('eigen value 1', l1cv)
    hg.cvShowImage('eigen value 2', l2cv)
    while True:
        k = hg.cvWaitKey(33)
        if k == ' ':
            return
        if k == 'x':
            exit()
if __name__ == '__main__':
    # Demo: sweep block sizes 2..80 over a sample image and visualize the
    # eigen-texture response at each size.
    # NOTE(review): the image path below is hardcoded to a developer machine.
    #hg.cvNamedWindow('win', 1)
    im = hg.cvLoadImage('/home/haidai/svn/robot1/src/projects/08_03_dog_commands/dragonfly_color_calibration/untitled folder/camera_image.png')
    #hg.cvShowImage('win', im)
    for i in range(40):
        s = (i+1) * 2
        print s
        eig_tex_np = eigen_texture(im, blocksize=s, filtersize=3)
        visualize(eig_tex_np)
# def texture_features(self, block_size=5, filter_size=3):
# """
# Calculates the texture features associated with the image.
# block_size gives the size of the texture neighborhood to be processed
# filter_size gives the size of the Sobel operator used to find gradient information
# """
# #block_size = cv.cvSize(block_size, block_size)
#
# #convert to grayscale float
# channels = 1
# self.gray_image = cv.cvCreateImage(cv.cvSize(self.im_width, self.im_height),
# cv.IPL_DEPTH_8U, #cv.IPL_DEPTH_16U, #cv.IPL_DEPTH_32F,
# channels)
#
#
# #cv.CV_32FC1, #cv.IPL_DEPTH_32F, #cv.IPL_DEPTH_8U, #cv.IPL_DEPTH_16U,
# channels = 1
# eig_tex = cv.cvCreateImage(cv.cvSize(self.im_width*6, self.im_height),
# cv.IPL_DEPTH_32F,
# channels)
#
#
# cv.cvCvtColor(self.image, self.gray_image, cv.CV_BGR2GRAY);
#
# #cv.cvAdd(const CvArr* src1, const CvArr* src2, CvArr* dst, const CvArr* mask=NULL );
#
# #hg.cvConvertImage(self.image, self.gray_image)
#
# cv.cvCornerEigenValsAndVecs(self.gray_image, eig_tex,#CvArr* eigenvv,
# block_size, filter_size)
#
# eig_tex = ut.cv2np(eig_tex)
# eig_tex = np.reshape(eig_tex, [self.im_height, self.im_width, 6])
# #print eig_tex.shape ## [480,640,3]
# ## (l1, l2, x1, y1, x2, y2), where
# ## l1, l2 - eigenvalues of M; not sorted
# ## (x1, y1) - eigenvector corresponding to l1
# ## (x2, y2) - eigenvector corresponding to l2
# tex_feat = np.zeros([3, self.im_height * self.im_width], dtype=np.float32)
# tmp = np.reshape(eig_tex, [self.im_height * self.im_width, 6]).T
# s = tmp[0] > tmp[1]
# tex_feat[1:3, s] = tmp[0, s] * tmp[2:4, s]
# tex_feat[0, s] = tmp[1, s]
# tex_feat[1:3, -s] = tmp[1, -s] * tmp[4:6, -s]
# tex_feat[0, -s] = tmp[0, -s]
#
# self.tex_feat = tex_feat.T
# self.tex_image = np.reshape(self.tex_feat, [self.im_height, self.im_width, 3])
| [
[
1,
0,
0.1935,
0.0065,
0,
0.66,
0,
811,
0,
1,
0,
0,
811,
0,
0
],
[
1,
0,
0.2,
0.0065,
0,
0.66,
0.1667,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.2065,
0.0065,
0,
0.6... | [
"import util as ut #Usage: cv2np, np2cv",
"import numpy as np",
"import opencv as cv",
"import opencv.highgui as hg",
"def eigen_texture(cv_image, blocksize=8, filtersize=3):\n gray_image = cv.cvCreateImage(cv.cvSize(cv_image.width, cv_image.height), cv.IPL_DEPTH_8U, 1)\n eig_tex = cv.cvCreateImage(cv... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from features import features
import texture_features
import gaussian_curvature
import scipy.stats as stats
import numpy as np
import opencv as cv
import scipy.spatial.kdtree as kdtree
import util as ut # used for getTime, and cv2np, load_pickle, save_pickle
import os #[for os.path.exists(...)]
import copy #[to copy VOI indices]
import processor ###
#The source file:
# Has a file import statement which uses config.path, etc.
# Heavily uses internal variables of processor.Processor object (called 'processor', must be passed).
# Uses no functions from processor except for rotate_to_plane()
# Vars: pts3d_bound, img, map2d (=camPts_bound), config.path, scan_dataset.id, feature_radius,
# voi_width, rotate_to_plane(), scan_dataset.ground_plane_normal, intensities_bound
class gaussian_histogram_features(features):
    '''
    Histogram-based per-point features for a combined laser/camera scan.

    For each query point the feature vector mixes geometry (neighborhood
    plane normal, eigenvalue spread, a z-height histogram over a volume of
    interest) with appearance (HSV / intensity histograms and two texture
    responses, sampled through the laser-to-image mapping processor.map2d).

    Uses these members of the supplied processor object: pts3d_bound, img,
    map2d, config.path, scan_dataset.id, feature_radius, voi_width,
    point_of_interest, intensities_bound, scan_dataset.ground_plane_normal,
    and the function processor.rotate_to_plane().
    '''

    #all_save_load: set to true only if nonzero_indices contain all pts in pt-cloud!
    def prepare(self, features_k_nearest_neighbors, nonzero_indices = None, all_save_load = False, regenerate_neightborhood_indices = False):
        """Precompute everything get_featurevector() needs.

        If features_k_nearest_neighbors is None/False, neighborhoods are
        radius (ball) queries around each point (optionally cached to a
        pickle file); otherwise k-nearest-neighbor queries are used.
        Also precomputes HSV channel images, texture images, and the
        volume-of-interest histograms.
        """
        #print np.shape(self.processor.pts3d_bound), 'shape pts3d_bound'
        imgTmp = cv.cvCloneImage(self.processor.img)
        self.imNP = ut.cv2np(imgTmp,format='BGR')
        ###self.processor.map2d = np.asarray(self.processor.camPts_bound) #copied from laser to image mapping

        if features_k_nearest_neighbors == None or features_k_nearest_neighbors == False: #use range
            self.kdtree2d = kdtree.KDTree(self.processor.pts3d_bound.T)

            #print len(nonzero_indices)
            #print np.shape(np.asarray((self.processor.pts3d_bound.T)[nonzero_indices]))

            if nonzero_indices != None:
                print ut.getTime(), 'query ball tree for ', len(nonzero_indices), 'points'
                kdtree_query = kdtree.KDTree((self.processor.pts3d_bound.T)[nonzero_indices])
            else:
                print ut.getTime(), 'query ball tree'
                kdtree_query = kdtree.KDTree(self.processor.pts3d_bound.T)

            # Cache file keyed by dataset id and query radius.
            filename = self.processor.config.path+'/data/'+self.processor.scan_dataset.id+'_sphere_neighborhood_indices_'+str(self.processor.feature_radius)+'.pkl'
            if all_save_load == True and os.path.exists(filename) and regenerate_neightborhood_indices == False:
                #if its already there, load it:
                print ut.getTime(), 'loading',filename
                self.kdtree_queried_indices = ut.load_pickle(filename)
            else:
                self.kdtree_queried_indices = kdtree_query.query_ball_tree(self.kdtree2d, self.processor.feature_radius, 2.0, 0.2) #approximate
                print ut.getTime(), 'queried kdtree: ',len(self.kdtree_queried_indices),'points, radius:',self.processor.feature_radius
                if all_save_load == True:
                    ut.save_pickle(self.kdtree_queried_indices, filename)

            #make dict out of list for faster operations? (doesn't seem to change speed significantly):
            #self.kdtree_queried_indices = dict(zip(xrange(len(self.kdtree_queried_indices)), self.kdtree_queried_indices))

        else: #experiemental: use_20_nearest_neighbors == True
            #TODO: exclude invalid values in get_featurevector (uncomment code there)
            self.kdtree2d = kdtree.KDTree(self.processor.pts3d_bound.T)
            self.kdtree_queried_indices = []
            print ut.getTime(), 'kdtree single queries for kNN start, k=', features_k_nearest_neighbors
            count = 0
            for point in ((self.processor.pts3d_bound.T)[nonzero_indices]):
                count = count + 1
                result = self.kdtree2d.query(point, features_k_nearest_neighbors,0.2,2,self.processor.feature_radius)
                #existing = result[0][0] != np.Inf
                #print existing
                #print result[1]
                self.kdtree_queried_indices += [result[1]] #[existing]
                if count % 4096 == 0:
                    print ut.getTime(),count
            print ut.getTime(), 'kdtree singe queries end'

            #convert to numpy array -> faster access
            self.kdtree_queried_indices = np.asarray(self.kdtree_queried_indices)

        #print self.kdtree_queried_indices
        #takes long to compute:
        #avg_len = 0
        #minlen = 999999
        #maxlen = 0
        #for x in self.kdtree_queried_indices:
        #    avg_len += len(x)
        #    minlen = min(minlen, len(x))
        #    maxlen = max(maxlen, len(x))
        #avg_len = avg_len / len(self.kdtree_queried_indices)
        #print ut.getTime(), "range neighbors: avg_len", avg_len, 'minlen', minlen, 'maxlen', maxlen

        #create HSV numpy images:
        # compute the hsv version of the image
        image_size = cv.cvGetSize(self.processor.img)
        img_h = cv.cvCreateImage (image_size, 8, 1)
        img_s = cv.cvCreateImage (image_size, 8, 1)
        img_v = cv.cvCreateImage (image_size, 8, 1)
        img_hsv = cv.cvCreateImage (image_size, 8, 3)
        cv.cvCvtColor (self.processor.img, img_hsv, cv.CV_BGR2HSV)
        cv.cvSplit (img_hsv, img_h, img_s, img_v, None)

        self.imNP_h = ut.cv2np(img_h)
        self.imNP_s = ut.cv2np(img_s)
        self.imNP_v = ut.cv2np(img_v)

        # Two texture response channels per pixel (see texture_features).
        textures = texture_features.eigen_texture(self.processor.img)
        self.imNP_tex1 = textures[:,:,0]
        self.imNP_tex2 = textures[:,:,1]

        self.debug_before_first_featurevector = True

        self.generate_voi_histogram(self.processor.point_of_interest,self.processor.voi_width)

    #has to have at least length 2 because of openCV matrices!!!!
    def get_indexvector(self, type):
        """Return feature-vector column indices for a feature subset.

        type is 'range' (geometry features), 'color' (appearance
        features), 'hsvi' (for plotting), or anything else for all
        features.  Index boundaries mirror the layout built in
        get_featurevector().
        """
        var_idx = []
        #start indices
        rh1 = 0 #zhist, normal, eigenvalue1, ev2
        ch1 = rh1 + 6 #hsi zhist, maxheight-diff, tex1, tex2
        ci = ch1 + 25
        end = ci + 4 #

        if type=='range':
            for i in range(rh1, ch1):
                var_idx.append(i)
        elif type=='color':
            for i in range(ch1, end):
                var_idx.append(i)
        #for plotting:
        elif type=='hsvi':
            for i in range(ci,end):
                var_idx.append(i)
        else: #all
            for i in range(rh1, end):
                var_idx.append(i)
        return np.array(var_idx)

    #get the feature vector for a specific point
    def get_featurevector(self, index, count, pts = None):
        """Build the feature vector (a plain list) for one point.

        index  -- column index of the point in pts (default pts3d_bound)
        count  -- row into the precomputed kdtree_queried_indices table
        pts    -- optional 3xN point matrix; defaults to pts3d_bound
        Requires prepare() to have been called first.
        """
        if pts == None:
            pts = self.processor.pts3d_bound

        #print 'i',index,'c', count
        fv = []
        indices = np.asarray(self.kdtree_queried_indices[count])
        # kNN queries pad missing neighbors with index == number of points;
        # filter those out before using them.
        invalid_value = np.shape(pts)[1]
        #print indices
        #print 'iv',invalid_value
        indices = indices[indices != invalid_value]

        #print ut.getTime(), indices
        #print ut.getTime(), 'number of pts', len(indices)
        a = pts[:,indices]
        view = processor.rotate_to_plane(self.processor.scan_dataset.ground_plane_normal, np.matrix([-1,0,0.]).T)
        normal, eigenvalues = gaussian_curvature.gaussian_curvature(a,view)
        #eigenvalues = eigenvalues / np.square(r)
        #fv += [normal[0,0],0,normal[2,0]]
        #fv += normal.T.A[0].tolist()
        #fv += eigenvalues.tolist()
        #print np.asarray(pts[:,index].T[0])[0]
        #    print 'pt',np.asarray(pts[:,index].T[0])
        point = pts[:,index]

        # Geometry part: z-histogram value, plane normal, slice spread.
        ev1, ev2 = self.get_voi_histogram_spread(point)
        #z_max_height_diff = pts[2,index] - self.get_voi_maxcount_height()
        #fv += [self.get_voi_histogram_value(point),z_max_height_diff,normal[0,0],normal[1,0],normal[2,0], ev1, ev2]
        fv += [self.get_voi_histogram_value(point),normal[0,0],normal[1,0],normal[2,0], ev1, ev2]

        # Appearance part: sample the image through the laser->image map.
        h = self.imNP_h[self.processor.map2d[1,index],self.processor.map2d[0,index]]
        s = self.imNP_s[self.processor.map2d[1,index],self.processor.map2d[0,index]]
        i = self.processor.intensities_bound[index]
        hsi = self.get_voi_hsi_histogram_values(point,h,s,i)
        fv += [hsi[0],hsi[1],hsi[2]]

        #print np.shape(self.imNP_tex1)
        #print np.shape(self.map2d)
        tex1 = self.imNP_tex1[self.processor.map2d[1,index],self.processor.map2d[0,index]]
        tex2 = self.imNP_tex2[self.processor.map2d[1,index],self.processor.map2d[0,index]]
        fv += [tex1, tex2]
        #print tex1, tex2

        #color histograms:
        colors_h = []
        colors_s = []
        colors_v = []
        for idx in indices:
            colors_h.append(float(self.imNP_h[self.processor.map2d[1,idx],self.processor.map2d[0,idx]]))
            colors_s.append(float(self.imNP_s[self.processor.map2d[1,idx],self.processor.map2d[0,idx]]))
            colors_v.append(float(self.imNP_v[self.processor.map2d[1,idx],self.processor.map2d[0,idx]]))

        # Normalized 5-bin histograms of the neighborhood's H, S, V values.
        color_hist = stats.histogram2(np.array(colors_h), [0,51,102,153,204])
        color_hist = color_hist / float(np.sum(color_hist))
        color_hist = list(color_hist)
        fv += color_hist
        color_hist = stats.histogram2(np.array(colors_s), [0,51,102,153,204])
        color_hist = color_hist / float(np.sum(color_hist))
        color_hist = list(color_hist)
        fv += color_hist
        color_hist = stats.histogram2(np.array(colors_v), [0,51,102,153,204])
        color_hist = color_hist / float(np.sum(color_hist))
        color_hist = list(color_hist)
        fv += color_hist

        #intensities
        intensities = self.processor.intensities_bound[indices]
        intensities = np.asarray(intensities)
        #map to 0-255-range: TODO: perhaps do some nonlinear transformation here?
        intensities = intensities / 10000 * 255
        intensity_hist = stats.histogram2(intensities, [0,51,102,153,204])
        intensity_hist = intensity_hist / float(np.sum(intensity_hist))
        intensity_hist = list(intensity_hist)
        fv += intensity_hist

        #current colors:
        fv += [float(self.imNP_h[self.processor.map2d[1,index],self.processor.map2d[0,index]]) / 255.0]
        fv += [float(self.imNP_s[self.processor.map2d[1,index],self.processor.map2d[0,index]]) / 255.0]
        fv += [float(self.imNP_v[self.processor.map2d[1,index],self.processor.map2d[0,index]]) / 255.0]

        #current intensity value (scaled)
        intensity = self.processor.intensities_bound[index]
        #scale:
        intensity = intensity / 15000.0
        intensity = [intensity]
        fv += intensity

        if self.debug_before_first_featurevector == True:
            self.debug_before_first_featurevector = False
            print ut.getTime(), 'get_featurevector: Choosing not to print Feature Vector Sample'
            #print ut.getTime(), 'feature vector sample(gaussian histograms):', fv
        return fv

    #poi and width: cube of interest around point
    #min, max, bincount gives the height slices
    def generate_voi_histogram(self, poi, width):
        """Build all volume-of-interest statistics around point poi.

        Populates: voi_pts_indices, voi_pts, z_hist (+bins, slice
        indices, per-slice spread), per-slice H/S/I histograms, the
        intensity normalization factor, and z_hist_height_max.
        """
        print 'poi',poi,'width',width
        # indices of points in volume of interest (poi)
        pts_indices = self.get_voi_pts_indices(poi, width)
        self.voi_pts_indices = pts_indices
        pts = np.asarray(self.processor.pts3d_bound)
        pts = pts[:,pts_indices] #truncate points to volume of interest
        self.voi_pts = pts
        #mlab.points3d(pts[0,:],pts[1,:],pts[2,:], mode='point')
        #mlab.show()

        #go from 0 to 2m, create histogram with 80 bins = bin of 2.5cm (=height-slice)
        # NOTE: 'min'/'max' shadow the builtins for the rest of this scope.
        min = 0.
        max = 2.
        self.voi_bincount = 80
        self.voi_interval_size = max - min
        bins = np.asarray(range(self.voi_bincount)) * self.voi_interval_size/float(self.voi_bincount)
        #print 'bins',bins
        hist = stats.histogram2(pts[2],bins) / float(len(pts[2]))
        #print 'zhist',hist
        #print zip(bins, hist)
        self.z_hist = hist
        self.z_hist_bins = bins
        slices = self.get_voi_slice_indices()
        self.z_hist_slices_indices = slices

        #precalculate spread values:
        self.z_hist_spread = []
        for indices in self.z_hist_slices_indices:
            a = self.processor.pts3d_bound[:,indices]
            # ev12 gives an indication about how far points are spread out in a specific height-slice
            u, ev12 = gaussian_curvature.spread(a)
            self.z_hist_spread += [(ev12[0], ev12[1])]

        #create h,s,i histograms for each slice:
        pts_h = []
        pts_s = []
        #print self.processor.pts3d_bound
        #TODO: does this use the volume of interest? should it???
        n,m = np.shape(np.asarray(self.processor.pts3d_bound))
        #print 'm',m,'len(self.processor.pts3d_bound[2,:].A1)',len(self.processor.pts3d_bound[2,:].A1)
        for index in range(m):
            pts_h.append(float(self.imNP_h[self.processor.map2d[1,index],self.processor.map2d[0,index]]))
        for index in range(m):
            pts_s.append(float(self.imNP_s[self.processor.map2d[1,index],self.processor.map2d[0,index]]))
        pts_i = np.asarray(self.processor.intensities_bound)
        #print 'ptsi',pts_i
        # Scale intensities so the maximum maps to 255 (guard against an
        # all-zero intensity channel).
        if np.max(pts_i) > 0:
            self.intensity_normalization_factor = 1.0 / float(np.max(pts_i)) * 255
        else:
            self.intensity_normalization_factor = 1.
        #print 'self.intensity_normalization_factor', self.intensity_normalization_factor
        #print pts_i
        pts_i *= self.intensity_normalization_factor
        pts_h = np.asarray(pts_h)
        pts_s = np.asarray(pts_s)

        self.z_hist_h_hists = []
        self.z_hist_s_hists = []
        self.z_hist_i_hists = []

        #normalize by maximum slice:
        max_count = 0
        max_count_index = 0
        for count_idx, indices in enumerate(slices):
            n = np.shape(indices)
            if n[0] > max_count:
                max_count = n[0]
                max_count_index = count_idx
        slize_height = (self.voi_interval_size / float(self.voi_bincount))
        # Center height of the most populated slice.
        self.z_hist_height_max = slize_height * (max_count_index + 0.5)
        #print 'max_count', max_count,'index',max_count_index, 'height in max bin', self.z_hist_height_max

        for indices in slices:
            pts_h_slice = pts_h[indices]
            pts_s_slice = pts_s[indices]
            pts_i_slice = pts_i[indices]

            self.hsi_hist_bincount = 5
            bins = np.asarray(range(0,self.hsi_hist_bincount))*float(255.0/float(self.hsi_hist_bincount))
            #print bins
            #todo: smooth with kernel fct
            count = float(len(pts_h_slice))
            if count == 0:
                count = 1
            hist_h = stats.histogram2(pts_h_slice,bins) / count
            self.z_hist_h_hists.append(hist_h)
            hist_s = stats.histogram2(pts_s_slice,bins) / count
            self.z_hist_s_hists.append(hist_s)
            hist_i = stats.histogram2(pts_i_slice,bins) / count
            #print 'hist_i', hist_i, pts_i_slice, bins, pts_i
            self.z_hist_i_hists.append(hist_i)
        #print 'hh',self.z_hist_h_hists
        #print 'sh',self.z_hist_s_hists
        #print 'ih',self.z_hist_i_hists

    def get_voi_pts_indices(self, poi, width):
        """Indices of pts3d_bound points inside the axis-aligned cube of
        side `width` centered at `poi`."""
        pts = np.asarray(self.processor.pts3d_bound)
        #region of interest:
        conditions = np.multiply(np.multiply(np.multiply(np.multiply(np.multiply(pts[0] < poi[0]+width/2.0, pts[0] > poi[0]-width/2.0),
                        pts[1] < poi[1]+width/2.0), pts[1] > poi[1]-width/2.0),
                        pts[2] < poi[2]+width/2.0), pts[2] > poi[2]-width/2.0)
        indices = np.where(conditions)[0]
        return indices

    def get_voi_slice_indices(self):
        """Partition the VOI points into height slices, one index array per
        z-histogram bin (points with last_z < z <= bin upper edge)."""
        slices = []
        last_z = -999999
        for z in self.z_hist_bins:
            indices = copy.copy(self.voi_pts_indices)
            pts = self.voi_pts
            conditions = np.multiply(pts[2] < z, pts[2] > last_z)
            indices = indices[np.where(conditions)[0]]
            slices += [indices]
            last_z = z
        return slices

    def get_voi_histogram_value(self, point):
        """Normalized z-histogram value of the slice containing point's
        height, or 0 when the height falls outside the histogram range."""
        z = point[2]
        z = int(z*self.voi_bincount / float(self.voi_interval_size))
        if z >= 0 and z < self.voi_bincount:
            # print z, self.z_hist[z]
            return self.z_hist[z]
        else:
            #print z,0
            return 0

    def get_voi_histogram_spread(self, point):
        """Precomputed (ev1, ev2) spread of the slice containing point's
        height, or (0, 0) outside the histogram range."""
        z = point[2]
        z = int(z*self.voi_bincount / float(self.voi_interval_size))
        if z >= 0 and z < self.voi_bincount:
            # indices = self.z_hist_slices_indices[z]
            # a = self.processor.pts3d_bound[:,indices]
            # u, ev12 = gaussian_curvature.spread(a)
            # if abs(self.z_hist_spread[z][0] - ev12[0]) > 0.0000000001 or abs(self.z_hist_spread[z][1] - ev12[1]) > 0.0000000001:
            #     print 'ERROR', self.z_hist_spread[z], '!=', (ev12[0], ev12[1])
            # return ev12[0], ev12[1]
            return self.z_hist_spread[z]
        else:
            #print z,0
            return 0, 0

    def get_voi_hsi_histogram_values(self, point,h ,s, i):
        """Histogram frequencies of (h, s, i) within point's height slice.

        h, s are 0..255 channel values; i is a raw intensity that gets
        scaled by intensity_normalization_factor first.  Returns
        (0, 0, 0) when the height falls outside the histogram range.
        """
        z = point[2]
        z = int(z*self.voi_bincount / float(self.voi_interval_size))
        if z >= 0 and z < self.voi_bincount:
            h_index = int(h * self.hsi_hist_bincount / 255.0)
            s_index = int(s * self.hsi_hist_bincount / 255.0)
            i *= self.intensity_normalization_factor
            i_index = int(i * self.hsi_hist_bincount / 255.0)
            h_hist = self.z_hist_h_hists[z][h_index]
            s_hist = self.z_hist_s_hists[z][s_index]
            #print 'z',z,'i_index',i_index, i
            #print self.z_hist_i_hists, np.shape(self.z_hist_i_hists)
            i_hist = self.z_hist_i_hists[z][i_index]
            return h_hist, s_hist, i_hist
        else:
            #print z,0
            return 0, 0, 0

    def get_voi_maxcount_height(self):
        """Center height of the most populated z-histogram slice (set by
        generate_voi_histogram)."""
        return self.z_hist_height_max
| [
[
1,
0,
0.0668,
0.0022,
0,
0.66,
0,
479,
0,
1,
0,
0,
479,
0,
0
],
[
1,
0,
0.069,
0.0022,
0,
0.66,
0.0909,
985,
0,
1,
0,
0,
985,
0,
0
],
[
1,
0,
0.0713,
0.0022,
0,
0... | [
"from features import features",
"import texture_features",
"import gaussian_curvature",
"import scipy.stats as stats",
"import numpy as np",
"import opencv as cv",
"import scipy.spatial.kdtree as kdtree",
"import util as ut # used for getTime, and cv2np, load_pickle, save_pickle",
"import os #[for ... |
from _direction import *
| [
[
1,
0,
1,
1,
0,
0.66,
0,
390,
0,
1,
0,
0,
390,
0,
0
]
] | [
"from _direction import *"
] |
#! /usr/bin/python
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Travis Deyle and Kelsey Hawkins (Healthcare Robotics Lab, Georgia Tech.)
# Fixed (x, y, z) pickup location used by grasp_goal_cb in sm_grasp();
# only x and y are read, and y is mirrored by arm_mult for the left arm.
# Presumably meters in the robot's base frame -- TODO confirm.
GRASP_LOCATION   = [ 0.50, -0.30, 0.00]
# Drop-off poses, one per object (indexed by object_number - 1).  Elements
# are (x, y, roll); y is mirrored for the left arm (see place_goal_cb).
PLACE_LOCATIONS = [[ 0.58,  0.13, 0.00],
                   [ 0.58,  0.21, 0.00],
                   [ 0.58,  0.29, 0.00]]
import sys
import roslib
roslib.load_manifest('hrl_pr2_experiments')
import rospy
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
import tf.transformations as tft
from pr2_grasp_behaviors.msg import OverheadGraspAction, OverheadGraspSetupAction
from pr2_grasp_behaviors.msg import OverheadGraspGoal, OverheadGraspSetupGoal
from pr2_controllers_msgs.msg import SingleJointPositionAction, SingleJointPositionGoal
from hrl_trajectory_playback.srv import TrajPlaybackSrv, TrajPlaybackSrvRequest
# Overhead grasping requres:
# run: hrl_pr2_gains/change_gains_grasp.sh
# roslaunch pr2_grasping_behaviors overhead_grasping_server_trained.launch
class NTries(smach.State):
    """SMACH state that succeeds for the first n executions, then aborts.

    The running attempt number is published through the 'ntries_counter'
    userdata key.  The `counter` attribute is deliberately public so other
    code can reset it to re-arm the state (see clear_grasp_tries).
    """
    def __init__(self, n):
        smach.State.__init__(self, outcomes=['succeeded', 'aborted'],
                                   output_keys=['ntries_counter'])
        # Number of completed executions so far; external code may reset it.
        self.counter = 0
        self.n = n

    def execute(self, userdata):
        self.counter = self.counter + 1
        userdata.ntries_counter = self.counter
        # Guard clause: past the limit we abort without logging.
        if self.counter > self.n:
            return 'aborted'
        rospy.logout( 'Executing NTries: On #%d of %d' % (self.counter, self.n))
        return 'succeeded'
def sm_grasp():
    """Build the tabletop-cleanup SMACH state machine.

    Reads the arm side ('r' or 'l') from sys.argv[1]; returns None when the
    argument is missing or invalid, otherwise the assembled StateMachine.
    Flow: raise torso -> untuck arm -> (operator presses enter) -> for each
    of 3 objects: up to 3 overhead-grasp attempts, then place the object at
    its fixed PLACE_LOCATIONS pose -> reset and tuck the arm.
    """
    if len(sys.argv) < 2 or sys.argv[1] not in ['r', 'l']:
        print "First arg should be 'r' or 'l'"
        return None
    arm = sys.argv[1]
    # arm_mult mirrors the fixed y coordinates for the left arm.
    if arm == 'r':
        arm_mult = 1
    else:
        arm_mult = -1

    # Create a SMACH state machine
    sm = smach.StateMachine(outcomes=['succeeded','aborted','preempted'])

    with sm:
        # Setup arm pose (out of way for perception)
        tgoal = SingleJointPositionGoal()
        #tgoal.position = 0.190 # all the way up is 0.200
        tgoal.position = 0.210 # all the way up is 0.200
        tgoal.min_duration = rospy.Duration( 2.0 )
        tgoal.max_velocity = 1.0
        smach.StateMachine.add(
            'TORSO_SETUP',
            SimpleActionState( 'torso_controller/position_joint_action',
                               SingleJointPositionAction,
                               goal = tgoal),
            transitions = { 'succeeded': 'ARM_UNTUCK' })

        smach.StateMachine.add(
            'ARM_UNTUCK',
            ServiceState('traj_playback/' + arm + '_arm_untuck', TrajPlaybackSrv),
            transitions = { 'succeeded': 'GRASP_BEGIN_SETUP' })

        # Setup arm pose (out of way for perception)
        smach.StateMachine.add(
            'GRASP_BEGIN_SETUP',
            SimpleActionState( arm + '_overhead_grasp_setup',
                               OverheadGraspSetupAction,
                               goal = OverheadGraspSetupGoal()),
            transitions = { 'succeeded': 'DEMO_START' })

        @smach.cb_interface(outcomes=['succeeded'])
        def wait_for_enter(ud):
            # Operator gate: block until a human confirms the demo may run.
            raw_input("Press enter to begin cleanup demo.")
            return 'succeeded'
        smach.StateMachine.add(
            'DEMO_START',
            smach.CBState(wait_for_enter),
            transitions = {'succeeded': 'THREE_OBJECTS'})

        # We will pick up 3 objects.
        smach.StateMachine.add(
            'THREE_OBJECTS',
            NTries( 3 ),
            transitions = {'succeeded':'THREE_TRIES',
                           'aborted':'RESET_ARMS'},
            remapping={'ntries_counter':'object_number'})

        # We will run the grasper at most 3 times.
        # Kept in a local so clear_grasp_tries below can reset its counter.
        grasp_tries = NTries( 3 )
        smach.StateMachine.add(
            'THREE_TRIES',
            grasp_tries,
            transitions = {'succeeded':'GRASP_SETUP',
                           'aborted':'aborted'})

        # Setup arm pose (out of way for perception)
        smach.StateMachine.add(
            'GRASP_SETUP',
            SimpleActionState( arm + '_overhead_grasp_setup',
                               OverheadGraspSetupAction,
                               goal = OverheadGraspSetupGoal()),
            transitions = { 'succeeded': 'GRASP' })

        def grasp_goal_cb(userdata, goal):
            # Vision-driven grasp at the fixed GRASP_LOCATION (y mirrored
            # for the left arm).
            ############################################################
            # Creating grasp goal
            grasp_goal = OverheadGraspGoal()
            grasp_goal.is_grasp = True
            grasp_goal.disable_head = False
            grasp_goal.disable_coll = False
            grasp_goal.grasp_type = OverheadGraspGoal.VISION_GRASP
            grasp_goal.x = GRASP_LOCATION[0]
            grasp_goal.y = arm_mult * GRASP_LOCATION[1]
            grasp_goal.behavior_name = "overhead_grasp"
            grasp_goal.sig_level = 0.999
            ############################################################
            return grasp_goal

        smach.StateMachine.add(
            'GRASP',
            SimpleActionState( arm + '_overhead_grasp',
                               OverheadGraspAction,
                               goal_cb = grasp_goal_cb),
            transitions = { 'succeeded': 'PLACE',
                            'aborted':'THREE_TRIES' })

        def place_goal_cb(userdata, goal):
            # Manual (fixed-pose) place at the slot for this object number.
            print "object Number", userdata.object_number
            ############################################################
            # Creating place place_goal
            place_goal = OverheadGraspGoal()
            place_goal.is_grasp = False
            place_goal.disable_head = False
            place_goal.disable_coll = False
            place_goal.grasp_type = OverheadGraspGoal.MANUAL_GRASP
            place_goal.x = PLACE_LOCATIONS[userdata.object_number-1][0]
            place_goal.y = arm_mult * PLACE_LOCATIONS[userdata.object_number-1][1]
            place_goal.roll = PLACE_LOCATIONS[userdata.object_number-1][2]
            place_goal.behavior_name = "overhead_grasp"
            place_goal.sig_level = 0.999
            ############################################################
            return place_goal

        def clear_grasp_tries(userdata, status, result):
            # After any place attempt, re-arm the grasp retry counter for
            # the next object.
            grasp_tries.counter = 0

        smach.StateMachine.add(
            'PLACE',
            SimpleActionState( arm + '_overhead_grasp',
                               OverheadGraspAction,
                               goal_cb = place_goal_cb,
                               result_cb = clear_grasp_tries,
                               input_keys = ['object_number']),
            transitions = { 'succeeded': 'THREE_OBJECTS',
                            'aborted':'THREE_OBJECTS' })

        # Setup arm pose (out of way for perception)
        smach.StateMachine.add(
            'RESET_ARMS',
            SimpleActionState( arm + '_overhead_grasp_setup',
                               OverheadGraspSetupAction,
                               goal = OverheadGraspSetupGoal()),
            transitions = { 'succeeded': 'ARM_TUCK' })

        # NOTE(review): the tuck state calls the '_arm_untuck' service with
        # request True -- presumably True means "tuck"; confirm against the
        # TrajPlaybackSrv definition.
        smach.StateMachine.add(
            'ARM_TUCK',
            ServiceState('traj_playback/' + arm + '_arm_untuck', TrajPlaybackSrv,
                         request=TrajPlaybackSrvRequest(True)),
            transitions = { 'succeeded': 'succeeded' })
    return sm
if __name__ == '__main__':
    # Build the machine, expose it to smach_viewer via the introspection
    # server, run it to completion, then shut the server down.
    rospy.init_node('smach_sm_grasp')

    sm = sm_grasp()

    sis = IntrospectionServer('Grasp Cleanup', sm, '/SM_GRASP_CLEANUP')
    sis.start()
    outcome = sm.execute()
    sis.stop()
| [
[
14,
0,
0.1348,
0.0043,
0,
0.66,
0,
861,
0,
0,
0,
0,
0,
5,
0
],
[
14,
0,
0.1435,
0.013,
0,
0.66,
0.0625,
458,
0,
0,
0,
0,
0,
5,
0
],
[
1,
0,
0.1565,
0.0043,
0,
0.6... | [
"GRASP_LOCATION = [ 0.50, -0.30, 0.00]",
"PLACE_LOCATIONS = [[ 0.58, 0.13, 0.00],\n [ 0.58, 0.21, 0.00],\n [ 0.58, 0.29, 0.00]]",
"import sys",
"import roslib",
"roslib.load_manifest('hrl_pr2_experiments')",
"import rospy",
"import smach",
"from smach_ros i... |
#! /usr/bin/python
import sys
import numpy as np
import roslib
roslib.load_manifest('hrl_pr2_arms')
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
import rospy
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
import tf
import tf.transformations as tf_trans
from std_msgs.msg import Bool, Float32
from std_srvs.srv import Empty
from geometry_msgs.msg import PoseStamped, Vector3
from actionlib_msgs.msg import GoalStatus
#from hrl_trajectory_playback.srv import TrajPlaybackSrv, TrajPlaybackSrvRequest
from hrl_generic_arms.pose_converter import PoseConverter
from hrl_pr2_arms.pr2_arm import create_pr2_arm
from hrl_pr2_arms.pr2_arm_hybrid import PR2ArmHybridForce
class ClickMonitor(smach.State):
    """Wait for a user click published as a PoseStamped on /pixel3d.

    Outcomes:
        click    -- a pose arrived; it is stored in userdata.click_pose
        shutdown -- rospy shut down while waiting
    """
    def __init__(self):
        smach.State.__init__(self, outcomes=['click', 'shutdown'],
                                   output_keys=['click_pose'])
        self.cur_msg = None
        rospy.Subscriber('/pixel3d', PoseStamped, self.click_cb)

    def click_cb(self, msg):
        # Latch the most recent click; execute() polls for it.
        self.cur_msg = msg

    def execute(self, userdata):
        # Drop any click that arrived before we started waiting.
        self.cur_msg = None
        while not rospy.is_shutdown():
            latest = self.cur_msg
            if latest is not None:
                userdata.click_pose = latest
                return 'click'
            rospy.sleep(0.01)
        return 'shutdown'
class TFPubLoop(object):
    """Rebroadcast a single TF transform at a fixed rate.

    The transform is parent_frame_name -> child_frame_name; nothing is
    published until update_pose() has been called once.
    """
    def __init__(self, parent_frame_name, child_frame_name, rate=100):
        self.child_frame_name = child_frame_name
        self.parent_frame_name = parent_frame_name
        self.tf_broad = tf.TransformBroadcaster()
        # Fire pub_tf at `rate` Hz for the lifetime of the node.
        self.timer = rospy.Timer(rospy.Duration(1. / rate), self.pub_tf)
        # (pos, quat) tuple; stays None until update_pose() is called.
        self.tf_pose = None

    def pub_tf(self, timer_info):
        if self.tf_pose is None:
            return
        pos, quat = self.tf_pose
        self.tf_broad.sendTransform(pos, quat, rospy.Time.now(),
                                    self.child_frame_name, self.parent_frame_name)

    def update_pose(self, pose):
        # Convert once, then publish immediately instead of waiting for
        # the next timer tick.
        self.tf_pose = PoseConverter.to_pos_quat(pose)
        self.pub_tf(None)
class SMTouchSimple(object):
    """Turn RViz clicks (/pixel3d) into a force-control contact frame for
    the PR2 left arm, via small SMACH state machines."""

    def __init__(self):
        self.tf_listener = tf.TransformListener()
        # Visualization publishers for the generated trajectory frames.
        self.start_frame_pub = rospy.Publisher("~start_frame", PoseStamped)
        self.end_frame_pub = rospy.Publisher("~end_frame", PoseStamped)
        self.arm = create_pr2_arm('l', PR2ArmHybridForce)
        # Continuously broadcast the clicked contact frame in torso frame.
        self.tf_pub = TFPubLoop("/torso_lift_link", "/contact_control_frame")

    def get_transform(self, from_frame, to_frame, time=None):
        """Look up a TF transform; returns None on lookup failure.

        NOTE(review): 'util' is never imported in this file, so the
        success path raises NameError -- looks like a missing import
        (or leftover from another module); confirm before relying on it.
        """
        if time is None:
            time = rospy.Time.now()
        try:
            self.tf_listener.waitForTransform(from_frame, to_frame, time, rospy.Duration(5))
            pos, quat = self.tf_listener.lookupTransform(from_frame, to_frame, time)
            return util.pose_pq_to_mat(pos, quat)
        except (tf.Exception, tf.LookupException, tf.ConnectivityException):
            return None

    def get_trajectory_generator(self):
        """Return a CBState that turns a start and end click into start/end
        trajectory frames (position + rotation) in the click frame."""
        @smach.cb_interface(input_keys=['start_click', 'end_click'],
                            output_keys=['start_traj_frame', 'end_traj_frame'],
                            outcomes=['succeeded'])
        def generate_trajectory(ud):
            # Homogeneous transforms of the two clicked poses.
            b_B_s = PoseConverter.to_homo_mat(ud.start_click)
            b_B_e = PoseConverter.to_homo_mat(ud.end_click)
            # End pose expressed in the start-click frame.
            s_B_e = (b_B_s ** -1) * b_B_e
            # Trajectory basis: velocity direction, its orthogonal in the
            # surface, and the start pose's z axis as surface normal.
            b_normal = b_B_s[:3, 2] / np.linalg.norm(b_B_s[:3, 2])
            s_vel = np.mat([s_B_e[0, 3], s_B_e[1, 3], 0]).T
            s_vel = s_vel / np.linalg.norm(s_vel)
            b_vel = b_B_s[:3, :3].T * s_vel
            b_ortho = np.mat(np.cross(b_normal.T, b_vel.T)).T
            b_ortho = b_ortho / np.linalg.norm(b_ortho)
            b_R_traj = np.vstack([b_vel.T, b_ortho.T, b_normal.T])
            b_p_start = b_B_s[:3, 3]
            b_p_end = b_B_e[:3, 3]
            # NOTE(review): the author's TODO below overwrites the computed
            # end position with the scalar 3 -- the published end frame is
            # bogus until this is resolved.
            b_p_end = 3 #TODO TODO
            self.start_frame_pub.publish(PoseConverter.to_pose_stamped_msg(ud.start_click.header.frame_id,
                                                                           (b_p_start, b_R_traj)))
            self.end_frame_pub.publish(PoseConverter.to_pose_stamped_msg(ud.start_click.header.frame_id,
                                                                         (b_p_end, b_R_traj)))
            ud.start_traj_frame = (b_p_start, b_R_traj)
            ud.end_traj_frame = (b_p_end, b_R_traj)
            return 'succeeded'
        return smach.CBState(generate_trajectory)

    def get_sm(self):
        """Two-click machine: start click, end click, then trajectory
        generation; loops back to waiting for the next start click."""
        sm = smach.StateMachine(outcomes=['succeeded','preempted','shutdown'])
        with sm:
            smach.StateMachine.add(
                'INPUT_START_CLICK',
                ClickMonitor(),
                transitions={'click' : 'INPUT_END_CLICK',
                             'shutdown' : 'shutdown'},
                remapping={'click_pose' : 'start_click'}) # output (PoseStamped)
            smach.StateMachine.add(
                'INPUT_END_CLICK',
                ClickMonitor(),
                transitions={'click' : 'GENERATE_TRAJECTORY',
                             'shutdown' : 'shutdown'},
                remapping={'click_pose' : 'end_click'}) # output (PoseStamped)
            smach.StateMachine.add(
                'GENERATE_TRAJECTORY',
                self.get_trajectory_generator(),
                transitions={'succeeded' : 'INPUT_START_CLICK'})
        return sm

    def get_sm_basic(self):
        """Single-click machine: each click becomes the hybrid-force
        controller's tool/contact frame; loops forever."""
        sm = smach.StateMachine(outcomes=['succeeded','preempted','shutdown'])
        with sm:
            smach.StateMachine.add(
                'INPUT_START_CLICK',
                ClickMonitor(),
                transitions={'click' : 'PUBLISH_CONTROL_FRAME',
                             'shutdown' : 'shutdown'},
                remapping={'click_pose' : 'start_click'}) # output (PoseStamped)

            @smach.cb_interface(input_keys=['start_click'],
                                outcomes=['succeeded', 'failed'])
            def publish_control_frame(ud):
                # x == -10000.0 appears to be the click node's "no point"
                # sentinel -- TODO confirm against the /pixel3d publisher.
                if ud.start_click.pose.position.x == -10000.0:
                    return 'failed'
                pose = ud.start_click
                # Stamp 0 = "latest available" for the TF transform below.
                pose.header.stamp = rospy.Time(0)
                click_torso = self.tf_listener.transformPose("/torso_lift_link", pose)
                # Broadcast the clicked pose as the contact control frame,
                # give TF a moment, then point the arm controller at it.
                self.tf_pub.update_pose(click_torso)
                rospy.sleep(1)
                self.arm.set_tip_frame("/contact_control_frame")
                #self.arm.set_motion_gains(p_trans=[300, 300, 100])
                self.arm.update_gains()
                return 'succeeded'
            smach.StateMachine.add(
                'PUBLISH_CONTROL_FRAME',
                smach.CBState(publish_control_frame),
                transitions={'succeeded' : 'INPUT_START_CLICK',
                             'failed' : 'INPUT_START_CLICK'})
        return sm
def main():
    """Run the single-click contact-frame state machine with smach_viewer
    introspection until shutdown."""
    rospy.init_node('pr2_touch_simple')

    smts = SMTouchSimple()
    sm = smts.get_sm_basic()
    # Give publishers/subscribers a moment to connect before executing.
    rospy.sleep(1)

    sis = IntrospectionServer('touch_simple', sm, '/INPUT_START_CLICK')
    sis.start()
    outcome = sm.execute()
    sis.stop()

if __name__ == '__main__':
    main()
| [
[
1,
0,
0.0159,
0.0053,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0212,
0.0053,
0,
0.66,
0.0435,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0317,
0.0053,
0,
... | [
"import sys",
"import numpy as np",
"import roslib",
"roslib.load_manifest('hrl_pr2_arms')",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"import rospy",
"import smach",
"from smach_ros import SimpleActionState, ServiceState, IntrospectionServer",
"import actionlib",
... |
#! /usr/bin/python
import sys
import numpy as np
import scipy.io
import roslib
roslib.load_manifest('hrl_phri_2011')
import rospy
import rosbag
def main():
field_map = {
"time_offset" : "time_offset",
"tool_frame.transform.translation.x" : "pos_x",
"tool_frame.transform.translation.y" : "pos_y",
"tool_frame.transform.translation.z" : "pos_z",
"tool_frame.transform.rotation.x" : "rot_x",
"tool_frame.transform.rotation.y" : "rot_y",
"tool_frame.transform.rotation.z" : "rot_z",
"tool_frame.transform.rotation.w" : "rot_w",
"pc_pt.x" : "pc_pt_x",
"pc_pt.y" : "pc_pt_y",
"pc_pt.z" : "pc_pt_z",
"pc_normal.x" : "pc_norm_x",
"pc_normal.y" : "pc_norm_y",
"pc_normal.z" : "pc_norm_z",
"pc_dist" : "pc_dist",
"wrench.force.x" : "force_x",
"wrench.force.y" : "force_y",
"wrench.force.z" : "force_z",
"wrench.torque.x" : "torque_x",
"wrench.torque.y" : "torque_y",
"wrench.torque.z" : "torque_z",
"force_magnitude" : "force_mag",
"force_normal" : "force_norm",
"force_tangental" : "force_tan",
"contact_period" : "contact_period",
"time_from_contact_start" : "time_contact",
"ell_coords.x" : "lat",
"ell_coords.y" : "long",
"ell_coords.z" : "height",
}
data = {}
for field in field_map:
data[field_map[field]] = []
bag = rosbag.Bag(sys.argv[1], 'r')
for topic, fp, t in bag.read_messages(topics=["/force_processed"]):
for field in field_map:
exec("data['%s'].append(fp.%s)" % (field_map[field], field))
bag.close()
output_file = ".".join(sys.argv[1].split(".")[:-1] + ["mat"])
scipy.io.savemat(output_file, data)
print "Saved mat file to:", output_file
if __name__ == "__main__":
main()
| [
[
1,
0,
0.0492,
0.0164,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0656,
0.0164,
0,
0.66,
0.125,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.082,
0.0164,
0,
0.... | [
"import sys",
"import numpy as np",
"import scipy.io",
"import roslib",
"roslib.load_manifest('hrl_phri_2011')",
"import rospy",
"import rosbag",
"def main():\n\n field_map = {\n \"time_offset\" : \"time_offset\",\n \"tool_frame.transform.translation.x\" : \"pos_x\",\n \"tool_f... |
#! /usr/bin/python
import sys
import numpy as np
import roslib
roslib.load_manifest('hrl_pr2_arms')
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
import rospy
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
import tf
import tf.transformations as tf_trans
from std_msgs.msg import Bool, Float32
from std_srvs.srv import Empty
from geometry_msgs.msg import PoseStamped, Vector3
from actionlib_msgs.msg import GoalStatus
from pr2_controllers_msgs.msg import Pr2GripperCommandAction, Pr2GripperCommandGoal
from hrl_generic_arms.pose_converter import PoseConverter
from hrl_pr2_arms.pr2_arm import create_pr2_arm
from hrl_pr2_arms.pr2_arm_hybrid import PR2ArmHybridForce
def main():
    """Open/close the PR2 left gripper to load or unload a tool.

    Default behavior: open the gripper fully, wait -w seconds for the
    operator to seat the tool, then clamp at effort 30.  With -r the
    gripper is relaxed (effort 0, no wait); with -t it is re-tightened
    (effort 30).
    """
    rospy.init_node("load_tool")

    from optparse import OptionParser
    p = OptionParser()
    p.add_option('-w', '--wait', dest="wait", default=6,
                 help="Set wait time.")
    p.add_option('-r', '--relax', dest="relax", default=False,
                 action="store_true", help="Set the gripper torque to 0.")
    p.add_option('-t', '--tighten', dest="tighten", default=False,
                 action="store_true", help="Set the gripper torque to 30.")
    (opts, args) = p.parse_args()

    client = actionlib.SimpleActionClient('l_gripper_controller/gripper_action', Pr2GripperCommandAction)
    client.wait_for_server()

    def command_gripper(position, max_effort, wait_result=True):
        # Send one gripper goal; optionally block until it completes.
        goal = Pr2GripperCommandGoal()
        goal.command.position = position
        goal.command.max_effort = max_effort
        client.send_goal(goal)
        if wait_result:
            client.wait_for_result()

    if opts.relax:
        # Effort 0 lets the gripper go limp; fire and forget.
        command_gripper(0, 0, wait_result=False)
        return
    if opts.tighten:
        command_gripper(0, 30)
        return

    # Open fully (effort -1 = unlimited), give the operator time to seat
    # the tool, then clamp down on it.
    command_gripper(1, -1)
    rospy.sleep(float(opts.wait))
    command_gripper(0, 30)

if __name__ == "__main__":
    main()
| [
[
1,
0,
0.0411,
0.0137,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0548,
0.0137,
0,
0.66,
0.0476,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0822,
0.0137,
0,
... | [
"import sys",
"import numpy as np",
"import roslib",
"roslib.load_manifest('hrl_pr2_arms')",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"import rospy",
"import smach",
"from smach_ros import SimpleActionState, ServiceState, IntrospectionServer",
"import actionlib",
... |
#! /usr/bin/python
import sys
import numpy as np
import scipy.io
import roslib
roslib.load_manifest('hrl_phri_2011')
import rospy
import rosbag
def main():
field_map = {
"/l_cart/f_cmd" :
{
"wrench.force.x" : "f_cmd_x",
"wrench.force.y" : "f_cmd_y",
"wrench.force.z" : "f_cmd_z",
"wrench.torque.x" : "t_cmd_x",
"wrench.torque.y" : "t_cmd_y",
"wrench.torque.z" : "t_cmd_z"
},
"/l_cart/f_err" :
{
"wrench.force.x" : "f_err_x",
"wrench.force.y" : "f_err_y",
"wrench.force.z" : "f_err_z",
"wrench.torque.x" : "t_err_x",
"wrench.torque.y" : "t_err_y",
"wrench.torque.z" : "t_err_z"
},
"/l_cart/k_effective" :
{
"wrench.force.x" : "k_eff_x",
"wrench.force.y" : "k_eff_y",
"wrench.force.z" : "k_eff_z",
"wrench.torque.x" : "kt_eff_x",
"wrench.torque.y" : "kt_eff_y",
"wrench.torque.z" : "kt_eff_z"
},
"/l_cart/sensor_ft" :
{
"wrench.force.x" : "f_sens_x",
"wrench.force.y" : "f_sens_y",
"wrench.force.z" : "f_sens_z",
"wrench.torque.x" : "t_sens_x",
"wrench.torque.y" : "t_sens_y",
"wrench.torque.z" : "t_sens_z"
},
"/l_cart/sensor_raw_ft" :
{
"wrench.force.x" : "f_sens_raw_x",
"wrench.force.y" : "f_sens_raw_y",
"wrench.force.z" : "f_sens_raw_z",
"wrench.torque.x" : "t_sens_raw_x",
"wrench.torque.y" : "t_sens_raw_y",
"wrench.torque.z" : "t_sens_raw_z"
},
"/l_cart/state/x" :
{
"pose.position.x" : "pos_x",
"pose.position.y" : "pos_y",
"pose.position.z" : "pos_z",
"pose.orientation.x" : "rot_x",
"pose.orientation.y" : "rot_y",
"pose.orientation.z" : "rot_z",
"pose.orientation.w" : "rot_w",
},
"/l_cart/state/xd" :
{
"linear.x" : "vel_x",
"linear.y" : "vel_y",
"linear.z" : "vel_z",
"angular.x" : "avel_x",
"angular.y" : "avel_y",
"angular.z" : "avel_z"
},
"/l_cart/x_err" :
{
"linear.x" : "pos_err_x",
"linear.y" : "pos_err_y",
"linear.z" : "pos_err_z",
"angular.x" : "rot_err_x",
"angular.y" : "rot_err_y",
"angular.z" : "rot_err_z"
}
}
data = {}
for topic in field_map:
for field in field_map[topic]:
data[field_map[topic][field]] = []
bag = rosbag.Bag(sys.argv[1], 'r')
for topic in field_map:
for tp, fp, t in bag.read_messages(topics=[topic]):
for field in field_map[topic]:
exec("data['%s'].append(fp.%s)" % (field_map[topic][field], field))
bag.close()
output_file = sys.argv[1].split(".")[0] + ".mat"
scipy.io.savemat(output_file, data)
print "Saved mat file to:", output_file
if __name__ == "__main__":
main()
| [
[
1,
0,
0.028,
0.0093,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0374,
0.0093,
0,
0.66,
0.125,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0467,
0.0093,
0,
0.... | [
"import sys",
"import numpy as np",
"import scipy.io",
"import roslib",
"roslib.load_manifest('hrl_phri_2011')",
"import rospy",
"import rosbag",
"def main():\n\n field_map = {\n \"/l_cart/f_cmd\" : \n {\n \"wrench.force.x\" : \"f_cmd_x\",\n \"wrench.force.y\" : ... |
#! /usr/bin/python
import sys
import numpy as np
import scipy.io
import scipy.stats
import matplotlib.pyplot as plt
import roslib
roslib.load_manifest('hrl_phri_2011')
import rospy
import rosbag
def main():
bag = rosbag.Bag(sys.argv[1], 'r')
x_forces, cur_x_forces = [], []
for topic, msg, t in bag.read_messages(topics=["/l_cart/sensor_ft"]):
f = [msg.wrench.force.x, msg.wrench.force.y, msg.wrench.force.z]
f_mag = np.linalg.norm(f)
if f_mag > 0.5:
cur_x_forces.append(msg.wrench.force.x)
else:
if len(cur_x_forces) >= 20:
x_forces.extend(cur_x_forces)
cur_x_forces = []
if len(cur_x_forces) >= 20:
x_forces.extend(cur_x_forces)
bag.close()
ptile_inds = [25, 50, 75, 95]
ptiles = {}
for ptile in ptile_inds:
ptiles[ptile] = scipy.stats.scoreatpercentile(x_forces, ptile)
print "Percentile %d: %f" % (ptile, ptiles[ptile])
#plt.plot(x_forces)
#plt.show()
if __name__ == "__main__":
main()
| [
[
1,
0,
0.0769,
0.0256,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.1026,
0.0256,
0,
0.66,
0.1,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.1282,
0.0256,
0,
0.6... | [
"import sys",
"import numpy as np",
"import scipy.io",
"import scipy.stats",
"import matplotlib.pyplot as plt",
"import roslib",
"roslib.load_manifest('hrl_phri_2011')",
"import rospy",
"import rosbag",
"def main():\n bag = rosbag.Bag(sys.argv[1], 'r')\n x_forces, cur_x_forces = [], []\n ... |
#! /usr/bin/python
import sys
import numpy as np
import scipy.io
import roslib
roslib.load_manifest('hrl_phri_2011')
import rospy
import rosbag
def main():
bag = rosbag.Bag(sys.argv[1], 'r')
prefix = ".".join(sys.argv[1].split(".")[:-1])
fixed_file = prefix + "_fixed.bag"
fixed_bag = rosbag.Bag(fixed_file, 'w')
for topic, tf_msg, t in bag.read_messages():
if topic == "/tf":
if len(tf_msg.transforms) > 0 and tf_msg.transforms[0].child_frame_id == "/tool_netft_raw_frame":
tf_msg.transforms[0].transform.rotation.x = 0
tf_msg.transforms[0].transform.rotation.y = 0
tf_msg.transforms[0].transform.rotation.z = 1
tf_msg.transforms[0].transform.rotation.w = 0
fixed_bag.write(topic, tf_msg, t)
bag.close()
fixed_bag.close()
print "Saved fixed bag to:", fixed_file
if __name__ == "__main__":
main()
| [
[
1,
0,
0.0938,
0.0312,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.125,
0.0312,
0,
0.66,
0.125,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.1562,
0.0312,
0,
0.... | [
"import sys",
"import numpy as np",
"import scipy.io",
"import roslib",
"roslib.load_manifest('hrl_phri_2011')",
"import rospy",
"import rosbag",
"def main():\n bag = rosbag.Bag(sys.argv[1], 'r')\n prefix = \".\".join(sys.argv[1].split(\".\")[:-1])\n fixed_file = prefix + \"_fixed.bag\"\n ... |
#! /usr/bin/python
import roslib
roslib.load_manifest('hrl_generic_arms')
import rospy
import tf
import tf.transformations as tf_trans
from hrl_generic_arms.pose_converter import PoseConverter
def main():
    """Continuously republish ~parent_frame as a child of ~link_frame.

    Composes a fixed transform (read once from the ~link_transform param,
    a dict with 'pos' and 'quat') with the live child->parent transform
    looked up from TF, and broadcasts the result at ~rate Hz (default 100).
    """
    rospy.init_node("tf_link_flipper")
    child_frame = rospy.get_param("~child_frame")
    parent_frame = rospy.get_param("~parent_frame")
    link_frame = rospy.get_param("~link_frame")
    rate = rospy.get_param("~rate", 100)
    link_trans = rospy.get_param("~link_transform")
    # Fixed homogeneous transform built from the parameter server.
    # NOTE(review): naming convention appears to be a_B_c = frame c expressed
    # in frame a (link <- child here) -- confirm against PoseConverter usage.
    l_B_c = PoseConverter.to_homo_mat(link_trans['pos'], link_trans['quat'])
    tf_broad = tf.TransformBroadcaster()
    tf_listener = tf.TransformListener()
    rospy.sleep(1)  # let the listener's TF buffer fill before the loop
    r = rospy.Rate(rate)
    while not rospy.is_shutdown():
        time = rospy.Time.now()
        # Block up to 1 s for the latest available child->parent transform.
        tf_listener.waitForTransform(child_frame, parent_frame, rospy.Time(0), rospy.Duration(1))
        pos, quat = tf_listener.lookupTransform(child_frame, parent_frame, rospy.Time(0))
        c_B_p = PoseConverter.to_homo_mat(pos, quat)
        # Chain the fixed and live transforms: (link<-child) * (child<-parent).
        l_B_p = l_B_c * c_B_p
        tf_pos, tf_quat = PoseConverter.to_pos_quat(l_B_p)
        # Broadcast parent_frame relative to link_frame, stamped "now".
        tf_broad.sendTransform(tf_pos, tf_quat, time, parent_frame, link_frame)
        r.sleep()
if __name__ == "__main__":
    main()
| [
[
1,
0,
0.0541,
0.027,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0811,
0.027,
0,
0.66,
0.1429,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1081,
0.027,
0,
0.66,... | [
"import roslib",
"roslib.load_manifest('hrl_generic_arms')",
"import rospy",
"import tf",
"import tf.transformations as tf_trans",
"from hrl_generic_arms.pose_converter import PoseConverter",
"def main():\n rospy.init_node(\"tf_link_flipper\")\n\n child_frame = rospy.get_param(\"~child_frame\")\n ... |
#! /usr/bin/python
import sys
import numpy as np
import roslib
roslib.load_manifest('hrl_pr2_arms')
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
import rospy
import rosbag
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
import tf
import tf.transformations as tf_trans
from std_msgs.msg import Bool, Float32, Float64MultiArray
from std_srvs.srv import Empty
from geometry_msgs.msg import PoseStamped, Vector3
from actionlib_msgs.msg import GoalStatus
from hrl_generic_arms.pose_converter import PoseConverter
from hrl_pr2_arms.pr2_arm import create_pr2_arm
from hrl_pr2_arms.pr2_arm_hybrid import PR2ArmHybridForce
def main():
    """Compliant teleop positioning of the left arm, then pose recording.

    While running, the equilibrium point chases the hand whenever a person
    displaces it beyond a small threshold (soft "follow" mode).  When ROS
    shuts the node down, the current pose is latched, rotation gains are
    stiffened, and the final pose plus a posture seed are written to
    hrl_phri_2011/data/saved_teleop_pose.bag.
    """
    rospy.init_node("teleop_positioner")
    from optparse import OptionParser
    p = OptionParser()
    p.add_option('-r', '--rate', dest="rate", default=10,
                 help="Set rate.")
    (opts, args) = p.parse_args()

    arm = create_pr2_arm('l', PR2ArmHybridForce)
    rospy.sleep(0.1)
    arm.zero_sensor()
    # Latch the current pose as the initial equilibrium point.
    cur_pose = arm.get_end_effector_pose()
    arm.set_ep(cur_pose, 1)

    # Motion-only control (no force-controlled axes) with soft gains so a
    # person can push the arm around by hand.
    arm.set_force_directions([])
    arm.set_force_gains(p_trans=[3, 1, 1], p_rot=0.5, i_trans=[0.002, 0.001, 0.001], i_max_trans=[10, 5, 5], i_rot=0, i_max_rot=0)
    arm.set_motion_gains(p_trans=400, d_trans=[16, 10, 10], p_rot=[10, 10, 10], d_rot=0)
    arm.set_tip_frame("/l_gripper_tool_frame")
    arm.update_gains()
    arm.set_force(6 * [0])
    r = rospy.Rate(float(opts.rate))
    while not rospy.is_shutdown():
        ep_pose = arm.get_ep()
        cur_pose = arm.get_end_effector_pose()
        err_ep = arm.ep_error(cur_pose, ep_pose)
        # If the hand was pushed >1.2 cm or rotated >pi/8 away from the
        # equilibrium point, move the EP to the hand (follow the person).
        if np.linalg.norm(err_ep[0:3]) > 0.012 or np.linalg.norm(err_ep[3:]) > np.pi / 8.:
            arm.set_ep(cur_pose, 1)
        r.sleep()
    # Shutdown path: freeze at the final pose.
    cur_pose = arm.get_end_effector_pose()
    arm.set_ep(cur_pose, 1)
    # Posture seed: keep the first 3 joints, leave the rest free (9999 appears
    # to act as a "don't care" sentinel -- confirm in PR2ArmHybridForce).
    q = arm.get_joint_angles()
    q_posture = q.tolist()[0:3] + 4 * [9999]
    arm.set_posture(q_posture)
    # Stiffen rotation so the latched orientation holds.
    arm.set_motion_gains(p_trans=400, d_trans=[16, 10, 10], p_rot=[20, 50, 50], d_rot=0)
    arm.update_gains()
    print PoseConverter.to_pos_quat(cur_pose)
    # Persist the pose and posture for later replay (switch_controller -r).
    pkg_dir = roslib.rospack.rospackexec(["find", "hrl_phri_2011"])
    bag = rosbag.Bag(pkg_dir + "/data/saved_teleop_pose.bag", 'w')
    bag.write("/teleop_pose", PoseConverter.to_pose_msg(cur_pose))
    q_posture_msg = Float64MultiArray()
    q_posture_msg.data = q_posture
    bag.write("/teleop_posture", q_posture_msg)
    bag.close()
if __name__ == "__main__":
    main()
[
1,
0,
0.0405,
0.0135,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0541,
0.0135,
0,
0.66,
0.0476,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0811,
0.0135,
0,
... | [
"import sys",
"import numpy as np",
"import roslib",
"roslib.load_manifest('hrl_pr2_arms')",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"import rospy",
"import rosbag",
"import smach",
"from smach_ros import SimpleActionState, ServiceState, IntrospectionServer",
"i... |
#! /usr/bin/python
import sys
import numpy as np
import roslib
roslib.load_manifest('hrl_pr2_arms')
roslib.load_manifest('smach_ros')
roslib.load_manifest('actionlib')
import rospy
import rosbag
import smach
from smach_ros import SimpleActionState, ServiceState, IntrospectionServer
import actionlib
import tf
import tf.transformations as tf_trans
from std_msgs.msg import Bool, Float32
from std_srvs.srv import Empty
from geometry_msgs.msg import PoseStamped, Vector3, Pose
from actionlib_msgs.msg import GoalStatus
from hrl_generic_arms.pose_converter import PoseConverter
from hrl_pr2_arms.pr2_arm import create_pr2_arm
from hrl_pr2_arms.pr2_arm_hybrid import PR2ArmHybridForce
def main():
    """Command-line switcher for the left-arm hybrid force controller.

    One mode per invocation: -z zero the force/torque sensor and exit;
    -r replay the pose/posture saved by the teleop positioner; -s stiff
    position control; -i impedance (compliant x) control; -f force control
    along x with magnitude -m.  -c overrides the x translational stiffness,
    -x caps the force magnitude, -t sets the controlled tip frame.
    """
    rospy.init_node("switch_controller")
    from optparse import OptionParser
    p = OptionParser()
    p.add_option('-s', '--stiff', dest="stiff", default=False,
                 action="store_true", help="Enable stiff controller.")
    p.add_option('-f', '--force', dest="force", default=False,
                 action="store_true", help="Enable force controller.")
    p.add_option('-m', '--force_mag', dest="force_mag", default=2,
                 help="Specify force magnitude.")
    p.add_option('-x', '--max_force', dest="max_force", default=-1,
                 help="Specify max force magnitude.")
    p.add_option('-i', '--impedance', dest="impedance", default=False,
                 action="store_true", help="Enable impedance controller.")
    p.add_option('-c', '--compliance', dest="compliance", default=-1,
                 help="Enable impedance controller.")
    p.add_option('-t', '--tip_frame', dest="tip_frame", default="/l_gripper_tool_frame",
                 help="Set tip to this frame.")
    p.add_option('-z', '--zero_sensor', dest="zero_sensor", default=False,
                 action="store_true", help="Just zero the sensor.")
    p.add_option('-r', '--reset_pose', dest="reset_pose", default=False,
                 action="store_true", help="Use the saved position in the data file.")
    (opts, args) = p.parse_args()

    arm = create_pr2_arm('l', PR2ArmHybridForce)
    rospy.sleep(0.1)

    # reset arm
    arm.zero_sensor()
    if opts.zero_sensor:
        return
    arm.set_force(6 * [0])
    #

    if opts.reset_pose:
        # Replay the pose/posture recorded by the teleop positioner node.
        pkg_dir = roslib.rospack.rospackexec(["find", "hrl_phri_2011"])
        bag = rosbag.Bag(pkg_dir + "/data/saved_teleop_pose.bag", 'r')
        for topic, msg, stamp in bag.read_messages("/teleop_pose"):
            pose = PoseConverter.to_pos_rot([msg.position.x, msg.position.y, msg.position.z],
                                            [msg.orientation.x, msg.orientation.y, msg.orientation.z,
                                             msg.orientation.w])
        for topic, msg, stamp in bag.read_messages("/teleop_posture"):
            posture = msg.data
        bag.close()
        arm.set_posture(posture)
        # Walk to the saved pose in 100 interpolated steps, 0.1 s apart.
        i_poses = arm.interpolate_ep(arm.get_end_effector_pose(), pose, 100)
        for cur_pose in i_poses:
            arm.set_ep(cur_pose, 1)
            rospy.sleep(0.1)
        return

    # set common parameters
    arm.set_force_max([float(opts.max_force), -1, -1, -1, -1, -1])
    arm.set_tip_frame(opts.tip_frame)

    if opts.stiff:
        # Stiff position control; -c can soften the x translation (default 1300).
        compliance = float(opts.compliance)
        if compliance < 0:
            compliance = 1300
        #arm.set_force_gains(p_trans=[1, 1, 1], p_rot=0.1, i_trans=[0.002, 0.001, 0.001], i_max_trans=[10, 5, 5], i_rot=0, i_max_rot=0)
        arm.set_force_gains(p_trans=[1, 0, 0], p_rot=0.1, i_trans=[0.002, 0, 0], i_max_trans=[10, 0, 0], i_rot=0, i_max_rot=0)
        arm.set_motion_gains(p_trans=[compliance, 1300, 1300], d_trans=[16, 10, 10], p_rot=120, d_rot=0)
        arm.set_force_directions([])
        arm.set_force(6 * [0])
    elif opts.impedance:
        # Compliant x translation (default stiffness 80), no force-controlled axes.
        compliance = float(opts.compliance)
        if compliance < 0:
            compliance = 80
        arm.set_force_gains(p_trans=[3, 1, 1], p_rot=0.1, i_trans=[0.002, 0.001, 0.001], i_max_trans=[10, 5, 5], i_rot=0, i_max_rot=0)
        arm.set_motion_gains(p_trans=[compliance, 1300, 1300], d_trans=[16, 10, 10], p_rot=120, d_rot=0)
        arm.set_force_directions([])
        arm.set_force(6 * [0])
    elif opts.force:
        # Force control along x with the commanded magnitude -m.
        arm.set_force_gains(p_trans=[3, 1, 1], p_rot=0.1, i_trans=[0.002, 0.001, 0.001], i_max_trans=[10, 5, 5], i_rot=0, i_max_rot=0)
        arm.set_motion_gains(p_trans=[float(opts.compliance), 1300, 1300], d_trans=[16, 10, 10], p_rot=120, d_rot=0)
        arm.set_force_directions(['x'])
        arm.set_force([float(opts.force_mag), 0, 0, 0, 0, 0])
    else:
        p.print_help()
        return
    # Gains only take effect once pushed to the controller.
    arm.update_gains()
if __name__ == "__main__":
    main()
| [
[
1,
0,
0.027,
0.009,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.036,
0.009,
0,
0.66,
0.0476,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0541,
0.009,
0,
0.66,... | [
"import sys",
"import numpy as np",
"import roslib",
"roslib.load_manifest('hrl_pr2_arms')",
"roslib.load_manifest('smach_ros')",
"roslib.load_manifest('actionlib')",
"import rospy",
"import rosbag",
"import smach",
"from smach_ros import SimpleActionState, ServiceState, IntrospectionServer",
"i... |
class TaskError(Exception):
    """Exception carrying an arbitrary task-failure value in ``parameter``."""

    def __init__(self, value):
        Exception.__init__(self, value)
        self.parameter = value

    def __str__(self):
        return "%r" % (self.parameter,)
class ActionType:
    """Bundles the input and output parameter specs of a behavior."""

    def __init__(self, inputs, outputs):
        self.inputs, self.outputs = inputs, outputs
class ParamType:
    """One action parameter: its name, a type tag, and optional choice list."""

    def __init__(self, name, ptype, options=None):
        self.name, self.ptype, self.options = name, ptype, options
class Action:
    """A named behavior invocation with its bound parameter list."""

    def __init__(self, name, params):
        self.name, self.params = name, params
class BehaviorDescriptor:
    """Registry of primitive behaviors plus a seed action sequence.

    On construction, declares the I/O signatures of the 'twist' and
    'linear_move' behaviors, builds a two-step seed plan, and hands it to
    ``run`` (a no-op hook in this stub).
    """

    def __init__(self):
        self.descriptors = {
            'twist': ActionType([ParamType('angle', 'radian')],
                                [ParamType('success', 'bool')]),
            'linear_move': ActionType(
                [ParamType('start_loc', 'se3'),
                 ParamType('movement', 'r3'),
                 ParamType('stop', 'discrete', ['pressure', 'pressure_accel'])],
                [ParamType('success', 'bool')]),
        }

        # Seed plan: push from a fixed start pose, then repeat the move
        # from wherever the arm ends up.
        home_pose = (np.matrix([0.3, 0.15, 0.9]).T, np.matrix([0., 0., 0., 0.1]))
        push_vec = np.matrix([.4, 0, 0.]).T
        self.seed = [
            Action('linear_move', [home_pose, push_vec, 'pressure']),
            Action('linear_move', [Action('current_location', [])]),
        ]
        self.run(self.seed)

    def run(self, seed):
        # Execution hook -- intentionally a no-op in this descriptor stub.
        pass
#def go_to_home_pose(self):
# #self.behaviors.movement.set_movement_mode_cart()
# return self.behaviors.movement.move_absolute(self.start_location, stop='pressure')
# #self.behaviors.movement.set_movement_mode_ik()
# #return self.behaviors.movement.move_absolute(self.start_location, stop='pressure')
#def location_activated_behaviors(self, point_bl, stored_point=False):
# driving_param = {'light_switch': {'coarse': .7, 'fine': .5, 'voi': .2},
# 'drawer': {'coarse': .7, 'fine': .7, 'voi': .2}}
# map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
# point_map = tfu.transform_points(map_T_base_link, point_bl)
# matches = self.find_close_by_points(point_map)
# if len(matches) > 0:
# #pdb.set_trace()
# ldata = self.location_data[self.location_labels[matches[0]]]
# task = ldata['task']
# rospy.loginfo('Found closeby location %s' % str(ldata))
# else:
# rospy.loginfo( 'No location matches found. Please enter location type:')
# for i, k in enumerate(driving_param.keys()):
# rospy.loginfo(' %d %s' %(i,k))
# task_number = raw_input()
# task = driving_param.keys()[int(task_number)]
# self.robot.sound.say('task %s' % task.replace('_', ' '))
# rospy.loginfo('Task is %s' % task)
# if self.approach_location(point_bl,
# coarse_stop=driving_param[task]['coarse'],
# fine_stop=driving_param[task]['fine'],
# voi_radius=driving_param[task]['voi']):
# return
# else:
# ret = self.approach_perpendicular_to_surface(point_bl,
# voi_radius=driving_param[task]['voi'],
# dist_approach=driving_param[task]['fine'])
# if ret != 3:
# rospy.logerr('location_activated_behaviors: approach_perpendicular_to_surface failed!')
# return
# base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
# point_bl_t1 = tfu.transform_points(base_link_T_map, point_map)
# try:
# self.untuck()
# self.behaviors.movement.move_absolute(self.start_location, stop='pressure')
# self.behaviors.movement.pressure_listener.rezero()
# if task == 'light_switch':
# #self.location_add(perturbed_map, task)
# # TODO: what happens when we first encounter this location?! experiment n times to create dataset?
# self.practice(point_bl_t1,
# ft.partial(self.light_switch1,
# point_offset=np.matrix([0,0,.03]).T,
# press_contact_pressure=300,
# move_back_distance=np.matrix([-.0075,0,0]).T,
# press_pressure=3500,
# press_distance=np.matrix([0,0,-.15]).T,
# visual_change_thres=.03),
# 'light_switch')
# self.tuck()
# if False: #Old branch where we retry blindly
# MAX_RETRIES = 15
# rospy.loginfo('location_activated_behaviors: go_home_pose')
# #self.go_to_home_pose()
# self.behaviors.movement.move_absolute(self.start_location, stop='pressure')
# gaussian = pr.Gaussian(np.matrix([ 0, 0, 0.]).T, \
# np.matrix([[1., 0, 0], \
# [0, .02**2, 0], \
# [0, 0, .02**2]]))
# retry_count = 0
# success = False
# gaussian_noise = np.matrix([0, 0, 0.0]).T
# point_offset = np.matrix([0, 0, 0.03]).T
# while not success:
# perturbation = gaussian_noise
# perturbed_point_bl = point_bl_t1 + perturbation
# success, _ = self.light_switch1(perturbed_point_bl, point_offset=point_offset, \
# press_contact_pressure=300, move_back_distance=np.matrix([-.0075,0,0]).T,\
# press_pressure=3500, press_distance=np.matrix([0,0,-.15]).T, \
# visual_change_thres=.03)
# gaussian_noise = gaussian.sample()
# gaussian_noise[0,0] = 0
# retry_count = retry_count + 1
# if retry_count > MAX_RETRIES:
# self.robot.sound.say('giving up tried %d times already' % MAX_RETRIES)
# break
# elif not success:
# self.robot.sound.say('retrying')
# if success:
# self.robot.sound.say('successful!')
# if not stored_point or retry_count > 1:
# map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
# perturbed_map = tfu.transform_points(map_T_base_link, perturbed_point_bl)
# self.location_add(perturbed_map, task)
# self.robot.sound.say('recorded point')
# #if retry_count > 1:
# # if not self.add_perturbation_to_location(point_map, perturbation):
# # self.robot.sound.say('unable to add perturbation to database! please fix')
# self.tuck()
#
# if task == 'drawer':
# self.drawer(point_bl_t1)
# self.tuck()
# self.robot.sound.say('done')
# self.location_add(point_map, task)
#
# #except lm.RobotSafetyError, e:
# # rospy.loginfo('location_activated_behaviors: Caught a robot safety exception "%s"' % str(e.parameter))
# # #self.behaviors.movement.move_absolute(self.start_location, stop='accel')
# except lm.RobotSafetyError, e:
# rospy.loginfo('location_activated_behaviors: Caught a robot safety exception "%s"' % str(e.parameter))
# self.behaviors.movement.move_absolute(self.start_location, stop='accel')
#
# except TaskError, e:
# rospy.loginfo('location_activated_behaviors: TaskError: %s' % str(e.parameter))
# rospy.loginfo('location_activated_behaviors: DONE MANIPULATION!')
# self.robot.sound.say('done')
#if self.approach_location(point_bl,
# coarse_stop=self.locations_man.driving_param[task]['coarse'],
# fine_stop=self.locations_man.driving_param[task]['fine'],
# voi_radius=self.locations_man.driving_param[task]['voi']):
# #rospy.logerr('location_approach_driving: intial approach failed')
# return True, 'initial approach'
#else:
#def load_classifier(self, classifier_name, data_file_name):
# self.learners[classifier_name] = ipa.InterestPointPerception(classifier_name,
# data_file_name, self.tf_listener)
#return {'points3d': np.column_stack(points3d_tried),
# 'instances': np.column_stack(instances_tried),
# 'points2d': np.column_stack(points2d_tried),
# 'labels': np.matrix(labels),
# 'sizes': fea_dict['sizes']}
#def blind_exploration3(self, task_id, behavior, undo_behavior, point_bl, stop_fun,
# max_retries=15, closeness_tolerance=.01, fea_dict=None):
# params = r3d.Recognize3DParam()
# params.uncertainty_x = 1.
# params.uncertainty_y = .02
# params.uncertainty_z = .02
# params.n_samples = 400
# params.uni_mix = 0.
# #MAX_RETRIES = 20
#
# # instances, locs2d, locs3d, image, rdict, sizes =
# if fea_dict == None:
# fea_dict, _ = self.read_features_save(task_id, point_bl, params)
# image_T_bl = tfu.transform('openni_rgb_optical_frame', 'base_link', self.tf_listener)
# fea_dict['image_T_bl'] = image_T_bl
# #fea_dict = self.feature_ex.read(expected_loc_bl=point_bl, params=params)
#
# dists = ut.norm(fea_dict['points3d'] - point_bl)
# ordering = np.argsort(dists).A1
# points3d_sampled = fea_dict['points3d'][:, ordering]
# points2d_sampled = fea_dict['points2d'][:, ordering]
# instances_sampled = fea_dict['instances'][:, ordering]
# start_pose = self.robot.head.pose()
# point3d_img = tfu.transform_points(fea_dict['image_T_bl'], point_bl)
# point2d_img = self.feature_ex.cal.project(point3d_img)
# sampled_idx = 0
# iter_count = 0
# labels = []
# points_tried = []
# tinstances = []
# sp2d = []
# while iter_count < max_retries and not stop_fun(np.matrix(labels)):
# if len(points_tried)> 0 and \
# np.any(ut.norm(np.column_stack(points_tried) - points3d_sampled[:, sampled_idx]) < closeness_tolerance):
# sampled_idx = sampled_idx + 1
# continue
# #pdb.set_trace()
# #self.robot.sound.say('executing behavior')
# self.robot.head.set_pose(start_pose, 1)
# success, reason = behavior(points3d_sampled[:, sampled_idx])
# iter_count = iter_count + 1
# points_tried.append(points3d_sampled[:, sampled_idx])
# tinstances.append(instances_sampled[:, sampled_idx])
# sp2d.append(points2d_sampled[:, sampled_idx])
# sampled_idx = sampled_idx + 1
# #tinstances.append(fea_dict['instances'][:,iter_count])
# #sp2d.append(fea_dict['points2d'][:,iter_count])
# #add point and label to points tried
# if success:
# labels.append(r3d.POSITIVE)
# if undo_behavior != None:
# #If we were successful, call blind exploration with the undo behavior
# def any_pos_sf(labels_mat):
# if np.any(r3d.POSITIVE == labels_mat):
# return True
# return False
# if task_id != None:
# utid = self.locations_man.create_undo_task(task_id)
# else:
# utid = None
# #TODO: gather instances for undo action
# #TODO: figure out why position of point_bl is shifted in second call
# self.seed_dataset_explore(utid, undo_behavior, None, point_bl, any_pos_sf,
# max_retries, fea_dict=fea_dict)
# #success, reason = undo_behavior(points3d_sampled[:, 'iter_count'])
# else:
# labels.append(r3d.NEGATIVE)
# #Visualization
# img = cv.CloneMat(fea_dict['image'])
# r3d.draw_points(img, points2d_sampled, [255, 255, 255], 2, -1)
# _, pos_points, neg_points = separate_by_labels(np.column_stack(sp2d), np.matrix(labels))
# r3d.draw_points(img, point2d_img, [255, 0, 0], 4, 2)
# r3d.draw_points(img, pos_points, [0, 255, 0], 2, -1)
# r3d.draw_points(img, neg_points, [0, 0, 255], 2, -1)
# r3d.draw_points(img, sp2d[-1], [0, 184, 245], 3, -1)
# self.img_pub.publish(img)
#
# rospy.loginfo('tried %d times' % iter_count)
# return {'points3d': np.column_stack(points_tried),
# 'instances': np.column_stack(tinstances),
# 'points2d': np.column_stack(sp2d),
# 'labels': np.matrix(labels),
# 'sizes': fea_dict['sizes']}
# #if iter_count > MAX_RETRIES:
# # self.robot.sound.say('giving up tried %d times already' % MAX_RETRIES)
# # break
# #elif not success:
# # self.robot.sound.say('retrying')
# return points tried record
#success, _ = self.light_switch1(perturbed_point_bl, point_offset=point_offset, \
# press_contact_pressure=300, move_back_distance=np.matrix([-.0075,0,0]).T,\
# press_pressure=3500, press_distance=np.matrix([0,0,-.15]).T, \
# visual_change_thres=.03)
# points tried = []
# while we have not succeeded and not stop_fun(points tried):
# label = behavior(point)
# add point and label to points tried
# perturb point
# return points tried record
#def load_classifier(self, name, fname):
# print 'loading classifier'
# dataset = ut.load_pickle(fname)
# self.train(dataset, name)
#self.location_labels = []
#self.location_data = []
#if os.path.isfile(self.saved_locations_fname):
# location_data = ut.load_pickle(self.saved_locations_fname) #each col is a 3d point, 3xn mat
# for idx, rloc in enumerate(location_data):
# self.location_centers.append(rloc['center'])
# self.location_labels.append(idx)
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# self.location_data = location_data
#if os.path.isfile(self.saved_locations_fname):
# location_data = ut.load_pickle(self.saved_locations_fname) #each col is a 3d point, 3xn mat
# for idx, rloc in enumerate(location_data):
# self.location_centers.append(rloc['center'])
# self.location_labels.append(idx)
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# self.location_data = location_data
#pass
#location_idx = self.location_labels[close_by_locs[0]]
#ldata = self.location_data[location_idx]
#rospy.loginfo('location_add: point close to %d at %s.' % (location_idx, str(ldata['center'].T)))
#ldata['points'].append(point_map)
#ldata['center'] = np.column_stack(ldata['points']).mean(1)
#self.location_centers[location_idx] = ldata['center']
#self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# def update_center(self, center_id, point_map):
# #If close by locations found then add to points list and update center
# location_idx = self.location_labels[close_by_locs[0]]
# ldata = self.location_data[location_idx]
#
# rospy.loginfo('location_add: point close to %d at %s.' % (location_idx, str(ldata['center'].T)))
# ldata['points'].append(point_map)
# ldata['center'] = np.column_stack(ldata['points']).mean(1)
# self.location_centers[location_idx] = ldata['center']
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
#
#def location_add(self, point_map, task, data):
# close_by_locs = self.find_close_by_points_match_task(point_map, task)
# if len(close_by_locs) == 0:
# rospy.loginfo('location_add: point not close to any existing location. creating new record.')
# self.location_data.append({
# 'task': task,
# 'center': point_map,
# 'perceptual_dataset': None,
# 'points':[point_map]})
# self.location_centers.append(point_map)
# self.location_labels.append(len(self.location_data) - 1)
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# else:
# #If close by locations found then add to points list and update center
# location_idx = self.location_labels[close_by_locs[0]]
# ldata = self.location_data[location_idx]
# rospy.loginfo('location_add: point close to %d at %s.' % (location_idx, str(ldata['center'].T)))
# ldata['points'].append(point_map)
# ldata['center'] = np.column_stack(ldata['points']).mean(1)
# self.location_centers[location_idx] = ldata['center']
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# ut.save_pickle(self.location_data, self.saved_locations_fname)
# rospy.loginfo('location_add: saved point in map.')
# def find_close_by_points(self, point_map):
# if self.locations_tree != None:
# close_by_locs = self.locations_tree.query_ball_point(np.array(point_map.T), self.LOCATION_ADD_RADIUS)[0]
# return close_by_locs
# else:
# return []
# 3) listing all locations
# 4) listing locations closest to given point, with and without task
#def find_close_by_points_match_task(self, point_map, task):
# matches = self.find_close_by_points(point_map)
# task_matches = []
# for m in matches:
# idx = self.location_labels[m]
# ldata = self.location_data[idx]
# if ldata['task'] == task:
# task_matches.append(m)
# return task_matches
#class PickPointsCloseToStartLocation:
#
# def __init__(self, point_bl, closeness_tolerance=.01, max_retries=20):
# self.params = r3d.Recognize3DParam()
# self.params.uncertainty_x = 1.
# self.params.uncertainty_y = .02
# self.params.uncertainty_z = .02
# self.params.n_samples = 400
# self.params.uni_mix = 0.
#
# self.sampled_idx = 0
# self.iter_count = 0
# self.max_retries = max_retries
# self.closeness_tolerance = closeness_tolerance
#
# self.points3d_tried = []
# self.points2d_tried = []
# self.instances_tried = []
#
# def process_scan(self, fea_dict):
# dists = ut.norm(fea_dict['points3d'] - point_bl)
# ordering = np.argsort(dists).A1
#
# self.points3d_sampled = fea_dict['points3d'][:, ordering]
# self.points2d_sampled = fea_dict['points2d'][:, ordering]
# self.instances_sampled = fea_dict['instances'][:, ordering]
#
# def get_params(self):
# return self.params
#
# def stop(self):
# return self.iter_count > max_retries
#
# def pick_next(self):
# while len(self.points3d_tried) > 0 \
# and np.any(ut.norm(np.column_stack(self.points3d_tried) - self.points3d_sampled[:, self.sampled_idx]) < self.closeness_tolerance):
# self.sampled_idx = self.sampled_idx + 1
#
# self.points3d_tried.append(self.points3d_sampled[:, self.sampled_idx])
# self.points2d_tried.append(self.points2d_sampled[:, self.sampled_idx])
# self.instances_tried.append(self.instances_sampled[:, self.sampled_idx])
# self.iter_count = iter_count + 1
#
# return {'points3d': self.points3d_sampled[:, self.sampled_idx],
# 'points2d': self.points2d_sampled[:, self.sampled_idx],
# 'instances': self.instances_sampled[:, self.sampled_idx]}
#
# def get_instances_used(self):
# if len(self.points3d_sampled) > 0:
# return {'points3d': np.column_stack(self.points3d_sampled),
# 'points2d': np.column_stack(self.points2d_sampled),
# 'instances': np.column_stack(self.instances_sampled)}
# else:
# return None
#
#class PickPointsUsingActiveLearning:
#
# def __init__(self, locations_manager):
# self.params = r3d.Recognize3DParam()
# self.params.uncertainty_x = 1.
# self.params.n_samples = 2000
# self.params.uni_mix = .1
#
# self.points3d_tried = []
# self.points2d_tried = []
# self.instances_tried = []
#
# def process_scan(self, fea_dict):
#
# def get_params(self):
#
# def pick_next(self):
#
# def stop(self):
#
# def get_instances_used(self):
#self.LOCATION_ADD_RADIUS = .5
#self.kinect_listener = kl.KinectListener()
#self.kinect_cal = rc.ROSCameraCalibration('camera/rgb/camera_info')
#self.kinect_img_sub = message_filters.Subscriber('/camera/rgb/image_color', smsg.Image)
#self.kinect_depth_sub = message_filters.Subscriber('/camera/depth/points2', smsg.PointCloud2)
#ts = message_filters.TimeSynchronizer([image_sub, depth_sub], 10)
#ts.registerCallback(callback)
#self.load_classifier('light_switch', 'labeled_light_switch_data.pkl')
#self.start_location = (np.matrix([0.25, 0.30, 1.3]).T, np.matrix([0., 0., 0., 0.1]))
#loading stored locations
#self.saved_locations_fname = 'saved_locations.pkl'
#self.location_centers = []
#self.location_labels = []
#self.location_data = []
#self.locations_tree = None
#if os.path.isfile(self.saved_locations_fname):
# location_data = ut.load_pickle(self.saved_locations_fname) #each col is a 3d point, 3xn mat
# for idx, rloc in enumerate(location_data):
# self.location_centers.append(rloc['center'])
# self.location_labels.append(idx)
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# self.location_data = location_data
# joint angles used for tuck
#pdb.set_trace()
#self.untuck()
#self.behaviors.movement.set_movement_mode_ik()
#self.movement.set_movement_mode_ik()
#self.tuck()
#self.r1 = np.matrix([[-0.31006769, 1.2701541 , -2.07800829, -1.45963243, -4.35290489,
# -1.86052221, 5.07369192]]).T
#self.l0 = np.matrix([[ 1.05020383, -0.34464327, 0.05654 , -2.11967694,
# -10.69100221, -1.95457839, -3.99544713]]).T
#self.l1 = np.matrix([[ 1.06181076, 0.42026402, 0.78775801, -2.32394841,
# -11.36144995, -1.93439025, -3.14650108]]).T
#self.l2 = np.matrix([[ 0.86275197, 0.93417818, 0.81181124, -2.33654346,
# -11.36121856, -2.14040499, -3.15655164]]).T
#self.l3 = np.matrix([[ 0.54339568, 1.2537778 , 1.85395725, -2.27255481, -9.92394984,
# -0.86489749, -3.00261708]]).T
#def train(self, dataset, name):
# rec_params = self.feature_ex.rec_params
# nneg = np.sum(dataset.outputs == r3d.NEGATIVE) #TODO: this was copied and pasted from r3d
# npos = np.sum(dataset.outputs == r3d.POSITIVE)
# print '================= Training ================='
# print 'NEG examples', nneg
# print 'POS examples', npos
# print 'TOTAL', dataset.outputs.shape[1]
# neg_to_pos_ratio = float(nneg)/float(npos)
# weight_balance = ' -w0 1 -w1 %.2f' % neg_to_pos_ratio
# print 'training'
# learner = r3d.SVMPCA_ActiveLearner(use_pca=True)
# #TODO: figure out something scaling inputs field!
# learner.train(dataset, dataset.inputs,
# rec_params.svm_params + weight_balance,
# rec_params.variance_keep)
# self.learners[name] = {'learner': learner, 'dataset': dataset}
# print 'done loading'
#def tuck(self):
# ldiff = np.linalg.norm(pr2.diff_arm_pose(self.robot.left.pose(), self.l3))
# # np.linalg.norm(self.robot.left.pose() - self.l3)
# rdiff = np.linalg.norm(pr2.diff_arm_pose(self.robot.right.pose(), self.r1))
# #rdiff = np.linalg.norm(self.robot.right.pose() - self.r1)
# if ldiff < .3 and rdiff < .3:
# rospy.loginfo('tuck: Already tucked. Ignoring request.')
# return
# self.robot.right.set_pose(self.r1, block=False)
# self.robot.left.set_pose(self.l0, block=True)
# poses = np.column_stack([self.l0, self.l1, self.l2, self.l3])
# #pdb.set_trace()
# self.robot.left.set_poses(poses, np.array([0., 1.5, 3, 4.5]))
#def untuck(self):
# if np.linalg.norm(self.robot.left.pose() - self.l0) < .3:
# rospy.loginfo('untuck: Already untucked. Ignoring request.')
# return
# self.robot.right.set_pose(self.r1, 2., block=False)
# self.robot.left.set_pose(self.l3, 2., block=True)
# poses = np.column_stack([self.l3, self.l2, self.l1, self.l0])
# self.robot.left.set_poses(poses, np.array([0., 3., 6., 9.])/2.)
#if len(self.location_centers) < 1:
# return
#rospy.loginfo('click_cb: double clicked but no 3d point given')
#rospy.loginfo('click_cb: will use the last successful location given')
#base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
#point_bl = tfu.transform_points(base_link_T_map, self.location_centers[-1])
#rospy.loginfo('click_cb: using ' + str(self.location_centers[-1].T))
#self.location_activated_behaviors(point_bl, stored_point=True)
#def find_close_by_points(self, point_map):
# if self.locations_tree != None:
# close_by_locs = self.locations_tree.query_ball_point(np.array(point_map.T), self.LOCATION_ADD_RADIUS)[0]
# return close_by_locs
# else:
# return []
#def find_close_by_points_match_task(self, point_map, task):
# matches = self.find_close_by_points(point_map)
# task_matches = []
# for m in matches:
# idx = self.location_labels[m]
# ldata = self.location_data[idx]
# if ldata['task'] == task:
# task_matches.append(m)
# return task_matches
#def location_add(self, point_map, task, data):
# close_by_locs = self.find_close_by_points_match_task(point_map, task)
# if len(close_by_locs) == 0:
# rospy.loginfo('location_add: point not close to any existing location. creating new record.')
# self.location_data.append({
# 'task': task,
# 'center': point_map,
# 'perceptual_dataset': None,
# 'points':[point_map]})
# self.location_centers.append(point_map)
# self.location_labels.append(len(self.location_data) - 1)
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# else:
# #If close by locations found then add to points list and update center
# location_idx = self.location_labels[close_by_locs[0]]
# ldata = self.location_data[location_idx]
# rospy.loginfo('location_add: point close to %d at %s.' % (location_idx, str(ldata['center'].T)))
# ldata['points'].append(point_map)
# ldata['center'] = np.column_stack(ldata['points']).mean(1)
# self.location_centers[location_idx] = ldata['center']
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# ut.save_pickle(self.location_data, self.saved_locations_fname)
# rospy.loginfo('location_add: saved point in map.')
#def location_add(self, point_map, task):
# close_by_locs = self.find_close_by_points_match_task(point_map, task)
# if len(close_by_locs) == 0:
# rospy.loginfo('location_add: point not close to any existing location. creating new record.')
# self.location_data.append({
# 'task': task,
# 'center': point_map,
# 'points':[point_map]})
# self.location_centers.append(point_map)
# self.location_labels.append(len(self.location_data) - 1)
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# else:
# #If close by locations found then add to points list and update center
# location_idx = self.location_labels[close_by_locs[0]]
# ldata = self.location_data[location_idx]
# rospy.loginfo('location_add: point close to %d at %s.' % (location_idx, str(ldata['center'].T)))
# ldata['points'].append(point_map)
# ldata['center'] = np.column_stack(ldata['points']).mean(1)
# self.location_centers[location_idx] = ldata['center']
# self.locations_tree = sp.KDTree(np.array(np.column_stack(self.location_centers).T))
# ut.save_pickle(self.location_data, self.saved_locations_fname)
# rospy.loginfo('location_add: saved point in map.')
#def record_processed_data_kinect2(self, point3d_bl, kinect_fea):
# instances, locs2d_image, locs3d_bl, image = kinect_fea #self.feature_ex.read(point3d_bl)
# #rospy.loginfo('Getting a kinect reading')
# tstring = time.strftime('%A_%m_%d_%Y_%I:%M%p')
# kimage_name = '%s_highres.png' % tstring
# cv.SaveImage(kimage_name, kimage)
# preprocessed_dict = {'instances': instances,
# 'points2d': locs2d_image,
# 'points3d': locs3d_bl,
# 'image': kimage_name,
# 'labels': labels,
# 'sizes': feature_extractor.sizes}
#self.feature_ex.read(point3d_bl)
#rdict = self.kinect_listener.read()
#kimage = rdict['image']
#rospy.loginfo('Waiting for calibration.')
#while self.kinect_cal.has_msg == False:
# time.sleep(.1)
#which frames?
#rospy.loginfo('Getting transforms.')
#k_T_bl = tfu.transform('openni_rgb_optical_frame', '/base_link', self.tf_listener)
#tstring = time.strftime('%A_%m_%d_%Y_%I:%M%p')
#kimage_name = '%s_highres.png' % tstring
#rospy.loginfo('Saving images (basename %s)' % tstring)
#cv.SaveImage(kimage_name, kimage)
#rospy.loginfo('Saving pickles')
#pickle_fname = '%s_interest_point_dataset.pkl' % tstring
#data_pkl = {'touch_point': point3d_bl,
# 'points3d': rdict['points3d'],
# 'image': kimage_name,
# 'cal': self.prosilica_cal,
# 'k_T_bl': k_T_bl}
#'point_touched': point3d_bl}
#ut.save_pickle(data_pkl, pickle_fname)
#print 'Recorded to', pickle_fname
#npoint = point + gaussian_noise
#success_off, touchloc_bl = self.light_switch1(npoint,
#pdb.set_trace()
# ##
# # The behavior can make service calls to a GUI asking users to label
# def repeat_action(self, task_id, ctask_id, point3d_bl, sampling_object, stop_fun, fea_dict=None):
#
# # instances, locs2d_image, locs3d_bl, image, raw_dict =
# #kf_dict = self.feature_ex.read(point3d_bl)
# params = r3d.Recognize3DParam()
# params.uncertainty_x = 1.
# params.n_samples = 2000
# params.uni_mix = .1
#
# kdict, fname = self.read_features_save(task_id, point3d_bl, params)
# learner = self.locations_man.learners[task_id]
# behavior = self.get_behavior_by_task(self.locations_man.data[task_id]['task'])
# undo_behavior = self.get_undo_behavior_by_task(self.locations_man.data[task_id]['task'])
# start_pose = self.robot.head.pose()
#
# kdict['image_T_bl'] = tfu.transform('openni_rgb_optical_frame', 'base_link', self.tf_listener)
# point3d_img = tfu.transform_points(kdict['image_T_bl'], point3d_bl)
# point2d_img = self.feature_ex.cal.project(point3d_img)
#
# labels = []
# points3d_tried = []
# points2d_tried = []
# converged = False
# indices_added = []
# pdb.set_trace()
#
#
# while not converged and not stop_fun(np.matrix(labels)):
# #Find remaining instances
# remaining_pt_indices = r3d.inverse_indices(indices_added, kdict['instances'].shape[1])
# remaining_instances = kdict['instances'][:, remaining_pt_indices]
#
# #Ask learner to pick an instance
# ridx, selected_dist, converged = learner.select_next_instances_no_terminate(remaining_instances)
# selected_idx = remaining_pt_indices[ridx]
# indices_added.append(selected_idx)
#
# #draw
# img = cv.CloneMat(kdict['image'])
# #Draw the center
# r3d.draw_points(img, point2d_img, [255, 0, 0], 4, 2)
# #Draw possible points
# r3d.draw_points(img, kdict['points2d'], [255, 255, 255], 2, -1)
# #Draw what we have so far
# if len(points2d_tried) > 0:
# _, pos_exp, neg_exp = separate_by_labels(np.column_stack(points2d_tried), np.matrix(labels))
# r3d.draw_points(img, pos_exp, [0, 255, 0], 3, 1)
# r3d.draw_points(img, neg_exp, [0, 0, 255], 3, 1)
#
# predictions = np.matrix(learner.classify(kdict['instances']))
# _, pos_pred, neg_pred = separate_by_labels(kdict['points2d'], predictions)
# r3d.draw_points(img, pos_pred, [0, 255, 0], 2, -1)
# r3d.draw_points(img, neg_pred, [0, 0, 255], 2, -1)
#
# #Draw what we're selecting
# r3d.draw_points(img, kdict['points2d'][:, selected_idx], [0, 184, 245], 3, -1)
# self.img_pub.publish(img)
#
# #Get label for instance
# self.robot.head.set_pose(start_pose, 1)
#
# #EXECUTE!!
# success, reason = behavior(kdict['points3d'][:, selected_idx])
# if success:
# color = [0,255,0]
# label = r3d.POSITIVE
# def any_pos_sf(labels_mat):
# if np.any(r3d.POSITIVE == labels_mat):
# return True
# return False
# utid = self.locations_man.create_undo_task(task_id)
# self.blind_exploration2(utid, undo_behavior, None, point3d_bl, any_pos_sf,
# max_retries=max_undo_retries, fea_dict=kdict)
#
# else:
# label = r3d.NEGATIVE
# color = [0,0,255]
#
# labels.append(label)
# points3d_tried.append(kdict['points3d'][:, selected_idx])
# points2d_tried.append(kdict['points2d'][:, selected_idx])
#
# datapoint = {'instances': kdict['instances'][:, selected_idx],
# 'points2d': kdict['points2d'][:, selected_idx],
# 'points3d': kdict['points3d'][:, selected_idx],
# 'sizes': kdict['sizes'],
# 'labels': np.matrix([label])
# }
# self.locations_man.add_perceptual_data(task_id, datapoint)
# self.locations_man.save_database()
# self.locations_man.train(task_id)
#
# #Classify
# predictions = np.matrix(learner.classify(kdict['instances']))
#
# #Draw
# img = cv.CloneMat(kdict['image'])
# _, pos_exp, neg_exp = separate_by_labels(np.column_stack(points2d_tried), np.matrix(labels))
# r3d.draw_points(img, point2d_img, [255, 0, 0], 4, 2)
# r3d.draw_points(img, kdict['points2d'], [255, 255, 255], 2, -1)
# r3d.draw_points(img, pos_exp, [0, 255, 0], 3, 1)
# r3d.draw_points(img, neg_exp, [0, 0, 255], 3, 1)
#
# _, pos_pred, neg_pred = separate_by_labels(kdict['points2d'], predictions)
# r3d.draw_points(img, pos_pred, [0, 255, 0], 2, -1)
# r3d.draw_points(img, neg_pred, [0, 0, 255], 2, -1)
# r3d.draw_points(img, points2d_tried[-1], color, 3, -1)
#
# #publish
# self.img_pub.publish(img)
#Save dataset in the location's folder
#def save_dataset(self, task_id, point, rdict):
# pt.join(task_id,
# self.locations_man
# self.record_perceptual_data(point, rdict)
# #TODO...
#TODO TEST
#BOOKMARK 3/7 4:03 AM
#LAST DITCH EXECUTION(point, stop_fun):
#def blind_exploration(self, behavior, point_bl, stop_fun, max_retries=15):
# gaussian = pr.Gaussian(np.matrix([ 0, 0, 0.]).T, \
# np.matrix([[1., 0, 0], \
# [0, .02**2, 0], \
# [0, 0, .02**2]]))
# iter_count = 0
# gaussian_noise = np.matrix([0, 0, 0.0]).T #We want to try the given point first
# labels = []
# points_tried = []
# #while we have not succeeded and not stop_fun(points tried):
# while iter_count < MAX_RETRIES and stop_fun(np.matrix(labels)):
# perturbation = gaussian_noise
# perturbed_point_bl = point_bl + perturbation
# self.robot.sound.say('executing behavior')
# success, reason = behavior(perturbed_point_bl)
# points_tried.append(perturbed_point_bl)
# #add point and label to points tried
# if success:
# labels.append(r3d.POSITIVE)
# else:
# labels.append(r3d.NEGATIVE)
# #perturb point
# gaussian_noise = gaussian.sample()
# gaussian_noise[0,0] = 0
# iter_count = iter_count + 1
#
# self.robot.sound.say('tried %d times' % iter_count)
# return np.column_stack(points_tried)
#def blind_exploration2(self, task_id, behavior, undo_behavior, point_bl, stop_fun,
# max_retries=15, closeness_tolerance=.005, fea_dict=None):
# params = r3d.Recognize3DParam()
# params.uncertainty_x = 1.
# params.uncertainty_y = .02
# params.uncertainty_z = .02
# params.n_samples = 400
# params.uni_mix = 0.
# MAX_RETRIES = 20
#
# if fea_dict == None:
# fea_dict, _ = self.read_features_save(task_id, point_bl, params)
#
# dists = ut.norm(fea_dict['points3d'] - point_bl)
# ordering = np.argsort(dists).A1
# points3d_sampled = fea_dict['points3d'][:, ordering]
# points2d_sampled = fea_dict['points2d'][:, ordering]
# instances_sampled = fea_dict['instances'][:, ordering]
# labels = []
# points_tried = []
# tinstances = []
# sp2d = []
# labels.append(r3d.POSITIVE)
# points_tried.append(points3d_sampled[:, 0])
# tinstances.append(instances_sampled[:, 0])
# sp2d.append(points2d_sampled[:, 0])
# labels.append(r3d.NEGATIVE)
# points_tried.append(points3d_sampled[:, 1])
# tinstances.append(instances_sampled[:, 1])
# sp2d.append(points2d_sampled[:, 1])
# return {'points3d': np.column_stack(points_tried),
# 'instances': np.column_stack(tinstances),
# 'points2d': np.column_stack(sp2d),
# 'labels': np.matrix(labels),
# 'sizes': fea_dict['sizes']}
#def __init__(self, object_name, labeled_data_fname, tf_listener):
#make learner
#learner = SVMActiveLearnerApp()
#labeled_light_switch_dataset = ut.load_pickle(data_file_name)
#learner.train(labeled_light_switch_dataset,
# labeled_light_switch_dataset.sizes['intensity']
# self.params.variance_keep)
#self.learners[classifier_name] = learner
#def locate_light_switch(self):
# #capture data
# pointcloud_msg = self.laser_scan.scan(math.radians(180.), math.radians(-180.), 20.)
# prosilica_image = self.prosilica.get_frame() #TODO check if this is a cvmat
# while self.prosilica_cal.has_msg == False:
# time.sleep(.1)
# #preprocess
# ic_data = IntensityCloudData(pointcloud_msg, prosilica_image,
# tfu.transform('/high_def_optical_frame', '/base_link', self.tf_listener),
# self.prosilica_cal,
# r3d.Recognize3DParam())
# instances = ic_data.extract_vectorized_features()
# results = []
# for i in range(instances.shape[1]):
# nlabel = self.learners['light_switch'].classify(instances[:, i])
# results.append(nlabel)
# results = np.matrix(results)
# positive_indices = np.where(results == r3d.POSITIVE)[1]
# #want 3d location of each instance
# positive_points_3d = ic_data.sampled_points[:, positive_indices]
# #return a random point for now
# rindex = np.random.randint(0, len(positive_indices))
# return positive_points_3d[:,rindex]
#def add_perturbation_to_location(self, point_map, perturbation):
# locs = self.find_close_by_points(point_map)
# if locs != None:
# location = self.location_data[self.location_labels(locs[0])]
# if not location.has_key('perturbation'):
# location['perturbation'] = []
# location['perturbation'].append(perturbation)
# return True
# return False
#self.go_to_home_pose()
#print '>>>> POINT IS', point_bl_t1.T
#point_bl_t1 = np.matrix([[ 0.73846737, 0.07182931, 0.55951065]]).T
#DIST_THRESHOLD = .8 for lightswitch
#DIST_THRESHOLD = .85 #for drawers
#DIST_APPROACH = .5
#COARSE_STOP = .7
#FINE_STOP = .7
#VOI_RADIUS = .2
#point_dist = np.linalg.norm(point_bl_t0[0:2,0])
#rospy.loginfo('run_behaviors: Point is %.3f away.' % point_dist)
#map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
#point_map = tfu.transform_points(map_T_base_link, point_bl_t0)
#if point_dist > DIST_THRESHOLD:
# rospy.loginfo('run_behaviors: Point is greater than %.1f m away (%.3f). Driving closer.' % (DIST_THRESHOLD, point_dist))
# ##self.turn_to_point(point_bl_t0)
# rospy.loginfo( 'run_behaviors: CLICKED on point_bl ' + str(point_bl_t0.T))
# ret = self.drive_approach_behavior(point_bl_t0, dist_far=COARSE_STOP)
# if ret != 3:
# base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
# point_bl_t1 = tfu.transform_points(base_link_T_map, point_map)
# dist_end = np.linalg.norm(point_bl_t1[0:2,0])
# if dist_end > DIST_THRESHOLD:
# rospy.logerr('run_behaviors: drive_approach_behavior failed! %.3f' % dist_end)
# self.robot.sound.say("I am unable to navigate to that location")
# return
# base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
# point_bl_t1 = tfu.transform_points(base_link_T_map, point_map)
# ret = self.approach_perpendicular_to_surface(point_bl_t1, voi_radius=VOI_RADIUS, dist_approach=FINE_STOP)
# if ret != 3:
# rospy.logerr('run_behaviors: approach_perpendicular_to_surface failed!')
# return
# #map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
# #point_bl_t2 = tfu.transform_points(base_link_T_map, point_map)
# self.robot.sound.say('done')
# rospy.loginfo('run_behaviors: DONE DRIVING!')
#elif False:
#if tf_listener == None:
# self.tf_listener = tf.TransformListener()
#else:
# self.tf_listener = tf_listener
#self.pr2 = pr2_obj
#self.cman = con.ControllerManager(arm, self.tf_listener, using_slip_controller=1)
#self.reactive_gr = rgr.ReactiveGrasper(self.cman)
#if arm == 'l':
# ptopic = '/pressure/l_gripper_motor'
# self.arm_obj = self.pr2.left
# self.ik_frame = 'l_wrist_roll_link'
# self.tool_frame = 'l_gripper_tool_frame'
#else:
# ptopic = '/pressure/r_gripper_motor'
# self.arm_obj = self.pr2.right
# self.ik_frame = 'r_wrist_roll_link'
# self.tool_frame = 'r_gripper_tool_frame'
#self.movement_mode = 'ik' #or cart
#rospy.Subscriber('cursor3d', PointStamped, self.laser_point_handler)
#self.double_click = rospy.Subscriber('mouse_left_double_click', String, self.double_click_cb)
#def set_movement_mode_ik(self):
# self.movement_mode = 'ik'
# self.reactive_gr.cm.switch_to_joint_mode()
# self.reactive_gr.cm.freeze_arm()
#def set_movement_mode_cart(self):
# self.movement_mode = 'cart'
#pdb.set_trace()
#self.gather_interest_point_dataset(point)
#point = np.matrix([ 0.60956734, -0.00714498, 1.22718197]).T
#pressure_parameters = range(1900, 2050, 30)
#self.record_perceptual_data(point)
#successes = []
#parameters = [np.matrix([-.15, 0, 0]).T, 300, np.matrix([-.005, 0, 0]).T, 3500, np.matrix([0,0,-.15]).T, .03]
#for p in pressure_parameters:
# experiment = []
# for i in range(4):
# #Turn off lights
# rospy.loginfo('Experimenting with press_pressure = %d' % p)
# success_off = self.light_switch1(point,
# point_offset=np.matrix([-.15,0,0]).T, press_contact_pressure=300, move_back_distance=np.matrix([-.005,0,0]).T,\
# press_pressure=3500, press_distance=np.matrix([0,0,-.15]).T, visual_change_thres=.03)
# experiment.append(success_off)
# rospy.loginfo('Lights turned off? %s' % str(success_off))
# return
# #Turn on lights
# success_on = self.light_switch1(point,
# point_offset=np.matrix([-.15,0,-.10]).T, press_contact_pressure=300, move_back_distance=np.matrix([-0.005, 0, 0]).T,
# press_pressure=3500, press_distance=np.matrix([0,0,.1]).T, visual_change_thres=.03)
# #def light_switch1(self, point,
# # point_offset, press_contact_pressure, move_back_distance,
# # press_pressure, press_distance, visual_change_thres):
# print 'Lights turned on?', success_on
# successes.append(experiment)
#ut.save_pickle({'pressure': pressure_parameters,
# 'successes': successes}, 'pressure_variation_results.pkl')
#return self.pressure_listener.check_threshold() or self.pressure_listener.check_safety_threshold()
##stop if you hit a tip, side, back, or palm
#(left_touching, right_touching, palm_touching) = self.reactive_gr.check_guarded_move_contacts()
##saw a contact, freeze the arm
#if left_touching or right_touching or palm_touching:
# rospy.loginfo("CONTACT made!")
# return True
#else:
# return False
#print 'move returning'
#return whether the left and right fingers were touching
#return (left_touching, right_touching, palm_touching)
#def execute_action_list(self):
#def run(self, seed):
# # search for pairs of perception operators and manipulation operators that would work
# population = 10
# seeds = []
# for i in range(population):
# aseed = copy.deepcopy(seed)
# # 'bool', 'radian', 'se3', 'r3', 'discrete',
# new_seed_actions = []
# for action in aseed:
# if replace_action:
# pass
# if delete_action:
# pass
#
# if insert_action:
# #pick random action from descriptors list
# new_action =
# new_seed_actions += new_action
# pass
#
# if perturb_parameter:
# num_params = len(action.params)
# rand_param_idx = ...
# self.descriptors[action.name].params[rand_param_idx]
# rand_param_types[rand_param_types]
# #can replace/delete/insert action
# #can pick a parameter and perturb it
# #pdb.set_trace()
# print seed
#point = np.matrix([0.63125642, -0.02918334, 1.2303758 ]).T
#print 'move direction', movement.T
#print 'CORRECTING', point.T
#print 'NEW', point.T
#start_location = (np.matrix([0.25, 0.15, 0.7]).T, np.matrix([0., 0., 0., 0.1]))
#movement = np.matrix([.4, 0., 0.]).T
#what other behavior would I want?
# touch then move away..
# move back but more slowly..
# want a safe physical
# a safe exploration strategy
#self.behaviors.linear_move(self.behaviors.current_location(), back_alittle, stop='none')
#loc_before = self.behaviors.current_location()[0]
#loc_after = self.behaviors.current_location()[0]
#pdb.set_trace()
#self.behaviors.linear_move(self.behaviors.current_location(), down, stop='pressure_accel')
#self.behaviors.linear_move(self.behaviors.current_location(), back, stop='none')
#pdb.set_trace()
#b.twist(math.radians(30.))
#bd = BehaviorDescriptor()
#movement = point - self.behaviors.current_location()[0]
#pdb.set_trace()
#self.behaviors.linear_move(self.behaviors.current_location(), movement, stop='pressure_accel')
#loc = self.behaviors.current_location()[0]
#front_loc = point.copy()
#front_loc[0,0] = loc[0,0]
#self.behaviors.set_pressure_threshold(150)
#self.behaviors.move_absolute((front_loc, self.behaviors.current_location()[1]), stop='pressure_accel')
#self.behaviors.move_absolute((point, self.behaviors.current_location()[1]), stop='pressure_accel')
#def detect_event(self):
# self.behaviors.cman._start_gripper_event_detector(timeout=40.)
# stop_func = self.behaviors._tactile_stop_func
# while stop_func():
#pass
#self.robot = pr2.PR2()
#self.kin = pk.PR2Kinematics(self.robot.tf_listener)
#def linear_move(self, start_location, direction, distance, arm):
# if arm == 'left':
# arm_kin = self.kin.left
# else:
# arm_kin = self.kin.right
# start_pose = arm_kin.ik(start_location)
# loc = start_location[0:3, 4]
# end_location = loc + distance*direction
# end_pose = arm_kin.ik(end_location)
# self.robot.left_arm.set_pose(start_pose, 5.) #!!!
# self.robot.left_arm.set_pose(end_pose, 5.) #!!!
##stop if you hit a tip, side, back, or palm
#(left_touching, right_touching, palm_touching) = rg.check_guarded_move_contacts()
##saw a contact, freeze the arm
#if left_touching or right_touching or palm_touching:
# rospy.loginfo("saw contact")
# rg.cm.switch_to_joint_mode()
# rg.cm.freeze_arm()
# break
#import pdb
#start_location = [0.34, 0.054, 0.87] + [0.015454981255042808, -0.02674860197736427, -0.012255429236635201, 0.999447577565171]
#direction = np.matrix([1., 0., 0.]).T
#self.reactive_l.move_cartesian_step(start_location, blocking = 1)
#(left_touching, right_touching, palm_touching) = self.reactive_l.guarded_move_cartesian(grasp_pose, 10.0, 5.0)
#self.cman_r = con.ControllerManager('r')
#self.reactive_r = rgr.ReactiveGrasper(self.cman_r)
#self.cman_r.start_joint_controllers()
#self.reactive_r.start_gripper_controller()
#(pos, rot) = self.cman.return_cartesian_pose()
#pdb.set_trace()
#currentgoal = pos + rot
#currentgoal[2] -= .05
#self.reactive_l.move_cartesian_step(currentgoal, blocking = 1)
#(left_touching, right_touching, palm_touching) = self.reactive_l.guarded_move_cartesian(grasp_pose, 10.0, 5.0)
#exit()
#end_loc = start_location + direction * distance
#self.reactive_l.move_cartesian_step(start_loc, blocking = 1)
#self.reactive_l.move_cartesian_step(end_loc, blocking = 1)
#left_pose = b.robot.left.pose()
#left_cart = ut.load_pickle('start_pose.pkl')
#pdb.set_trace()
#kin_sol = b.kin.left.ik(left_cart)
#b.robot.left.set_pose(kin_sol, 5.)
##b.linear_move(left_cart)
##left_cart = b.kin.left.fk(left_pose)
##pdb.set_trace()
#print left_cart
#(pos, rot) = cm.return_cartesian_pose()
#currentgoal = pos+rot
#currentgoal[2] -= .05
#rg.move_cartesian_step(currentgoal, blocking = 1)
#exit()
#b.linear_move()
#cart_pose = kin.left.fk('torso_lift_link', 'l_wrist_roll_link', joints)
#kin.left.ik(cart_pose, 'torso_lift_link')
#def light_switch1_on(self, point, press_pressure=3500, press_contact_pressure=150):
# point = point + np.matrix([-.15, 0, -0.20]).T
# success, reason = self.behaviors.reach(point)
# if not success:
# rospy.loginfo('Reach failed due to "%s"' % reason)
# rospy.loginfo('PRESSING')
# success, reason = self.behaviors.press(np.matrix([0, 0, .20]).T, \
# press_pressure, press_contact_pressure)
# if not success:
# rospy.loginfo('Press failed due to "%s"' % reason)
# return
# rospy.loginfo('RESETTING')
# r2 = self.behaviors.move_absolute(self.start_location, stop='pressure_accel')
# if r2 != None:
# rospy.loginfo('moving back to start location failed due to "%s"' % r2)
# return
# print 'DONE.'
#def _tactile_stop_func(self):
# r1 = self.pressure_listener.check_threshold()
# r2 = self.pressure_listener.check_safety_threshold()
# if r1:
# rospy.loginfo('Pressure exceeded!')
# if r2:
# rospy.loginfo('Pressure safety limit EXCEEDED!')
# return r1 or r2
#r1 = self.pressure_listener.check_threshold()
#r2 = self.pressure_listener.check_safety_threshold()
#if r1:
# rospy.loginfo('Pressure exceeded!')
#if r2:
# rospy.loginfo('Pressure safety limit EXCEEDED!')
#pressure_state = r1 or r2
#pressure_state = self.pressure_listener.check_threshold() or self.pressure_listener.check_safety_threshold()
#action finished (trigger seen)
#def optimize_parameters(self, x0, x_range, behavior, objective_func, reset_env_func, reset_param):
# reset_retries = 3
# num_params = len(x0)
# x = copy.deepcopy(x0)
# # for each parameter
# #for i in range(num_params):
# while i < num_params:
# #search for a good setting
# not_converged = True
# xmin = x_range[i, 0]
# xmax = x_range[i, 1]
# while not_converged:
# current_val = x[i]
# candidates_i = [(x[i] + xmin) / 2., (x[i] + xmax) / 2.]
# successes = []
# for cand in candidates_i:
# x[i] = cand
# success = behavior(x)
# if success:
# for reset_i in range(reset_retries):
# reset_success = reset_env_func(*reset_param)
# if reset_success:
# break
# successes.append(success)
# if successes[0] and successes[1]:
# raise RuntimeError('What? this isn\'t supposed to happen.')
# elif successes[0] and not successes[1]:
# next_val = candidates_i[0]
# elif successes[1] and not successes[0]:
# next_val = candidates_i[1]
# else:
# raise RuntimeError('What? this isn\'t supposed to happen.')
# #if all the trials are bad
# if not test(successes):
# #go back by 1 parameter
# i = i - 1
# #if there are more than one good parameter
# for p in params
# ... = objective_func(p)
# i = i + 1
# return x
#def autonomous_learn(self, point3d_bl, behavior, object_name):
# # We learn, but must moderate between spatial cues and requirements of
# # the learner. Spatial cue is a heuristic that can guide to positive
# # examples. Learning heuristic reduces the number of experiments to
# # perform given that we know that we are generally *not* successful
# # (assume that this procedure launches only during non-mission-critical circumstances).
# # So in the case where we're actively learning we're going to ignore the spatial heuristic.
# # Well... can we incorporate distance to the selected 3d point as a feature?
# # ah!
# learn_manager = self.learners[object_name]
# #scan and extract features
# self.robot.head.look_at(point3d_bl, 'base_link', True)
# learn_manager.scan(point3d_bl)
# gaussian = pr.Gaussian(np.matrix([ 0, 0, 0.]).T, \
# np.matrix([[1., 0, 0], \
# [0, .02**2, 0], \
# [0, 0, .02**2]]))
# #pdb.set_trace()
# gaussian_noise = np.matrix([0,0,0.]).T
# while not learn_manager.is_ready():
# pi = point3d_bl + gaussian_noise
# label = behavior(pi)
# #look at point, then try to add again
# if not learn_manager.add_example(pi, np.matrix([label])):
# rospy.logerr('Unable to extract features from point %s' % str(pi.T))
# continue
# learn_manager.train()
# learn_manager.draw_and_send()
# gaussian_noise = gaussian.sample()
# gaussian_noise[0,0] = 0
# #Acquire data
# #Given image, cloud, 3d point ask, extract features.
# #while no_interruptions and stopping_criteria_not_reached
# # maximally_informative_point = get maximally informative point
# # label = behavior(maximally_informative_point)
# # retrain!
# converged = False
# while not converged:
# indices, dists = learn_manager.select_next_instances(1)
# if idx != None:
# pt2d = learn_manager.points2d[:, indices[0]]
# pt3d = learn_manager.points3d[:, indices[0]]
# label = behavior(pt3d)
# #learn_manager.add_example(pt3d, np.matrix([label]), pt2d)
# if not learn_manager.add_example(pi, np.matrix([label])):
# rospy.logerr('Unable to extract features from point %s' % str(pi.T))
# continue
# learn_manager.train()
# learn_manager.draw_and_send()
# else:
# converged = True
#def gather_interest_point_dataset(self, point):
# gaussian = pr.Gaussian(np.matrix([0, 0, 0.]).T, \
# np.matrix([[1., 0, 0], \
# [0, .02**2, 0], \
# [0, 0, .02**2]]))
# for i in range(100):
# # perturb_point
# gaussian_noise = gaussian.sample()
# gaussian_noise[0,0] = 0
# success_off, touchloc_bl = self.light_switch1(point,
# point_offset=np.matrix([-.15, 0, 0]).T, press_contact_pressure=300,
# move_back_distance=np.matrix([-.005,0,0]).T, press_pressure=2500,
# press_distance=np.matrix([0,0,-.15]).T, visual_change_thres=.03)
# rospy.loginfo('Lights turned off? %s' % str(success_off))
# pdb.set_trace()
# self.behaviors.movement.move_absolute((np.matrix([.15, .45, 1.3]).T, self.start_location[1]), stop='pressure_accel')
# self.record_perceptual_data(touchloc_bl)
# self.behaviors.movement.move_absolute(self.start_location, stop='pressure_accel')
# if success_off:
# self.behaviors.movement.move_absolute((np.matrix([.15, .45, 1.3]).T, self.start_location[1]), stop='pressure_accel')
# self.record_perceptual_data(touchloc_bl)
# self.behaviors.movement.move_absolute(self.start_location, stop='pressure_accel')
# success_on, touchloc_bl2 = self.light_switch1(point,
# point_offset=np.matrix([-.15,0,-.10]).T, press_contact_pressure=300,
# move_back_distance=np.matrix([-0.005, 0, 0]).T, press_pressure=2500,
# press_distance=np.matrix([0,0,.1]).T, visual_change_thres=.03)
# ##1
# #if success_on:
# # self.movement.behaviors.move_absolute((np.matrix([.15, .45, 1.3]).T, self.start_location[1]), stop='pressure_accel')
# # self.record_perceptual_data(touchloc_bl)
# # self.movement.behaviors.move_absolute(self.start_location, stop='pressure_accel')
# #Turn on lights
# #success_on, touchloc_bl = self.light_switch1(npoint,
# else:
# return
#def record_perceptual_data_laser_scanner(self, point_touched_bl):
# #what position should the robot be in?
# #set arms to non-occluding pose
# #record region around the finger where you touched
# rospy.loginfo('Getting laser scan.')
# points = []
# for i in range(3):
# rospy.loginfo('scan %d' % i)
# points.append(self.laser_scan.scan(math.radians(180.), math.radians(-180.), 20./3.))
# rospy.loginfo('Getting Prosilica image.')
# prosilica_image = self.prosilica.get_frame()
# rospy.loginfo('Getting image from left wide angle camera.')
# left_image = self.wide_angle_camera_left.get_frame()
# rospy.loginfo('Getting image from right wide angle camera.')
# right_image = self.wide_angle_camera_left.get_frame()
# rospy.loginfo('Waiting for calibration.')
# while self.prosilica_cal.has_msg == False:
# time.sleep(.1)
# #which frames?
# rospy.loginfo('Getting transforms.')
# pro_T_bl = tfu.transform('/self.OPTICAL_FRAMEhigh_def_optical_frame', '/base_link', self.tf_listener)
# laser_T_bl = tfu.transform('/laser_tilt_link', '/base_link', self.tf_listener)
# tstring = time.strftime('%A_%m_%d_%Y_%I:%M%p')
# prosilica_name = '%s_highres.png' % tstring
# left_name = '%s_left.png' % tstring
# right_name = '%s_right.png' % tstring
# rospy.loginfo('Saving images (basename %s)' % tstring)
# cv.SaveImage(prosilica_name, prosilica_image)
# cv.SaveImage(left_name, left_image)
# cv.SaveImage(right_name, right_image)
# rospy.loginfo('Saving pickles')
# pickle_fname = '%s_interest_point_dataset.pkl' % tstring
# data_pkl = {'touch_point': point_touched_bl,
# 'points_laser': points,
# 'laser_T_bl': laser_T_bl,
# 'pro_T_bl': pro_T_bl,
# 'high_res': prosilica_name,
# 'prosilica_cal': self.prosilica_cal,
# 'left_image': left_name,
# 'left_cal': self.left_cal,
# 'right_image': right_name,
# 'right_cal': self.right_cal}
# #'point_touched': point_touched_bl}
#
# ut.save_pickle(data_pkl, pickle_fname)
# print 'Recorded to', pickle_fname
#if mode == 'autonomous learn':
# def light_behavior(point):
# point_offset = np.matrix([0, 0, 0.03]).T
# success, _ = self.light_switch1(point, point_offset=point_offset, \
# press_contact_pressure=300, move_back_distance=np.matrix([-.0075,0,0]).T,\
# press_pressure=3500, press_distance=np.matrix([0,0,-.15]).T, \
# visual_change_thres=.03)
# if success:
# return 1.0
# else:
# return 0.0
# self.untuck()
# self.behaviors.movement.move_absolute(self.start_location, stop='pressure')
# self.behaviors.movement.pressure_listener.rezero()
# self.autonomous_learn(point_bl, light_behavior, 'light_switch')
#if mode == 'location activated':
# self.location_activated_behaviors(point_bl)
#elif mode == 'location activated':
# all_locs = self.locations_man.list_all()
# for i, pair in enumerate(all_locs):
# key, task = pair
# print i, task, key
# rospy.loginfo('Select location to execute action')
# selected = int(raw_input())
#if mode == 'practice':
# self.add_to_practice_points_map(point_bl)
##If that location is new:
#map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
#point_map = tfu.transform_points(map_T_base_link, point_bl)
#close_by_locs = self.locations_man.list_close_by(point_map)
#if len(close_by_locs) <= 0:
# #initialize new location
# rospy.loginfo('Select task type:')
# for i, ttype in enumerate(self.locations_man.task_types):
# print i, ttype
# task_type = self.locations_man[int(raw_input())]
# task_id = self.locations_man.create_new_location(task_type, point_map)
# rospy.loginfo('if existing dataset exists enter that dataset\'s name')
# print 'friday_730_light_switch2.pkl'
# filename = raw_input()
# if len(filename) > 0:
# dataset = ut.load_pickle(filename)
# self.locations_man.data[task_id]['dataset'] = dataset
# self.locations_man.train(dataset, task_id)
# else:
# self.last_ditch_execution(
#elif len(close_by_locs) == 1:
# task_id, task = close_by_locs[0]
# rospy.loginfo('Executing task %s with id % s', task, task_id)
# self.execute_behavior(task_id, point_bl)
#elif len(close_by_locs) > 1:
# #TODO: implement this case
# rospy.logerr('ERROR: unimplemented')
# pdb.set_trace()
# self.execute_behavior(task_id, point_bl)
#else:
# rospy.logerr('ERROR: we shouldn\'t have reached here')
# pdb.set_trace()
| [
[
3,
0,
0.0028,
0.0037,
0,
0.66,
0,
591,
0,
2,
0,
0,
645,
0,
1
],
[
2,
1,
0.0022,
0.0012,
1,
0.11,
0,
555,
0,
2,
0,
0,
0,
0,
0
],
[
14,
2,
0.0025,
0.0006,
2,
0.19,
... | [
"class TaskError(Exception):\n def __init__(self, value):\n self.parameter = value\n\n def __str__(self):\n return repr(self.parameter)",
" def __init__(self, value):\n self.parameter = value",
" self.parameter = value",
" def __str__(self):\n return repr(self.pa... |
#! /usr/bin/python
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import roslib; roslib.load_manifest('trf_learn')
import rospy
import trf_learn.recognize_3d as r3d
import trf_learn.locations_manager as lcm
import sys
import optparse
import pdb
import numpy as np
import ml_lib.dataset as ds
class LeaveOneOut:
def __init__(self, filename, dataset_name):
self.rec_params = r3d.Recognize3DParam()
self.locations_man = lcm.LocationsManager(filename, rec_params=self.rec_params, train=True)
self.dataset_name = dataset_name
pdb.set_trace()
print 'The following datasets are available:', self.locations_man.data.keys()
def leave_one_out(self):
#For each data set in locations man, for each data point, remove one data point, train, test on that point
dataset = self.locations_man.data[self.dataset_name]['dataset']
num_datapoints = dataset.inputs.shape[1]
#dataset.inputs = np.row_stack((np.matrix(range(num_datapoints)), dataset.inputs))
predicted_values = []
correct = 0
incorrect = 0
confusion = np.matrix([[0,0], [0,0.]])
num_pos = np.sum(dataset.outputs)
num_neg = num_datapoints-num_pos
#for i in range(2):
for i in range(num_datapoints):
loo_dataset, left_out_input, left_out_output = ds.leave_one_out(dataset, i)
self.locations_man.data[self.dataset_name]['dataset'] = loo_dataset
self.locations_man.train(self.dataset_name, save_pca_images=False)
learner = self.locations_man.learners[self.dataset_name]
predicted = learner.classify(left_out_input)
predicted_values += predicted
if predicted[0] == 0:
if left_out_output[0,0] == 0:
confusion[0,0] += 1
else:
confusion[0,1] += 1
else:
#predicted[0] == 1
if left_out_output[0,0] == 0:
confusion[1,0] += 1
else:
confusion[1,1] += 1
if predicted[0] == left_out_output[0,0]:
correct += 1
else:
incorrect += 1
print '============================================'
print 'dataset', self.dataset_name
print 'confusion matrix\n', confusion
confusion[:,0] = confusion[:,0] / num_neg
confusion[:,1] = confusion[:,1] / num_pos
print 'correct', correct, '\nincorrect', incorrect, '\npercentage', 100.* (correct/float(num_datapoints))
print predicted_values
print '============================================'
#predicted_values += predicted
#np.matrix(predicted_values)
#print 'result', predicted[0], predicted.__class__, left_out_output[0,0]
if __name__ == '__main__':
#p = optparse.OptionParser()
#p.add_option("-d", "--display", action="store", default='locations_narrow_v11.pkl')
if len(sys.argv) > 1:
name = sys.argv[1]
else:
name = 'locations_narrow_v11.pkl'
loo = LeaveOneOut(name, sys.argv[2])
loo.leave_one_out()
print 'end!'
| [
[
1,
0,
0.2655,
0.0088,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.2655,
0.0088,
0,
0.66,
0.0909,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.2743,
0.0088,
0,
0.... | [
"import roslib; roslib.load_manifest('trf_learn')",
"import roslib; roslib.load_manifest('trf_learn')",
"import rospy",
"import trf_learn.recognize_3d as r3d",
"import trf_learn.locations_manager as lcm",
"import sys",
"import optparse",
"import pdb",
"import numpy as np",
"import ml_lib.dataset a... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import roslib; roslib.load_manifest('hai_sandbox')
import hrl_lib.util as ut
import pylab as pb
import numpy as np
import pdb
def conf_to_percent(rec):
    """Normalize a confusion-matrix record and return per-class rates.

    rec is a dict with keys 'mat' (2x2 count matrix), 'neg' and 'pos'
    (class sizes).  rec['mat'] is normalized in place; the returned pair
    is (true-negative rate, true-positive rate).
    """
    matrix = rec['mat']
    matrix[0,:] = matrix[0,:] / rec['neg']
    matrix[1,:] = matrix[1,:] / rec['pos']
    return matrix[0,0], matrix[1,1]
def plot_classifier_performance(fname, pname, plot_all):
results = ut.load_pickle(fname)
#pdb.set_trace()
#results['train_set_statistics'] # [ {'conf', 'size'}, {}...]
#results['current_scan_statistics'] # [ {'conf'} {}...]
#results['perf_on_other_scans'] # [[{'name', 'conf'}, {}...] [{} {}...]...]
#where conf is {'mat', 'neg', 'pos'}
scores = {}
for rlist in results['perf_on_other_scans']:
for d in rlist:
if scores.has_key(d['name']):
scores[d['name']].append(conf_to_percent(d['conf']))
else:
scores[d['name']] = [conf_to_percent(d['conf'])]
for k in scores.keys():
scores[k] = zip(*scores[k])
if results.has_key('train_set_statistics'):
train_neg, train_pos = zip(*[conf_to_percent(d['conf']) for d in results['train_set_statistics']])
else:
train_neg = train_pos = None
if results.has_key('current_scan_statistics'):
pdb.set_trace()
test_neg, test_pos = zip(*[conf_to_percent(d['conf']) for d in results['current_scan_statistics']])
else:
test_neg = test_pos = None
n_iterations = np.array(range(len(results['train_set_statistics'])))
#======================================================================
pb.figure(1)
if results.has_key('train_set_statistics'):
pb.plot(n_iterations, train_neg, label='train ' + pname)
if test_neg != None:
pb.plot(n_iterations, test_neg, label='test ' + pname)
if plot_all:
for i, k in enumerate(scores.keys()):
pb.plot(n_iterations, scores[k][0], '--', label=str(i))
#if results.has_key('current_scan_statistics'):
if results.has_key('converged_at_iter'):
pb.plot([results['converged_at_iter'], results['converged_at_iter']], [0., 1.], 'r')
pb.title('True negatives')
pb.legend()
#======================================================================
pb.figure(2)
if train_pos != None:
pb.plot(n_iterations, train_pos, label='train ' + pname)
if test_pos != None:
pb.plot(n_iterations, test_pos, label='test ' + pname)
#if results.has_key('current_scan_statistics'):
print 'mapping from dataset to id'
if plot_all:
for i, k in enumerate(scores.keys()):
pb.plot(n_iterations, scores[k][1], '--', label=str(i))
print 'ID', i, 'dataset', k
if results.has_key('converged_at_iter'):
pb.plot([results['converged_at_iter'], results['converged_at_iter']], [0., 1.], 'r')
pb.title('True positives')
pb.legend()
def plot_features_perf(fnames, pnames):
    """Bar-chart comparison of per-dataset true-positive (figure 1) and
    true-negative (figure 2) rates across several feature configurations.

    fnames: list of result-pickle paths, one per feature configuration.
    pnames: matching list of labels for the x-axis groups.
    """
    all_scores = {}
    dset_names = None
    for fname, pname in zip(fnames, pnames):
        results = ut.load_pickle(fname)
        train_neg, train_pos = zip(*[conf_to_percent(d['conf']) for d in results['train_set_statistics']])
        scores = {}
        for rlist in results['perf_on_other_scans']:
            for d in rlist:
                if scores.has_key(d['name']):
                    scores[d['name']].append(conf_to_percent(d['conf']))
                else:
                    scores[d['name']] = [conf_to_percent(d['conf'])]
        # Transpose each dataset's (neg, pos) pairs into ((neg...), (pos...)).
        for k in scores.keys():
            scores[k] = zip(*scores[k])
        # The training-set rates are stored under the pseudo-dataset 'train'.
        scores['train'] = [(train_neg), (train_pos)]
        all_scores[pname] = scores
        # Dataset names are taken from the first pickle; later pickles are
        # assumed to contain the same datasets.
        if dset_names == None:
            dset_names = scores.keys()
    # neg_by_dset[name][j]: true-negative rate of dataset `name` under
    # feature configuration pnames[j].
    neg_by_dset = {}
    for n in dset_names:
        posn = []
        for pname in pnames:
            posn.append(all_scores[pname][n][0][0])
        neg_by_dset[n] = posn
    pos_by_dset = {}
    for n in dset_names:
        posn = []
        for pname in pnames:
            posn.append(all_scores[pname][n][1][0])
        pos_by_dset[n] = posn
    # One group of bars per feature configuration; one random-colored bar
    # per dataset within each group.
    ind = np.arange(len(pnames))
    width = 0.05
    fig = pb.figure(1)
    ax = fig.add_subplot(111)
    rects=[]
    for i, name in enumerate(dset_names):
        rect = ax.bar(ind+(width*i), pos_by_dset[name], width, color=tuple(np.random.rand(3).tolist()))
        rects.append(rect)
    ax.set_ylabel('accuracy')
    ax.set_title('True positives by dataset and features used')
    ax.set_xticks(ind+width)
    ax.set_xticklabels(tuple(pnames))
    fig = pb.figure(2)
    ax = fig.add_subplot(111)
    rects=[]
    for i, name in enumerate(dset_names):
        rect = ax.bar(ind+(width*i), neg_by_dset[name], width, color=tuple(np.random.rand(3).tolist()))
        rects.append(rect)
    ax.set_ylabel('accuracy')
    ax.set_title('True negatives by dataset and features used')
    ax.set_xticks(ind+width)
    ax.set_xticklabels(tuple(pnames))
if __name__ == '__main__':
    import sys
    import optparse
    # Command line: -m mode ('active' or 'features'), with repeatable
    # -f result-file and -n label options forming parallel lists.
    parser = optparse.OptionParser()
    parser.add_option("-m", "--mode", action="store", type="string")
    parser.add_option("-f", "--file", action="append", type="string")
    parser.add_option('-n', '--name', action="append", type="string")
    options, _extra = parser.parse_args()
    if options.mode == 'active':
        # Per-dataset curves are drawn only when a single file is given.
        plot_all = len(options.file) <= 1
        for idx in range(len(options.file)):
            plot_classifier_performance(options.file[idx], options.name[idx], plot_all)
        pb.show()
    if options.mode == 'features':
        plot_features_perf(options.file, options.name)
        pb.show()
| [
[
1,
0,
0.1393,
0.005,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1393,
0.005,
0,
0.66,
0.1111,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1443,
0.005,
0,
0.66,... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import hrl_lib.util as ut",
"import pylab as pb",
"import numpy as np",
"import pdb",
"def conf_to_percent(rec):\n conf = rec['mat']\n conf[0,:] = conf[0,:] / rec['neg']\n conf[1,:] = co... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
| [] | [] |
#!/usr/bin/python
import roslib; roslib.load_manifest('trf_learn')
import trf_learn.recognize_3d as r3d
import hrl_lib.util as ut
import pylab as pb
from PIL import Image
import os.path as pt
import pdb
import numpy as np
import os
def minmax(mat):
    """Return (min_x, max_x, min_y, max_y) for a 2xN matrix of points,
    where row 0 holds x coordinates and row 1 holds y coordinates."""
    xs = mat[0,:]
    ys = mat[1,:]
    return (np.min(xs), np.max(xs), np.min(ys), np.max(ys))
def num_bins(points, bin_size):
minx, maxx, miny, maxy = minmax(points)
rangex = maxx-minx
xbins = np.ceil(rangex/bin_size)
rangey = maxy-miny
ybins = np.ceil(rangey/bin_size)
print 'XBINS', xbins, 'YBINS', ybins
return xbins, ybins
##
# ['neg_pred', 'image', 'pos_pred', 'tried', 'center']
def density_plot(pickle_file_name):
print 'density_plot: processing', pickle_file_name
BIN_SIZE = 20
#PICKLE_FOLDER = 'pickle_files'
data_dict = ut.load_pickle(pickle_file_name)
orig_pickle_folder, _ = pt.split(pickle_file_name)
folder_name, img_name = pt.split(data_dict['image'])
nimg_path = pt.join(orig_pickle_folder, img_name)
img_obj = Image.open(nimg_path)
w, h = img_obj.size
pb.imshow(img_obj, origin='lower')
data_dict['neg_pred'][1,:] = h - data_dict['neg_pred'][1,:]
data_dict['pos_pred'][1,:] = h - data_dict['pos_pred'][1,:]
all_pts = np.column_stack((data_dict['neg_pred'], data_dict['pos_pred']))
Hall, xedges, yedges = np.histogram2d(all_pts[0,:].A1, all_pts[1,:].A1,
bins=num_bins(all_pts, BIN_SIZE))
Hneg, xedges, yedges = np.histogram2d(data_dict['neg_pred'][0,:].A1, data_dict['neg_pred'][1,:].A1,
bins=[xedges, yedges])
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0]]
Himage = (Hall-Hneg).T
max_val, min_val = np.max(Himage), np.min(Himage)
Hrgba = np.zeros((Himage.shape[0], Himage.shape[1], 4), dtype='uint8')
Hrgba[:,:,0] = 0
Hrgba[:,:,1] = 255 #Himage*80
Hrgba[:,:,2] = 0
Hrgba[:,:,3] = 255
r,c = np.where(Himage == 0)
Hrgba[r,c,3] = 0
print 'max', max_val, 'min', min_val
pb.imshow(Hrgba, extent=extent, interpolation='spline36', origin='upper', alpha = .7)
if data_dict['tried'][1] == r3d.NEGATIVE:
pb.plot(data_dict['tried'][0][0,0], h-data_dict['tried'][0][1,0], 'bx')
else:
pb.plot(data_dict['tried'][0][0,0], h-data_dict['tried'][0][1,0], 'bx')
#pdb.set_trace()
#pb.plot(data_dict['neg_pred'][0,:].A1, data_dict['neg_pred'][1,:].A1, 'rx')
#pb.plot(data_dict['pos_pred'][0,:].A1, data_dict['pos_pred'][1,:].A1, 'x')
min_x, max_x, min_y, max_y = minmax(all_pts)
pb.axis([max(min_x-100,0), min(max_x+100,w), max(min_y-100, 0), min(max_y+100, h)])
#pb.axis([0, w, 0, h])
#name, extension = pt.splitext(img_name)
name = pt.splitext(pt.split(pickle_file_name)[1])[0]
if data_dict['tried'][1] == r3d.NEGATIVE:
figname = pt.join(orig_pickle_folder, name + '_plot_FAIL.png')
else:
figname = pt.join(orig_pickle_folder, name + '_plot_SUCC.png')
pb.savefig(figname)
#pb.show()
if __name__ == '__main__':
    import sys
    # Usage: script.py result1.pkl [result2.pkl ...]
    # (removed an unused in-block optparse import; iterating argv[1:]
    # handles the no-argument case without an explicit length check)
    for pickle_name in sys.argv[1:]:
        density_plot(pickle_name)
| [
[
1,
0,
0.0211,
0.0105,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0211,
0.0105,
0,
0.66,
0.0769,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0316,
0.0105,
0,
0.... | [
"import roslib; roslib.load_manifest('trf_learn')",
"import roslib; roslib.load_manifest('trf_learn')",
"import trf_learn.recognize_3d as r3d",
"import hrl_lib.util as ut",
"import pylab as pb",
"from PIL import Image",
"import os.path as pt",
"import pdb",
"import numpy as np",
"import os",
"de... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import roslib; roslib.load_manifest('trf_learn')
import rospy
import cv
import numpy as np
import feature_extractor_fpfh.srv as fsrv
import hrl_opencv.image3d as i3d
import hrl_lib.rutils as ru
import hrl_lib.prob as pr
import hrl_lib.tf_utils as tfu
##
# Generalized from Probabilistic robotics for N != weights.shape[0]
def sample_points(weights, N):
    """Draw N low-variance samples from the discrete distribution in
    *weights* (an M x 1 column matrix with M >= N).

    Generalized from the low-variance resampler in Probabilistic
    Robotics to the case N != M: N comb positions are drawn without
    replacement from 1..M.  Returns a list of N sampled indices.
    """
    assert(weights.shape[0] >= N)
    num_weights = weights.shape[0]
    weights = weights / np.sum(weights)
    # Random offset of the first comb tooth.
    offset = np.random.rand() * (1.0 / num_weights)
    cumulative = weights[0,0]
    idx = 0
    sampled = []
    positions = np.sort(np.random.permutation(np.arange(1, num_weights + 1))[0:N]).tolist()
    for m in positions:
        tooth = offset + (m - 1) * (1.0 / num_weights)
        # Advance through the CDF until it covers this tooth.
        while tooth > cumulative:
            idx = idx + 1
            cumulative = cumulative + weights[idx,0]
        sampled.append(idx)
    return sampled
def test_sample_points():
w = np.matrix([.1,.4,.5]).T
count = [0., 0., 0.]
for i in range(6000.):
idx = sample_points(w, 2)
for x in idx:
count[x] += 1
print np.matrix(count) / np.sum(count)
def intensity_pyramid_feature(point2d_image, np_image_arr, win_size, multipliers, flatten=True):
    """Extract image-intensity windows around *point2d_image* at several
    scales.

    For each multiplier, grabs a window of side win_size*multiplier
    (resized back to win_size when the multiplier != 1).  Returns the
    row-stacked features (or the list of per-scale features when flatten
    is False), or None if any window falls outside the image.
    """
    invalid_location = False
    local_intensity = []
    for multiplier in multipliers:
        if multiplier == 1:
            features = i3d.local_window(point2d_image, np_image_arr, win_size, flatten=flatten)
        else:
            features = i3d.local_window(point2d_image, np_image_arr, win_size*multiplier,
                                        resize_to=win_size, flatten=flatten)
        # 'is None' instead of '== None': the returned window may be a
        # numpy array, for which '== None' does not reduce to one bool.
        if features is None:
            invalid_location = True
            break
        else:
            local_intensity.append(features)
    if invalid_location:
        return None
    else:
        if flatten:
            return np.row_stack(local_intensity)
        else:
            return local_intensity
class Subsampler:
    """Thin client wrapper around the 'subsample' PCL point-cloud service."""
    def __init__(self):
        self.proxy = rospy.ServiceProxy('subsample', fsrv.SubsampleCalc)

    def subsample(self, points3d, frame='base_link'):
        """Send a 3xN point matrix to the service; return the reduced cloud
        as a numpy matrix in the same frame."""
        request = fsrv.SubsampleCalcRequest()
        request.input = ru.np_to_pointcloud(points3d, frame)
        response = self.proxy(request)
        return ru.pointcloud_to_np(response.output)
class IntensityCloudFeatureExtractor:
    """Extracts per-point features from an image + point-cloud pair.

    The cloud is subsampled through the PCL service, importance-sampled
    around an expected location, and each surviving point gets a feature
    vector of synthetic distance features plus a multi-scale image
    intensity pyramid.
    """
    def __init__(self, pointcloud_bl, cvimage_mat, expected_loc_bl, distance_feature_points,
                 image_T_bl, camera_calibration, params):
        # pointcloud_bl: 3xN point cloud in the base_link frame.
        # cvimage_mat: OpenCV image the points project into.
        # expected_loc_bl: 3x1 expected location (mean of the sampling Gaussian).
        # distance_feature_points: 3xK reference points for distance features, or None.
        # image_T_bl: transform from base_link into the camera frame.
        # camera_calibration: projection model exposing .project(), .w, .h.
        # params: uncertainty_x/y/z, n_samples, win_size, win_multipliers.
        self.pointcloud_bl = pointcloud_bl
        self.cvimage_mat = cvimage_mat
        self.expected_loc_bl = expected_loc_bl
        self.distance_feature_points = distance_feature_points
        self.image_T_bl = image_T_bl
        self.camera_calibration = camera_calibration
        self.params = params
        self.subsampler_service = Subsampler()
        self.sizes = None #Important but access should be limited to decouple code

    def get_sizes(self):
        """Return the feature-block size dict (None until feature_vec_at
        has succeeded at least once)."""
        return self.sizes

    def _subsample(self):
        """Reduce the cloud through the external PCL subsample service."""
        rospy.loginfo('Subsampling using PCL')
        rospy.loginfo('before %s' % str(self.pointcloud_bl.shape))
        self.pc_sub_samp_bl = self.subsampler_service.subsample(self.pointcloud_bl)
        rospy.loginfo('after %s' % str(self.pc_sub_samp_bl.shape))

    def _sample_points(self):
        """Importance-sample points near the expected location.

        Returns (3xK points in base_link, 2xK rounded pixel coordinates),
        keeping only points whose projection lands inside the image.
        """
        rospy.loginfo('Sampling points')
        #evaluate all points under a Gaussian centered on the expected location
        gaussian = pr.Gaussian(self.expected_loc_bl, \
                               np.matrix([[self.params.uncertainty_x**2, 0, 0], \
                                          [0, self.params.uncertainty_y**2, 0], \
                                          [0, 0, self.params.uncertainty_z**2]]))
        pdf = gaussian.pdf_mat()
        probs = np.matrix(pdf(self.pc_sub_samp_bl))
        #sample unique points
        n_samples = min(self.params.n_samples, self.pc_sub_samp_bl.shape[1])
        pt_indices = list(set(sample_points(probs.T, n_samples)))
        sampled_pts3d_bl = self.pc_sub_samp_bl[:, pt_indices]
        sampled_pts3d_image = tfu.transform_points(self.image_T_bl, sampled_pts3d_bl)
        sampled_pts2d = self.camera_calibration.project(sampled_pts3d_image)
        sampled_pix2d = np.matrix(np.round(sampled_pts2d))
        #throw away points that project outside the image bounds
        x = sampled_pix2d[0,:]
        y = sampled_pix2d[1,:]
        # BUGFIX: the original combined the four bounds tests with '+'
        # (elementwise OR), which is always true, so the filter was a
        # no-op.  Use elementwise AND so out-of-frame projections are
        # actually discarded.
        good_pts = np.where(np.multiply(np.multiply(x >= 0, x < self.camera_calibration.w),
                                        np.multiply(y >= 0, y < self.camera_calibration.h)))[1].A1
        sampled_pts3d_bl = sampled_pts3d_bl[:, good_pts]
        sampled_pix2d = sampled_pix2d[:, good_pts]
        rospy.loginfo('got %s good points' % str(sampled_pix2d.shape[1]))
        return sampled_pts3d_bl, sampled_pix2d

    def feature_vec_at(self, point3d_bl, point2d_image):
        """Feature blocks for one point: [distance_feas, intensity] (or
        just [intensity] when no reference points are configured).
        Returns None when the image window is out of bounds.
        """
        fea_calculated = []
        #Get synthetic distance features.  'is not None' instead of
        #'!= None': numpy matrices do not reduce '!= None' to one bool.
        distance_feas = None
        if self.distance_feature_points is not None:
            distance_feas = np.power(np.sum(np.power(self.distance_feature_points - point3d_bl, 2), 0), .5).T
            fea_calculated.append(distance_feas)
        #Get intensity features
        intensity = intensity_pyramid_feature(point2d_image, np.asarray(self.cvimage_mat),
                                              self.params.win_size, self.params.win_multipliers, True)
        if intensity is None:
            return None
        else:
            fea_calculated.append(intensity)
        # Record the block sizes the first time through.
        if self.sizes is None:
            self.sizes = {}
            if distance_feas is not None:
                self.sizes['distance'] = distance_feas.shape[0]
            self.sizes['intensity'] = intensity.shape[0]
        return fea_calculated

    def extract_features(self):
        """Run the full pipeline.

        Returns (stacked feature matrix, 2xK pixel coordinates, 3xK
        base_link points) for the points whose features are valid.
        """
        self._subsample()
        sampled_pts3d_bl, sampled_pix2d = self._sample_points()
        features_l = []
        pts_with_features = []
        rospy.loginfo('Extracting features')
        for i in range(sampled_pts3d_bl.shape[1]):
            features = self.feature_vec_at(sampled_pts3d_bl[:,i], sampled_pix2d[:,i])
            if features is not None:
                features_l.append(features)
                pts_with_features.append(i)
            if i % 500 == 0:
                rospy.loginfo(i)
        # Regroup per-point blocks by type, then stack into one matrix.
        features_by_type = zip(*features_l)
        xs = np.row_stack([np.column_stack(f) for f in features_by_type])
        rospy.loginfo('Finished feature extraction.')
        return xs, sampled_pix2d[:, pts_with_features], sampled_pts3d_bl[:, pts_with_features]
| [
[
1,
0,
0.134,
0.0048,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.134,
0.0048,
0,
0.66,
0.0714,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1388,
0.0048,
0,
0.66... | [
"import roslib; roslib.load_manifest('trf_learn')",
"import roslib; roslib.load_manifest('trf_learn')",
"import rospy",
"import cv",
"import numpy as np",
"import feature_extractor_fpfh.srv as fsrv",
"import hrl_opencv.image3d as i3d",
"import hrl_lib.rutils as ru",
"import hrl_lib.prob as pr",
"i... |
#
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Advait Jain (Healthcare Robotics Lab, Georgia Tech.)
import sys,time
import hrl_lib.util as ut, hrl_lib.transforms as tr
import matplotlib_util.util as mpu
import numpy as np, math
sys.path.append('../')
import segway_motion_calc as smc
import arm_trajectories as at
def plot_hook_translation(curr_pos_tl,cx_tl,cy_tl,cy_ts,
                          start_pos_ts,eq_pt_tl,bndry,wrkspc_pts):
    """Visualize one segway_motion_repulse query: the equilibrium point,
    end-effector FK position, workspace boundary, the boundary points
    considered "close", and the resulting translation vector.

    The _tl / _ts suffixes follow the frame conventions used by
    segway_motion_calc (torso-local vs. start frame) -- see that module.
    """
    vt,a = smc.segway_motion_repulse(curr_pos_tl,cx_tl,cy_tl,cy_ts,
                                     start_pos_ts,eq_pt_tl,bndry,wrkspc_pts)
    # Note: plot_yx takes (y, x) ordering.
    mpu.plot_yx(eq_pt_tl[1,:].A1, eq_pt_tl[0,:].A1, linewidth=2,
                color='g', scatter_size=15, label='Eq Pt')
    mpu.plot_yx(curr_pos_tl[1,:].A1, curr_pos_tl[0,:].A1, linewidth=0,
                color='b', scatter_size = 15, label = 'FK')
    mpu.plot_yx(bndry[1,:].A1, bndry[0,:].A1, linewidth=0, color='y',
                scatter_size=8)
    # Single reference dot at (x, y) = (0, -0.2).
    mpu.plot_yx([-0.2], [0.], linewidth=0, color='b', scatter_size=2)
    bndry_dist_eq = smc.dist_from_boundary(eq_pt_tl, bndry, wrkspc_pts) # signed
    bndry_dist_ee = smc.dist_from_boundary(curr_pos_tl, bndry, wrkspc_pts) # signed
    # Highlight boundary points near whichever of the EE or equilibrium
    # point is closer to the boundary.
    if bndry_dist_ee < bndry_dist_eq:
        p = curr_pos_tl
    else:
        p = eq_pt_tl
    pts_close = smc.pts_within_dist(p[0:2,:],bndry,0.01,0.1)
    mpu.plot_yx(pts_close[1,:].A1, pts_close[0,:].A1, linewidth=0,
                color='r', scatter_size = 8)
    # Normalize the translation vector before drawing it as a quiver.
    nrm = np.linalg.norm(vt)
    vt = vt/nrm
    mpu.plot_quiver_yxv(p[1,:].A1, p[0,:].A1, vt, scale=12)
    mpu.show()
# only interested in the translation. so junk values for circle
# params are ok.
def plot_eq_pt_motion_tl():
    """Plot equilibrium-point and end-effector trajectories (torso-local
    frame) with the repulse translation vector at every time step.

    Reads the module-level globals loaded from the pickle: ee_tl, eq_tl,
    ee_ts, st, bndry, wrkspc_pts, start_pos_ts.
    """
    vec_list = []
    for i in range(len(ee_tl.p_list)):
#    for i in range(5):
        curr_pos_tl = np.matrix(ee_tl.p_list[i]).T
        eq_pt_tl = np.matrix(eq_tl.p_list[i]).T
        pts_ts = np.matrix(ee_ts.p_list[0:i+1]).T
        pts_2d_ts = pts_ts[0:2,:]
#        rad_opt,cx_ts,cy_ts = at.fit_circle(rad_guess,x_guess,y_guess,pts_2d_ts,
#                                            method='fmin_bfgs',verbose=False)
        # Circle fitting is disabled; hard-coded circle parameters stand in
        # (only the translation direction is of interest here).
        rad_opt = 1.0
        cx_ts,cy_ts = 0.5,-1.3
        c_ts = np.matrix([cx_ts,cy_ts,0.]).T
        x,y,a = st.x_list[i],st.y_list[i],st.a_list[i]
        # Transform the circle center from the start frame to torso-local.
        c_tl = smc.tlTts(c_ts,x,y,a)
        cx_tl,cy_tl = c_tl[0,0],c_tl[1,0]
        t0 = time.time()
        vt,a = smc.segway_motion_repulse(curr_pos_tl,cx_tl,cy_tl,cy_ts,
                                         start_pos_ts,eq_pt_tl,bndry,wrkspc_pts)
        t1 = time.time()
#        print 'time to segway_motion_repulse:',t1-t0
        # Normalize so the quiver arrows all have unit length.
        nrm = np.linalg.norm(vt)
#        if nrm > 0.005:
        vt = vt/nrm
        vec_list.append(vt.A1.tolist())
    v = np.matrix(vec_list).T
    eq_pts = np.matrix(eq_tl.p_list).T
    ee_pts = np.matrix(ee_tl.p_list).T
    mpu.plot_yx(eq_pts[1,:].A1,eq_pts[0,:].A1,linewidth=1,color='g',label='eq')
    mpu.plot_yx(ee_pts[1,:].A1,ee_pts[0,:].A1,linewidth=1,color='b',label='FK')
    mpu.plot_yx(bndry[1,:].A1,bndry[0,:].A1,linewidth=0,color='y')
    mpu.plot_quiver_yxv(eq_pts[1,:].A1,eq_pts[0,:].A1,v,scale=30)
    mpu.legend()
    mpu.show()
def plot_single_point():
n_pts = 115
pts_ts = np.matrix(ee_ts.p_list[0:n_pts]).T
pts_2d_ts = pts_ts[0:2,:]
rad_opt,cx_ts,cy_ts = at.fit_circle(rad_guess,x_guess,y_guess,pts_2d_ts,
method='fmin_bfgs',verbose=False)
print 'rad_opt,cx_ts,cy_ts:',rad_opt,cx_ts,cy_ts
c_ts = np.matrix([cx_ts,cy_ts,0.]).T
x,y,a = st.x_list[n_pts-1],st.y_list[n_pts-1],st.a_list[n_pts-1]
c_tl = smc.tlTts(c_ts,x,y,a)
cx_tl,cy_tl = c_tl[0,0],c_tl[1,0]
curr_pos_tl = np.matrix(ee_tl.p_list[n_pts-1]).T
eqpt_tl = np.matrix(eq_tl.p_list[n_pts-1]).T
plot_hook_translation(curr_pos_tl,cx_tl,cy_tl,cy_ts,start_pos_ts,
eqpt_tl,bndry,wrkspc_pts)
def calc_motion_all():
for i in range(len(ee_tl.p_list)):
curr_pos_tl = np.matrix(ee_tl.p_list[i]).T
eq_pt_tl = np.matrix(eq_tl.p_list[i]).T
pts_ts = np.matrix(ee_ts.p_list[0:i+1]).T
pts_2d_ts = pts_ts[0:2,:]
rad_opt,cx_ts,cy_ts = at.fit_circle(rad_guess,x_guess,y_guess,pts_2d_ts,
method='fmin_bfgs',verbose=False)
c_ts = np.matrix([cx_ts,cy_ts,0.]).T
x,y,a = st.x_list[i],st.y_list[i],st.a_list[i]
c_tl = smc.tlTts(c_ts,x,y,a)
cx_tl,cy_tl = c_tl[0,0],c_tl[1,0]
vt,a = smc.segway_motion_repulse(curr_pos_tl,cx_tl,cy_tl,cy_ts,
start_pos_ts,eq_pt_tl,bndry)
print 'angle:',math.degrees(a)
# ---- script entry: load a logged trial pickle and visualize it ----
# Fix: fail with a usage message instead of an IndexError when invoked
# without a filename argument. sys.exit(str) works under both Py2 and Py3.
if len(sys.argv) < 2:
    sys.exit('Usage: %s <trial pickle file>' % sys.argv[0])

fname = sys.argv[1]
d = ut.load_pickle(fname)

st = d['segway']                              # segway (base) trajectory
ee_tl = at.joint_to_cartesian(d['actual'])    # FK end effector, torso-local
ee_ts = at.account_segway_motion(ee_tl, st)   # same, in the start frame
eq_tl = at.joint_to_cartesian(d['eq_pt'])     # equilibrium point, torso-local
eq_ts = at.account_segway_motion(eq_tl, st)   # same, in the start frame
bndry = d['bndry']                            # workspace boundary points
wrkspc_pts = d['wrkspc']

# Initial guess for the circle fit: radius 1 m, center one radius below the
# first end-effector position (start frame).
rad_guess = 1.0
start_pos_ts = np.matrix(ee_ts.p_list[0]).T
x_guess = start_pos_ts[0,0]
y_guess = start_pos_ts[1,0] - rad_guess

plot_single_point()
#calc_motion_all()
#plot_eq_pt_motion_tl()
| [
[
1,
0,
0.1839,
0.0057,
0,
0.66,
0,
509,
0,
2,
0,
0,
509,
0,
0
],
[
1,
0,
0.1897,
0.0057,
0,
0.66,
0.04,
775,
0,
2,
0,
0,
775,
0,
0
],
[
1,
0,
0.1954,
0.0057,
0,
0.... | [
"import sys,time",
"import hrl_lib.util as ut, hrl_lib.transforms as tr",
"import matplotlib_util.util as mpu",
"import numpy as np, math",
"sys.path.append('../')",
"import segway_motion_calc as smc",
"import arm_trajectories as at",
"def plot_hook_translation(curr_pos_tl,cx_tl,cy_tl,cy_ts,\n ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.