Fixed trajectory baseline

Michel Breyer 2021-04-29 10:50:33 +02:00
parent af955b40ed
commit cd9fb814e9
5 changed files with 188 additions and 18 deletions

config/active_grasp.yaml (new file)

@@ -0,0 +1,5 @@
+active_grasp:
+  frame_id: task
+  camera_name: cam
+vgn:
+  model: $(find vgn)/data/models/vgn_conv.pth
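
A quick orientation note (not from the commit): the active_grasp block matches the node name that run.py now uses, so the policy reads frame_id and camera_name as private parameters, while vgn/model resolves relative to the node's namespace (/vgn/model here); the $(find vgn) substitution only works because the launch file below loads this YAML with subst_value="true". Roughly, with a hypothetical workspace path in the last comment:

    import rospy

    frame_id = rospy.get_param("~frame_id")        # "task", i.e. /active_grasp/frame_id
    camera_name = rospy.get_param("~camera_name")  # "cam"
    model_path = rospy.get_param("vgn/model")      # e.g. /home/user/ws/src/vgn/data/models/vgn_conv.pth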


@@ -25,7 +25,7 @@ Panels:
     Experimental: false
     Name: Time
     SyncMode: 0
-    SyncSource: ""
+    SyncSource: Scene Cloud
 Preferences:
   PromptSaveOnExit: true
 Toolbars:
@@ -191,6 +191,78 @@ Visualization Manager:
         {}
       Update Interval: 0
       Value: true
+    - Class: rviz/Marker
+      Enabled: true
+      Marker Topic: /workspace
+      Name: Workspace
+      Namespaces:
+        "": true
+      Queue Size: 100
+      Value: true
+    - Alpha: 0.20000000298023224
+      Autocompute Intensity Bounds: false
+      Autocompute Value Bounds:
+        Max Value: 10
+        Min Value: -10
+        Value: true
+      Axis: Z
+      Channel Name: intensity
+      Class: rviz/PointCloud2
+      Color: 255; 255; 255
+      Color Transformer: Intensity
+      Decay Time: 0
+      Enabled: false
+      Invert Rainbow: false
+      Max Color: 255; 255; 255
+      Min Color: 0; 0; 0
+      Name: TSDF
+      Position Transformer: XYZ
+      Queue Size: 10
+      Selectable: true
+      Size (Pixels): 3
+      Size (m): 0.007499999832361937
+      Style: Boxes
+      Topic: /tsdf
+      Unreliable: false
+      Use Fixed Frame: true
+      Use rainbow: true
+      Value: false
+    - Alpha: 1
+      Autocompute Intensity Bounds: true
+      Autocompute Value Bounds:
+        Max Value: 10
+        Min Value: -10
+        Value: true
+      Axis: Z
+      Channel Name: intensity
+      Class: rviz/PointCloud2
+      Color: 255; 255; 255
+      Color Transformer: Intensity
+      Decay Time: 0
+      Enabled: true
+      Invert Rainbow: false
+      Max Color: 255; 255; 255
+      Min Color: 0; 0; 0
+      Name: Scene Cloud
+      Position Transformer: XYZ
+      Queue Size: 10
+      Selectable: true
+      Size (Pixels): 3
+      Size (m): 0.009999999776482582
+      Style: Spheres
+      Topic: /points
+      Unreliable: false
+      Use Fixed Frame: true
+      Use rainbow: true
+      Value: true
+    - Class: rviz/MarkerArray
+      Enabled: true
+      Marker Topic: /grasps
+      Name: Predicted Grasps
+      Namespaces:
+        "": true
+      Queue Size: 100
+      Value: true
   Enabled: true
   Global Options:
     Background Color: 48; 48; 48
@@ -219,7 +291,7 @@ Visualization Manager:
   Views:
     Current:
       Class: rviz/Orbit
-      Distance: 1.3004181385040283
+      Distance: 1.5115783214569092
       Enable Stereo Rendering:
         Stereo Eye Separation: 0.05999999865889549
         Stereo Focal Distance: 1
@@ -227,17 +299,17 @@ Visualization Manager:
         Value: false
       Field of View: 0.7853981852531433
       Focal Point:
-        X: 0.14782209694385529
-        Y: 0.10340728610754013
-        Z: 0.3605891168117523
+        X: 0.1477860063314438
+        Y: 0.1189696341753006
+        Z: 0.4057367146015167
       Focal Shape Fixed Size: true
       Focal Shape Size: 0.05000000074505806
       Invert Z Axis: false
       Name: Current View
       Near Clip Distance: 0.009999999776482582
-      Pitch: 0.2697962522506714
+      Pitch: 0.10479649901390076
       Target Frame: <Fixed Frame>
-      Yaw: 5.108528137207031
+      Yaw: 5.323526859283447
     Saved: ~
 Window Geometry:
   Displays:
@@ -255,5 +327,5 @@ Window Geometry:
   Views:
     collapsed: true
   Width: 1200
-  X: 659
-  Y: 27
+  X: 992
+  Y: 65


@@ -1,9 +1,10 @@
 <?xml version="1.0" ?>
 <launch>
   <!-- Arguments -->
-  <arg name="rviz" default="false" />
+  <arg name="rviz" default="true" />

   <!-- Load parameters. -->
+  <rosparam command="load" file="$(find active_grasp)/config/active_grasp.yaml" subst_value="true" />
   <param name="robot_description" command="$(find xacro)/xacro $(find active_grasp)/assets/urdfs/panda_arm_hand.urdf.xacro" />

   <!-- Bringup the robot -->


@@ -1,33 +1,76 @@
-from geometry_msgs.msg import Pose
+from pathlib import Path
+
+import cv_bridge
 import numpy as np
 import rospy
 import scipy.interpolate
+import torch
+
+from geometry_msgs.msg import Pose
+from sensor_msgs.msg import Image, CameraInfo
 from robot_utils.spatial import Rotation, Transform
 from robot_utils.ros.conversions import *
 from robot_utils.ros.tf import TransformTree
+from robot_utils.perception import *
+from vgn import vis
+from vgn.detection import *
+from vgn.grasp import from_voxel_coordinates


 def get_policy(name):
-    if name == "fixed-trajectory":
-        return FixedTrajectory()
+    if name == "single-view":
+        return SingleViewBaseline()
+    elif name == "fixed-trajectory":
+        return FixedTrajectoryBaseline()
     else:
         raise ValueError("{} policy does not exist.".format(name))


-class BasePolicy:
+class Policy:
     def __init__(self):
+        self.frame_id = rospy.get_param("~frame_id")

+        # Robot
         self.tf_tree = TransformTree()
         self.target_pose_pub = rospy.Publisher("/target", Pose, queue_size=10)

+        # Camera
+        camera_name = rospy.get_param("~camera_name")
+        self.cam_frame_id = camera_name + "_optical_frame"
+        self.cv_bridge = cv_bridge.CvBridge()
+        depth_topic = camera_name + "/depth/image_raw"
+        rospy.Subscriber(depth_topic, Image, self.sensor_cb, queue_size=1)
+        msg = rospy.wait_for_message(camera_name + "/depth/camera_info", CameraInfo)
+        self.intrinsic = from_camera_info_msg(msg)

+        # VGN
+        model_path = Path(rospy.get_param("vgn/model"))
+        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+        self.net = load_network(model_path, self.device)

         rospy.sleep(1.0)
+        self.H_B_T = self.tf_tree.lookup("panda_link0", self.frame_id, rospy.Time.now())

+    def sensor_cb(self, msg):
+        self.last_depth_img = self.cv_bridge.imgmsg_to_cv2(msg).astype(np.float32)
+        self.last_extrinsic = self.tf_tree.lookup(
+            self.cam_frame_id, self.frame_id, msg.header.stamp, rospy.Duration(0.1)
+        )


-class FixedTrajectory(BasePolicy):
+class SingleViewBaseline(Policy):
+    pass
+
+
+class FixedTrajectoryBaseline(Policy):
     def __init__(self):
         super().__init__()
         self.duration = 4.0
         self.radius = 0.1
         self.m = scipy.interpolate.interp1d([0, self.duration], [np.pi, 3.0 * np.pi])
+        self.tsdf = UniformTSDFVolume(0.3, 40)
+        vis.draw_workspace(0.3)

     def start(self):
         self.tic = rospy.Time.now()
@@ -35,16 +78,63 @@ class FixedTrajectory(BasePolicy):
         x0 = self.tf_tree.lookup("panda_link0", "panda_hand", self.tic, timeout)
         self.origin = np.r_[x0.translation[0] + self.radius, x0.translation[1:]]
         self.target = x0
+        self.done = False

     def update(self):
         elapsed_time = (rospy.Time.now() - self.tic).to_sec()

+        # Integrate image
+        self.tsdf.integrate(
+            self.last_depth_img,
+            self.intrinsic,
+            self.last_extrinsic,
+        )

+        # Visualize current integration
+        cloud = self.tsdf.get_scene_cloud()
+        vis.draw_points(np.asarray(cloud.points))

         if elapsed_time > self.duration:
-            return True
+            # Plan grasps
+            map_cloud = self.tsdf.get_map_cloud()
+            points = np.asarray(map_cloud.points)
+            distances = np.asarray(map_cloud.colors)[:, 0]
+            tsdf_grid = grid_from_cloud(points, distances, self.tsdf.voxel_size)
+            vis.draw_tsdf(tsdf_grid.squeeze(), self.tsdf.voxel_size)
+            qual, rot, width = predict(tsdf_grid, self.net, self.device)
+            qual, rot, width = process(tsdf_grid, qual, rot, width)
+            grasps, scores = select(qual.copy(), rot, width)
+            grasps, scores = np.asarray(grasps), np.asarray(scores)
+            grasps = [from_voxel_coordinates(g, self.tsdf.voxel_size) for g in grasps]

+            # Select the highest grasp
+            heights = np.empty(len(grasps))
+            for i, grasp in enumerate(grasps):
+                heights[i] = grasp.pose.translation[2]
+            idx = np.argmax(heights)
+            grasp, score = grasps[idx], scores[idx]
+            vis.draw_grasps(grasps, scores, 0.05)

+            # Ensure that the camera is pointing forward.
+            rot = grasp.pose.rotation
+            axis = rot.as_matrix()[:, 0]
+            if axis[0] < 0:
+                grasp.pose.rotation = rot * Rotation.from_euler("z", np.pi)

+            # Add offset between grasp frame and panda_hand frame
+            T_task_grasp = grasp.pose * Transform(
+                Rotation.identity(), np.r_[0.0, 0.0, -0.06]
+            )
+            self.best_grasp = self.H_B_T * T_task_grasp
+            self.done = True
+            return

         t = self.m(elapsed_time)
         self.target.translation = (
             self.origin + np.r_[self.radius * np.cos(t), self.radius * np.sin(t), 0.0]
         )
         self.target_pose_pub.publish(to_pose_msg(self.target))
-        return False
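
As an aside, the circular scan above can be previewed in isolation: interp1d maps the elapsed time over the 4 s duration to an angle from π to 3π, and the circle's center is offset by +radius in x from the start pose, so the target coincides with the start at t = 0 and returns there after one full revolution. A standalone sketch (not part of the commit; the start position is made up):

    import numpy as np
    import scipy.interpolate

    duration, radius = 4.0, 0.1
    m = scipy.interpolate.interp1d([0, duration], [np.pi, 3.0 * np.pi])

    start = np.array([0.4, 0.0, 0.4])         # hypothetical panda_hand start position
    origin = start + np.r_[radius, 0.0, 0.0]  # circle center, as in start()

    for elapsed in np.linspace(0.0, duration, 5):
        t = float(m(elapsed))
        target = origin + np.r_[radius * np.cos(t), radius * np.sin(t), 0.0]
        print(round(elapsed, 2), np.round(target, 3))  # first and last targets equal the start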

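The -0.06 m offset in the grasp-planning branch presumably converts VGN's grasp point between the fingertips into a target for the panda_hand frame by backing off 6 cm along the local approach (z) axis, before H_B_T maps the result into the panda_link0 frame. A minimal sketch with a made-up top-down grasp, assuming robot_utils.spatial composes transforms in the usual rotate-then-translate way:

    import numpy as np
    from robot_utils.spatial import Rotation, Transform

    # hypothetical grasp 10 cm above the task-frame origin, approaching straight down
    T_task_grasp = Transform(Rotation.from_euler("x", np.pi), np.r_[0.0, 0.0, 0.10])

    # same offset as in update(): shift 6 cm backwards along the grasp z axis
    T_task_hand = T_task_grasp * Transform(Rotation.identity(), np.r_[0.0, 0.0, -0.06])
    print(T_task_hand.translation)  # z is about 0.16 here, since the local +z axis points down
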
run.py

@@ -47,10 +47,12 @@ class GraspController:
         target.translation[2] += 0.1
         self.target_pose_pub.publish(to_pose_msg(target))
         rospy.sleep(2.0)
+        self.gripper.move(0.08)
+        rospy.sleep(1.0)


 def main(args):
-    rospy.init_node("panda_grasp")
+    rospy.init_node("active_grasp")
     policy = get_policy(args.policy)
     gc = GraspController(policy, args.rate)
     gc.run()
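
Note that update() no longer returns a done flag; the policy instead exposes done and best_grasp. The loop in GraspController that consumes them is not part of this diff, so the sketch below is only a guess at the shape of that interface (rate and policy stand in for the controller's members):

    rate = rospy.Rate(10)  # hypothetical control rate
    policy.start()
    while not policy.done and not rospy.is_shutdown():
        policy.update()    # integrate the latest depth image, publish the next target pose
        rate.sleep()
    grasp_pose = policy.best_grasp  # grasp pose, mapped into panda_link0 via H_B_T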