Receive & Handover skills #139

Closed · wants to merge 4 commits
Changes from all commits
2 changes: 1 addition & 1 deletion common/vision/lasr_vision_yolov8/CMakeLists.txt
@@ -19,7 +19,7 @@ find_package(catkin REQUIRED catkin_virtualenv)
catkin_python_setup()
catkin_generate_virtualenv(
INPUT_REQUIREMENTS requirements.in
PYTHON_INTERPRETER python3.8
PYTHON_INTERPRETER python3.10
)

################################################
2 changes: 1 addition & 1 deletion common/vision/lasr_vision_yolov8/package.xml
@@ -51,7 +51,7 @@
<buildtool_depend>catkin</buildtool_depend>
<build_depend>catkin_virtualenv</build_depend>
<depend>lasr_vision_msgs</depend>
<exec_depend>markers</exec_depend>


<!-- The export tag contains other, unspecified, tags -->
<export>
90 changes: 46 additions & 44 deletions common/vision/lasr_vision_yolov8/requirements.txt
@@ -1,53 +1,55 @@
certifi==2024.2.2 # via requests
charset-normalizer==3.3.2 # via requests
contourpy==1.1.1 # via matplotlib
cycler==0.12.1 # via matplotlib
certifi==2023.7.22 # via requests
charset-normalizer==3.2.0 # via requests
cmake==3.27.4.1 # via triton
contourpy==1.1.0 # via matplotlib
cycler==0.11.0 # via matplotlib
dill==0.3.7 # via -r requirements.in
filelock==3.13.1 # via torch, triton
fonttools==4.49.0 # via matplotlib
fsspec==2024.2.0 # via torch
idna==3.6 # via requests
importlib-resources==6.1.2 # via matplotlib
jinja2==3.1.3 # via torch
filelock==3.12.3 # via torch, triton
fonttools==4.42.1 # via matplotlib
idna==3.4 # via requests
jinja2==3.1.2 # via torch
kiwisolver==1.4.5 # via matplotlib
markupsafe==2.1.5 # via jinja2
matplotlib==3.7.5 # via seaborn, ultralytics
lit==16.0.6 # via triton
markupsafe==2.1.3 # via jinja2
matplotlib==3.7.3 # via seaborn, ultralytics
mpmath==1.3.0 # via sympy
networkx==3.1 # via torch
numpy==1.24.4 # via contourpy, matplotlib, opencv-python, pandas, scipy, seaborn, torchvision, ultralytics
nvidia-cublas-cu12==12.1.3.1 # via nvidia-cudnn-cu12, nvidia-cusolver-cu12, torch
nvidia-cuda-cupti-cu12==12.1.105 # via torch
nvidia-cuda-nvrtc-cu12==12.1.105 # via torch
nvidia-cuda-runtime-cu12==12.1.105 # via torch
nvidia-cudnn-cu12==8.9.2.26 # via torch
nvidia-cufft-cu12==11.0.2.54 # via torch
nvidia-curand-cu12==10.3.2.106 # via torch
nvidia-cusolver-cu12==11.4.5.107 # via torch
nvidia-cusparse-cu12==12.1.0.106 # via nvidia-cusolver-cu12, torch
nvidia-nccl-cu12==2.19.3 # via torch
nvidia-nvjitlink-cu12==12.4.99 # via nvidia-cusolver-cu12, nvidia-cusparse-cu12
nvidia-nvtx-cu12==12.1.105 # via torch
opencv-python==4.9.0.80 # via ultralytics
packaging==23.2 # via matplotlib
pandas==2.0.3 # via seaborn, ultralytics
pillow==10.2.0 # via matplotlib, torchvision, ultralytics
psutil==5.9.8 # via ultralytics
numpy==1.25.2 # via contourpy, matplotlib, opencv-python, pandas, scipy, seaborn, torchvision, ultralytics
nvidia-cublas-cu11==11.10.3.66 # via nvidia-cudnn-cu11, nvidia-cusolver-cu11, torch
nvidia-cuda-cupti-cu11==11.7.101 # via torch
nvidia-cuda-nvrtc-cu11==11.7.99 # via torch
nvidia-cuda-runtime-cu11==11.7.99 # via torch
nvidia-cudnn-cu11==8.5.0.96 # via torch
nvidia-cufft-cu11==10.9.0.58 # via torch
nvidia-curand-cu11==10.2.10.91 # via torch
nvidia-cusolver-cu11==11.4.0.1 # via torch
nvidia-cusparse-cu11==11.7.4.91 # via torch
nvidia-nccl-cu11==2.14.3 # via torch
nvidia-nvtx-cu11==11.7.91 # via torch
opencv-python==4.8.0.76 # via ultralytics
packaging==23.1 # via matplotlib
pandas==2.1.0 # via seaborn, ultralytics
pillow==10.0.0 # via matplotlib, torchvision, ultralytics
psutil==5.9.5 # via ultralytics
py-cpuinfo==9.0.0 # via ultralytics
pyparsing==3.1.2 # via matplotlib
python-dateutil==2.9.0.post0 # via matplotlib, pandas
pytz==2024.1 # via pandas
pyparsing==3.1.1 # via matplotlib
python-dateutil==2.8.2 # via matplotlib, pandas
pytz==2023.3.post1 # via pandas
pyyaml==6.0.1 # via ultralytics
requests==2.31.0 # via ultralytics
scipy==1.10.1 # via ultralytics
seaborn==0.13.2 # via ultralytics
requests==2.31.0 # via torchvision, ultralytics
scipy==1.11.2 # via ultralytics
seaborn==0.12.2 # via ultralytics
six==1.16.0 # via python-dateutil
sympy==1.12 # via torch
torch==2.2.1 # via torchvision, ultralytics
torchvision==0.17.1 # via ultralytics
tqdm==4.66.2 # via ultralytics
triton==2.2.0 # via torch
typing-extensions==4.10.0 # via torch
tzdata==2024.1 # via pandas
torch==2.0.1 # via torchvision, triton, ultralytics
torchvision==0.15.2 # via ultralytics
tqdm==4.66.1 # via ultralytics
triton==2.0.0 # via torch
typing-extensions==4.7.1 # via filelock, torch
tzdata==2023.3 # via pandas
ultralytics==8.0.168 # via -r requirements.in
urllib3==2.2.1 # via requests
zipp==3.17.0 # via importlib-resources
urllib3==2.0.4 # via requests
wheel==0.41.2 # via nvidia-cublas-cu11, nvidia-cuda-cupti-cu11, nvidia-cuda-runtime-cu11, nvidia-curand-cu11, nvidia-cusparse-cu11, nvidia-nvtx-cu11

# The following packages are considered to be unsafe in a requirements file:
# setuptools
59 changes: 43 additions & 16 deletions common/vision/lasr_vision_yolov8/src/lasr_vision_yolov8/yolo.py
@@ -3,22 +3,47 @@
import cv2_pcl
import numpy as np

from PIL import Image
from ultralytics import YOLO

from sensor_msgs.msg import Image as SensorImage
from lasr_vision_msgs.msg import Detection, Detection3D
from lasr_vision_msgs.srv import (
YoloDetectionRequest,
YoloDetectionResponse,
YoloDetection3DRequest,
YoloDetection3DResponse,
)

import markers
import cv2
import ros_numpy as rnp

from geometry_msgs.msg import Point, PointStamped

import tf2_ros as tf
import tf2_geometry_msgs # noqa
import tf2_geometry_msgs

from visualization_msgs.msg import Marker


def create_point_marker(x, y, z, idx, g=1.0):
marker_msg = Marker()
marker_msg.header.frame_id = "map"
marker_msg.header.stamp = rospy.Time.now()
marker_msg.id = idx
marker_msg.type = Marker.SPHERE
marker_msg.action = Marker.ADD
marker_msg.pose.position.x = x
marker_msg.pose.position.y = y
marker_msg.pose.position.z = z
marker_msg.pose.orientation.w = 1.0
marker_msg.scale.x = 0.1
marker_msg.scale.y = 0.1
marker_msg.scale.z = 0.1
marker_msg.color.a = 1.0
marker_msg.color.r = 0.0
marker_msg.color.g = g
marker_msg.color.b = 0.0
return marker_msg


# model cache
@@ -96,7 +121,14 @@ def detect_3d(
"""
Run YOLO 3D inference on given detection request
"""

rospy.loginfo("Waiting for transformation to become available")
tf_buffer = tf.Buffer()
# Wait for the transformation to become available
while not tf_buffer.can_transform(
"map", request.pcl.header.frame_id, rospy.Time(0)
):
rospy.sleep(0.01)

# Extract rgb image from pointcloud
rospy.loginfo("Decoding")
@@ -122,31 +154,26 @@
detection.confidence = float(result.boxes.conf.cpu().numpy()[i])
detection.xywh = result.boxes.xywh[i].cpu().numpy().astype(int).tolist()

# copy segmented mask if available, and estimate 3D position
# copy segmented mask if available
if has_segment_masks:
detection.xyseg = result.masks.xy[i].flatten().astype(int).tolist()

# xyz_points = [pcl_xyz[x][y] for x, y in indices]
centroid = cv2_pcl.seg_to_centroid(request.pcl, np.array(detection.xyseg))

point = Point(*centroid)
point_stamped = PointStamped()
point_stamped.point = point
point_stamped.header.frame_id = request.pcl.header.frame_id
point_stamped.header.stamp = rospy.Time(0)

# TODO: handle tf errors properly
while not rospy.is_shutdown():
try:
detection.point = tf_buffer.transform(point_stamped, "map").point
break
except Exception:
continue
detection.point = tf_buffer.transform(
point_stamped, request.target_frame
).point

# publish to debug topic
if debug_point_publisher is not None:
markers.create_and_publish_marker(
debug_point_publisher, detection.point
marker = create_point_marker(
detection.point.x, detection.point.y, detection.point.z, i
)
debug_point_publisher.publish(marker)

detected_objects.append(detection)

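Note on the detect_3d changes above: each segmentation mask is reduced to a centroid with cv2_pcl.seg_to_centroid, the centroid is transformed into the map frame through a tf2 buffer, and a debug sphere built by create_point_marker is published. The following standalone sketch mirrors that transform-and-publish pattern; the node name, debug topic, and the xtion_rgb_optical_frame source frame are illustrative assumptions and not part of this PR.

#!/usr/bin/env python3
# Sketch only: node/topic names and the source frame are assumptions.
import rospy
import tf2_ros as tf
import tf2_geometry_msgs  # noqa: registers PointStamped support for tf_buffer.transform
from geometry_msgs.msg import PointStamped
from visualization_msgs.msg import Marker

rospy.init_node("detect_3d_debug_example")
marker_pub = rospy.Publisher("/yolo_debug_points", Marker, queue_size=10)

tf_buffer = tf.Buffer()
tf_listener = tf.TransformListener(tf_buffer)  # keeps the buffer filled in the background

ps = PointStamped()
ps.header.frame_id = "xtion_rgb_optical_frame"  # assumed camera frame
ps.header.stamp = rospy.Time(0)  # use the latest available transform
ps.point.x, ps.point.y, ps.point.z = 1.0, 0.0, 0.5

# Wait (up to 5 s) for the transform, then express the point in the map frame.
tf_buffer.can_transform("map", ps.header.frame_id, rospy.Time(0), rospy.Duration(5.0))
point_in_map = tf_buffer.transform(ps, "map").point

# Publish a sphere marker at the transformed point, as create_point_marker does above.
marker = Marker()
marker.header.frame_id = "map"
marker.header.stamp = rospy.Time.now()
marker.id = 0
marker.type = Marker.SPHERE
marker.action = Marker.ADD
marker.pose.position = point_in_map
marker.pose.orientation.w = 1.0
marker.scale.x = marker.scale.y = marker.scale.z = 0.1
marker.color.a = marker.color.g = 1.0
marker_pub.publish(marker)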
5 changes: 0 additions & 5 deletions skills/config/demo.yaml

This file was deleted.

Empty file added skills/config/motions.yaml
Empty file.
3 changes: 3 additions & 0 deletions skills/src/lasr_skills/__init__.py
@@ -8,3 +8,6 @@
from .look_to_point import LookToPoint
from .go_to_location import GoToLocation
from .go_to_semantic_location import GoToSemanticLocation
from .listen_wake_word import ListenWakeWord
from .handover_object import HandoverObject
from .receive_object import ReceiveObject
1 change: 1 addition & 0 deletions skills/src/lasr_skills/detect_3d.py
@@ -1,3 +1,4 @@
#!/usr/bin/env python3
import rospy
import smach

22 changes: 12 additions & 10 deletions skills/src/lasr_skills/detect_3d_in_area.py
@@ -1,43 +1,45 @@
#!/usr/bin/env python3

import smach

from lasr_skills import Detect3D
from lasr_shapely import LasrShapely

from typing import List, Union

from shapely.geometry import Point
from shapely.geometry.polygon import Polygon
from typing import List, Union


class Detect3DInArea(smach.StateMachine):

class FilterDetections(smach.State):

def __init__(self, area_polygon: Polygon):
def __init__(self, area_polygon):
smach.State.__init__(
self,
outcomes=["succeeded", "failed"],
input_keys=["detections_3d"],
output_keys=["detections_3d"],
)
self.area_polygon = area_polygon
self.shapely = LasrShapely()

def execute(self, userdata):
satisfied_points = [
self.area_polygon.contains(Point(pose[0], pose[1]))
for (_, pose) in userdata.detections_3d
]
satisfied_points = self.shapely.are_points_in_polygon_2d(
userdata.area_polygon,
[[pose[0], pose[1]] for (_, pose) in userdata.detections_3d],
).inside
filtered_detections = [
userdata.detections_3d[i]
for i in range(0, len(userdata.detections_3d))
if satisfied_points[i]
]

userdata.detections_3d = filtered_detections

return "succeeded"

def __init__(
self,
area_polygon: Polygon,
area_polygon: List[List[float]],
depth_topic: str = "/xtion/depth_registered/points",
model: str = "yolov8n-seg.pt",
filter: Union[List[str], None] = None,
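Note on the FilterDetections change above: the direct shapely Polygon.contains check is replaced by the LasrShapely client, whose are_points_in_polygon_2d call takes the polygon as a list of [x, y] pairs plus the query points and returns a response whose inside field indexes like a list of booleans. A minimal sketch of that call, with made-up coordinates and assuming the lasr_shapely service is running:

#!/usr/bin/env python3
# Sketch only: polygon and detection values are made up; assumes the
# lasr_shapely service is available so LasrShapely() can connect.
import rospy
from lasr_shapely import LasrShapely

rospy.init_node("shapely_filter_example")
shapely = LasrShapely()

area = [[0.0, 0.0], [2.0, 0.0], [2.0, 2.0], [0.0, 2.0]]        # square area polygon
detections = [("cup", [1.0, 1.0, 0.8]), ("bottle", [3.0, 3.0, 0.8])]

inside = shapely.are_points_in_polygon_2d(
    area, [[pose[0], pose[1]] for (_, pose) in detections]
).inside
filtered = [d for d, keep in zip(detections, inside) if keep]
print(filtered)  # expected: only the detection at (1.0, 1.0)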
41 changes: 27 additions & 14 deletions skills/src/lasr_skills/go_to_location.py
@@ -1,18 +1,31 @@
import smach_ros
#!/usr/bin/env python3

from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from geometry_msgs.msg import PoseStamped
from std_msgs.msg import Header
import rospy
import smach
from tiago_controllers.controllers import Controllers
from geometry_msgs.msg import Point, Quaternion, Pose


class GoToLocation(smach_ros.SimpleActionState):
class GoToLocation(smach.State):

def __init__(self):
super(GoToLocation, self).__init__(
"move_base",
MoveBaseAction,
goal_cb=lambda ud, _: MoveBaseGoal(
target_pose=PoseStamped(pose=ud.location, header=Header(frame_id="map"))
),
input_keys=["location"],
)
smach.State.__init__(self, outcomes=['succeeded', 'failed'], input_keys=['location'])
self.controllers = Controllers()

def execute(self, userdata):
try:
status = self.controllers.base_controller.sync_to_pose(userdata.location)
if status:
return 'succeeded'
return 'failed'
except rospy.ERROR as e:
rospy.logwarn(f"Unable to go to location. {userdata.location} -> ({str(e)})")
return 'failed'

if __name__ == '__main__':
rospy.init_node('go_to_location')
sm = smach.StateMachine(outcomes=['succeeded', 'failed'])
loc = rospy.get_param('/living_room/table/location')
sm.userdata.location = Pose(position=Point(**loc['position']), orientation=Quaternion(**loc['orientation']))
with sm:
smach.StateMachine.add('GoToLocation', GoToLocation(), transitions={'succeeded': 'succeeded', 'failed': 'failed'})
sm.execute()
48 changes: 25 additions & 23 deletions skills/src/lasr_skills/go_to_semantic_location.py
@@ -1,28 +1,30 @@
import rospy
import smach_ros

from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion
from std_msgs.msg import Header
#!/usr/bin/env python3

import rospy
import smach
from tiago_controllers.controllers import Controllers
from geometry_msgs.msg import Point, Quaternion, Pose

class GoToSemanticLocation(smach_ros.SimpleActionState):
class GoToSemanticLocation(smach.State):

def __init__(self):
super(GoToSemanticLocation, self).__init__(
"move_base",
MoveBaseAction,
goal_cb=lambda ud, _: MoveBaseGoal(
target_pose=PoseStamped(
pose=self._to_pose(rospy.get_param(f"{ud.location}/location")),
header=Header(frame_id="map"),
)
),
input_keys=["location"],
)
smach.State.__init__(self, outcomes=['succeeded', 'failed'], input_keys=['location'])
self.controllers = Controllers()

def _to_pose(self, location):
return Pose(
position=Point(**location["position"]),
orientation=Quaternion(**location["orientation"]),
)
def execute(self, userdata):
loc = rospy.get_param(f"{userdata.location}/location")
try:
status = self.controllers.base_controller.sync_to_pose(Pose(position=Point(**loc['position']), orientation=Quaternion(**loc['orientation'])))
if status:
return 'succeeded'
return 'failed'
except rospy.ERROR as e:
rospy.logwarn(f"Unable to go to location. {loc} -> ({str(e)})")
return 'failed'
if __name__ == '__main__':
rospy.init_node('go_to_semantic_location')
sm = smach.StateMachine(outcomes=['succeeded', 'failed'])
sm.userdata.location = '/living_room/table'
with sm:
smach.StateMachine.add('GoToSemanticLocation', GoToSemanticLocation(), transitions={'succeeded': 'succeeded', 'failed': 'failed'})
sm.execute()