Skip to content

Commit

Permalink
Cml day 3 (#262)
Browse files Browse the repository at this point in the history
Co-authored-by: Jared Swift <[email protected]>
  • Loading branch information
fireblonde and jws-1 authored Jul 23, 2024
1 parent b55959e commit 63fd810
Show file tree
Hide file tree
Showing 10 changed files with 159 additions and 75 deletions.
3 changes: 3 additions & 0 deletions common/navigation/lasr_person_following/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ find_package(catkin REQUIRED COMPONENTS
genmsg
actionlib_msgs
actionlib
geometry_msgs
)

## System dependencies are found with CMake's conventions
Expand Down Expand Up @@ -73,6 +74,7 @@ add_action_files(
generate_messages(
DEPENDENCIES
actionlib_msgs
geometry_msgs
)

################################################
Expand Down Expand Up @@ -163,6 +165,7 @@ include_directories(
# in contrast to setup.py, you can choose the destination
catkin_install_python(PROGRAMS
nodes/person_following.py
nodes/stop_listener.py
DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
---
#result definition
geometry_msgs/Pose[] waypoints
---
#feedback
12 changes: 12 additions & 0 deletions common/navigation/lasr_person_following/nodes/person_following.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@

import warnings

from std_msgs.msg import Empty


class PersonFollowingServer:
_server: actionlib.SimpleActionServer
Expand All @@ -32,6 +34,10 @@ def __init__(self) -> None:
"/move_base/set_parameters", Reconfigure
)

self._dynamic_local_costmap = rospy.ServiceProxy(
"/move_base/local_costmap/inflation_layer/set_parameters", Reconfigure
)

self._update_params()
rospy.sleep(1)
print("Updated params")
Expand Down Expand Up @@ -78,6 +84,12 @@ def _update_params(self):

self._dynamic_recovery(config)

config = Config()
config.bools.append(BoolParameter(name="enabled", value=1))
config.doubles.append(DoubleParameter(name="inflation_radius", value=0.2))

self._dynamic_local_costmap(config)


if __name__ == "__main__":
rospy.init_node("person_following_server")
Expand Down
26 changes: 26 additions & 0 deletions common/navigation/lasr_person_following/nodes/stop_listener.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
#!/usr/bin/env python3
"""Listen continuously for the spoken word "stop" and signal it.

Repeatedly runs speech transcription through the "transcribe_speech"
action server; whenever a transcription contains "stop", an Empty
message is published on /stop_listener/finished so that the
person-following node can react.
"""
import rospy

from lasr_speech_recognition_msgs.msg import (  # type: ignore
    TranscribeSpeechAction,
    TranscribeSpeechGoal,
)

from std_msgs.msg import Empty

import actionlib

if __name__ == "__main__":
    rospy.init_node("stop_listener")
    finished_pub = rospy.Publisher("/stop_listener/finished", Empty, queue_size=1)
    transcribe_speech_client = actionlib.SimpleActionClient(
        "transcribe_speech", TranscribeSpeechAction
    )
    transcribe_speech_client.wait_for_server()

    while not rospy.is_shutdown():
        transcribe_speech_client.send_goal_and_wait(TranscribeSpeechGoal())
        result = transcribe_speech_client.get_result()
        # get_result() returns None when the goal is preempted/aborted;
        # guard so a failed transcription cannot crash the node.
        if result is not None and "stop" in result.sequence.lower():
            finished_pub.publish(Empty())
    # No trailing rospy.spin(): the loop above already runs until shutdown,
    # and spin() after is_shutdown() returns immediately anyway.
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
Point,
)

from typing import Union, List
from typing import Union, List, Tuple

from move_base_msgs.msg import MoveBaseAction, MoveBaseActionGoal, MoveBaseGoal
from actionlib_msgs.msg import GoalStatus
Expand Down Expand Up @@ -40,6 +40,15 @@

from pal_interaction_msgs.msg import TtsGoal, TtsAction

from lasr_person_following.msg import (
FollowAction,
FollowGoal,
FollowResult,
FollowFeedback,
)

from std_msgs.msg import Empty

MAX_VISION_DIST: float = 5.0


Expand All @@ -63,6 +72,7 @@ class PersonFollower:
_tts_client_available: bool
_transcribe_speech_client: actionlib.SimpleActionClient
_transcribe_speech_client_available: bool
_stop_listener_sub: rospy.Subscriber

# Services
_make_plan: rospy.ServiceProxy
Expand All @@ -74,6 +84,9 @@ class PersonFollower:
# Publishers
_person_trajectory_pub: rospy.Publisher

# Waypoints
_waypoints: List[PoseStamped]

def __init__(
self,
start_following_radius: float = 2.0,
Expand Down Expand Up @@ -139,6 +152,17 @@ def __init__(
if not self._play_motion.wait_for_server(rospy.Duration.from_sec(10.0)):
rospy.logwarn("Play motion client not available")

self._stop_listener_sub = rospy.Subscriber(
"/stop_listener/finished", Empty, self._stop_listener_cb
)

self._should_stop = False

self._waypoints = []

def _stop_listener_cb(self, msg: Empty) -> None:
    """Subscriber callback for /stop_listener/finished.

    Latches ``self._should_stop`` so the follow loop can terminate at
    its next check. ``msg`` is an ``Empty`` signal and carries no data.
    """
    self._should_stop = True

def _tf_pose(self, pose: PoseStamped, target_frame: str):
trans = self._buffer.lookup_transform(
target_frame, pose.header.frame_id, rospy.Time(0), rospy.Duration(1.0)
Expand Down Expand Up @@ -258,23 +282,19 @@ def _recover_vision(self, robot_pose: PoseStamped, prev_pose: PoseStamped) -> bo
):
continue
else:
goal_pose = self._get_pose_on_path(
self._tf_pose(robot_pose, "map"),
self._tf_pose(
PoseStamped(
pose=Pose(
position=Point(
x=response.wave_position.point.x,
y=response.wave_position.point.y,
z=response.wave_position.point.z,
),
orientation=Quaternion(0, 0, 0, 1),
goal_pose = self._tf_pose(
PoseStamped(
pose=Pose(
position=Point(
x=response.wave_position.point.x,
y=response.wave_position.point.y,
z=response.wave_position.point.z,
),
header=prev_pose.header,
orientation=Quaternion(0, 0, 0, 1),
),
"map",
header=prev_pose.header,
),
self._stopping_distance,
"map",
)
if goal_pose is None:
rospy.logwarn("Could not find a path to the person")
Expand Down Expand Up @@ -322,15 +342,15 @@ def _cancel_goal(self) -> None:

def _check_finished(self) -> bool:
    """Ask the user whether the destination has been reached.

    Speaks a confirmation prompt (when the TTS client is available),
    then listens for the reply (when the transcription client is
    available).

    Returns:
        True if the reply contains "yes" or "arrive"/"arrived", or if
        either client is unavailable — in that case we optimistically
        assume we have arrived rather than blocking forever.
    """
    if self._tts_client_available:
        self._tts("Have we arrived? Please say Yes We Have Arrived", wait=True)

        if self._transcribe_speech_client_available:
            self._transcribe_speech_client.send_goal_and_wait(
                TranscribeSpeechGoal()
            )
            transcription = self._transcribe_speech_client.get_result().sequence

            # Lowercase once; "arrive" is a substring of "arrived", so a
            # single check covers both words (the original tested both).
            answer = transcription.lower()
            return "yes" in answer or "arrive" in answer
    return True

def _get_pose_on_path(
Expand Down Expand Up @@ -361,11 +381,14 @@ def _get_pose_on_path(

return chosen_pose

def _move_base(self, pose: PoseStamped) -> MoveBaseGoal:
def _move_base(self, pose: PoseStamped, wait: bool = False) -> MoveBaseGoal:
    """Send ``pose`` to move_base as a navigation goal.

    Args:
        pose: Target pose for the robot. Callers in this file transform
            poses to "map" before passing them in — presumably move_base
            expects that frame; TODO confirm.
        wait: If True, block until move_base reports a result.

    Returns:
        The ``MoveBaseGoal`` that was sent (kept by callers so the goal
        can be cancelled later).
    """
    goal: MoveBaseGoal = MoveBaseGoal()
    goal.target_pose = pose
    self._move_base_client.send_goal(goal)

    if wait:
        self._move_base_client.wait_for_result()

    return goal

def _tts(self, text: str, wait: bool) -> None:
Expand All @@ -378,17 +401,21 @@ def _tts(self, text: str, wait: bool) -> None:
else:
self._tts_client.send_goal(tts_goal)

def follow(self) -> None:
def follow(self) -> FollowResult:

result = FollowResult()

person_trajectory: PoseArray = PoseArray()
person_trajectory.header.frame_id = "odom"
prev_track: Union[None, Person] = None
prev_goal: Union[None, PoseStamped] = None
last_goal_time: Union[None, rospy.Time] = None
going_to_person: bool = False
track_vels: List[float] = []
track_vels: List[Tuple[float, float]] = []
asking_time: rospy.Time = rospy.Time.now()

self._tts("I will follow you now. Please walk slowly!", wait=False)

while not rospy.is_shutdown():

tracks: PersonArray = rospy.wait_for_message("/people_tracked", PersonArray)
Expand All @@ -408,28 +435,7 @@ def follow(self) -> None:
rospy.loginfo("Lost track of person, recovering...")
person_trajectory = PoseArray()
ask_back: bool = False

if prev_track is not None:
robot_pose: PoseStamped = self._robot_pose_in_odom()
if robot_pose:
dist: float = self._euclidian_distance(
robot_pose.pose, prev_track.pose
)
rospy.loginfo(f"Distance to last known position: {dist}")
if dist >= MAX_VISION_DIST:
ask_back = True
else:
ask_back = True
else:
ask_back = True

if not ask_back:
self._recover_track(
say=not self._recover_vision(robot_pose, prev_goal)
)
else:
self._recover_track(say=True)

self._recover_track(say=True)
prev_track = None
continue

Expand Down Expand Up @@ -475,6 +481,8 @@ def follow(self) -> None:
"map",
)
self._move_base(goal_pose)

self._waypoints.append(goal_pose)
prev_goal = goal_pose
prev_track = track
last_goal_time = rospy.Time.now()
Expand Down Expand Up @@ -503,9 +511,21 @@ def follow(self) -> None:
# clear velocity buffer
track_vels = []

# clear waypoints
self._waypoints = []

if self._check_finished():
rospy.loginfo("Finished following person")
break
elif self._move_base_client.get_state() in [GoalStatus.SUCCEEDED]:
goal_pose = self._tf_pose(
PoseStamped(pose=track.pose, header=tracks.header),
"map",
)
self._move_base(goal_pose)
prev_goal = goal_pose
prev_track = track
rospy.loginfo("")
rospy.loginfo(np.mean([np.linalg.norm(vel) for vel in track_vels]))
rospy.loginfo("")
return result
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,7 @@ def process_single_detection_request(
depth_image_topic: str = "/xtion/depth_registered/points",
yolo_2d_service_name: str = "/yolov8/detect",
yolo_3d_service_name: str = "/yolov8/detect3d",
robot_pose_topic: str = "/amcl_pose",
robot_pose_topic: str = "/robot_pose",
debug_topic: str = "/lasr_vision/cropped_detection/debug",
) -> CDResponse:
"""Dispatches a detection request to the appropriate bounding box/mask 2D or 3D cropped
Expand All @@ -311,7 +311,7 @@ def process_single_detection_request(
depth_image_topic (str, optional): The topic to getn an RGBD image from. Defaults to "/xtion/depth_registered/points".
yolo_2d_service_name (str, optional): Name of the 2D Yolo detection service. Defaults to "/yolov8/detect".
yolo_3d_service_name (str, optional): Name of the 3D Yolo detection service. Defaults to "/yolov8/detect3d".
robot_pose_topic (str, optional): Service to get the robot's current pose. Defaults to "/amcl_pose".
robot_pose_topic (str, optional): Service to get the robot's current pose. Defaults to "/robot_pose".
debug_topic (str, optional): Topic to publish results to for debugging. Defaults to "/lasr_vision/cropped_detection/debug".
Returns:
Expand Down
5 changes: 5 additions & 0 deletions skills/config/motions.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@ play_motion:
points:
- positions: [1.61, -0.93, -3.14, 1.83, -1.58, -0.53, 0.00]
time_from_start: 0.0
cml_arm_away:
joints: [ arm_1_joint, arm_2_joint, arm_3_joint, arm_4_joint, arm_5_joint, arm_6_joint, arm_7_joint ]
points:
- positions: [ 2.65, -1.36, -1.23, 1.86, -1.36, 1.39, -0.01 ]
time_from_start: 0.0
open_gripper:
joints: [gripper_left_finger_joint, gripper_right_finger_joint]
points:
Expand Down
6 changes: 3 additions & 3 deletions skills/src/lasr_skills/receive_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True):
smach.StateMachine.add(
"SAY_PLACE",
Say(
text=f"Please place the {object_name} in my end-effector. I will wait for a few seconds.",
text=f"Please place the {object_name} in my hand. I will wait for a few seconds.",
),
transitions={
"succeeded": "WAIT_5",
Expand All @@ -159,7 +159,7 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True):
smach.StateMachine.add(
"SAY_PLACE",
Say(
format_str="Please place the {} in my end-effector. I will wait for a few seconds.",
format_str="Please place the {} in my hand. I will wait for a few seconds.",
),
transitions={
"succeeded": "WAIT_5",
Expand Down Expand Up @@ -189,7 +189,7 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True):
)
smach.StateMachine.add(
"FOLD_ARM",
PlayMotion(motion_name="home"),
PlayMotion(motion_name="cml_arm_away"),
transitions={
"succeeded": "succeeded",
"aborted": "failed",
Expand Down
6 changes: 5 additions & 1 deletion tasks/carry_my_luggage/launch/setup.launch
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,13 @@
<rosparam command="load" file="$(find lasr_skills)/config/motions.yaml"/>

<!-- PERCEPTION -->
<include file="$(find lasr_vision_yolov8)/launch/service.launch" />
<!-- <include file="$(find lasr_vision_yolov8)/launch/service.launch" />-->
<include file="$(find lasr_vision_cropped_detection)/launch/cropped_detection.launch"/>
<include file="$(find lasr_vision_bodypix)/launch/bodypix.launch">
<param name="preload" type="yaml" value='resnet50' />
</include>

<!-- NAVIGATION -->
<include file="$(find lasr_person_following)/launch/person_following.launch" />

</launch>
Loading

0 comments on commit 63fd810

Please sign in to comment.