diff --git a/common/navigation/lasr_person_following/CMakeLists.txt b/common/navigation/lasr_person_following/CMakeLists.txt
index 660acec3d..5feb4522b 100644
--- a/common/navigation/lasr_person_following/CMakeLists.txt
+++ b/common/navigation/lasr_person_following/CMakeLists.txt
@@ -14,6 +14,7 @@ find_package(catkin REQUIRED COMPONENTS
genmsg
actionlib_msgs
actionlib
+ geometry_msgs
)
## System dependencies are found with CMake's conventions
@@ -73,6 +74,7 @@ add_action_files(
generate_messages(
DEPENDENCIES
actionlib_msgs
+ geometry_msgs
)
################################################
@@ -163,6 +165,7 @@ include_directories(
# in contrast to setup.py, you can choose the destination
catkin_install_python(PROGRAMS
nodes/person_following.py
+ nodes/stop_listener.py
DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)
diff --git a/common/navigation/lasr_person_following/action/Follow.action b/common/navigation/lasr_person_following/action/Follow.action
index 97e350ec9..8c3685d5a 100644
--- a/common/navigation/lasr_person_following/action/Follow.action
+++ b/common/navigation/lasr_person_following/action/Follow.action
@@ -1,4 +1,5 @@
---
#result definition
+geometry_msgs/Pose[] waypoints
---
#feedback
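The new `waypoints` result field lets callers recover the path travelled while following. A minimal client sketch, assuming the server is advertised as "follow_person" (the actual action name is set by the server node, not shown here):

    # Sketch: reading the new waypoints field from the action result.
    # "follow_person" is an assumed action name; use the server's real one.
    import actionlib
    import rospy
    from lasr_person_following.msg import FollowAction, FollowGoal

    rospy.init_node("follow_client_example")
    client = actionlib.SimpleActionClient("follow_person", FollowAction)
    client.wait_for_server()
    client.send_goal(FollowGoal())
    client.wait_for_result()
    # result.waypoints is the geometry_msgs/Pose[] field added above
    for pose in client.get_result().waypoints:
        rospy.loginfo("waypoint: x=%.2f y=%.2f", pose.position.x, pose.position.y)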
diff --git a/common/navigation/lasr_person_following/nodes/person_following.py b/common/navigation/lasr_person_following/nodes/person_following.py
index 456f1eaa4..d495d1dea 100644
--- a/common/navigation/lasr_person_following/nodes/person_following.py
+++ b/common/navigation/lasr_person_following/nodes/person_following.py
@@ -16,6 +16,8 @@
import warnings
+from std_msgs.msg import Empty
+
class PersonFollowingServer:
_server: actionlib.SimpleActionServer
@@ -32,6 +34,10 @@ def __init__(self) -> None:
"/move_base/set_parameters", Reconfigure
)
+ self._dynamic_local_costmap = rospy.ServiceProxy(
+ "/move_base/local_costmap/inflation_layer/set_parameters", Reconfigure
+ )
+
self._update_params()
rospy.sleep(1)
print("Updated params")
@@ -78,6 +84,12 @@ def _update_params(self):
self._dynamic_recovery(config)
+ config = Config()
+ config.bools.append(BoolParameter(name="enabled", value=1))
+ config.doubles.append(DoubleParameter(name="inflation_radius", value=0.2))
+
+ self._dynamic_local_costmap(config)
+
if __name__ == "__main__":
rospy.init_node("person_following_server")
diff --git a/common/navigation/lasr_person_following/nodes/stop_listener.py b/common/navigation/lasr_person_following/nodes/stop_listener.py
new file mode 100644
index 000000000..a521b068f
--- /dev/null
+++ b/common/navigation/lasr_person_following/nodes/stop_listener.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+import rospy
+
+from lasr_speech_recognition_msgs.msg import (  # type: ignore
+    TranscribeSpeechAction,
+    TranscribeSpeechGoal,
+)
+
+from std_msgs.msg import Empty
+
+import actionlib
+
+if __name__ == "__main__":
+ rospy.init_node("stop_listener")
+ finished_pub = rospy.Publisher("/stop_listener/finished", Empty, queue_size=1)
+ transcribe_speech_client = actionlib.SimpleActionClient(
+ "transcribe_speech", TranscribeSpeechAction
+ )
+ transcribe_speech_client.wait_for_server()
+
+ while not rospy.is_shutdown():
+ transcribe_speech_client.send_goal_and_wait(TranscribeSpeechGoal())
+ result = transcribe_speech_client.get_result()
+ if "stop" in result.sequence.lower():
+ finished_pub.publish(Empty())
+ rospy.spin()
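`stop_listener` keeps a transcription goal in flight and publishes an `Empty` on `/stop_listener/finished` whenever the word "stop" is heard; the follower subscribes to that topic (next file). For bench tests without the speech stack, the same signal can be faked:

    # Test sketch: fake a "stop" detection without running speech recognition.
    import rospy
    from std_msgs.msg import Empty

    rospy.init_node("fake_stop")
    pub = rospy.Publisher("/stop_listener/finished", Empty, queue_size=1)
    rospy.sleep(1.0)  # give subscribers time to connect
    pub.publish(Empty())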
diff --git a/common/navigation/lasr_person_following/src/lasr_person_following/person_following.py b/common/navigation/lasr_person_following/src/lasr_person_following/person_following.py
index 58f4eab5d..06f3a2782 100644
--- a/common/navigation/lasr_person_following/src/lasr_person_following/person_following.py
+++ b/common/navigation/lasr_person_following/src/lasr_person_following/person_following.py
@@ -11,7 +11,7 @@
Point,
)
-from typing import Union, List
+from typing import Union, List, Tuple
from move_base_msgs.msg import MoveBaseAction, MoveBaseActionGoal, MoveBaseGoal
from actionlib_msgs.msg import GoalStatus
@@ -40,6 +40,15 @@
from pal_interaction_msgs.msg import TtsGoal, TtsAction
+from lasr_person_following.msg import (
+    FollowAction,
+    FollowGoal,
+    FollowResult,
+    FollowFeedback,
+)
+
+from std_msgs.msg import Empty
+
MAX_VISION_DIST: float = 5.0
@@ -63,6 +72,7 @@ class PersonFollower:
_tts_client_available: bool
_transcribe_speech_client: actionlib.SimpleActionClient
_transcribe_speech_client_available: bool
+ _stop_listener_sub: rospy.Subscriber
+ _should_stop: bool
# Services
_make_plan: rospy.ServiceProxy
@@ -74,6 +84,9 @@ class PersonFollower:
# Publishers
_person_trajectory_pub: rospy.Publisher
+ # Waypoints
+ _waypoints: List[PoseStamped]
+
def __init__(
self,
start_following_radius: float = 2.0,
@@ -139,6 +152,17 @@ def __init__(
if not self._play_motion.wait_for_server(rospy.Duration.from_sec(10.0)):
rospy.logwarn("Play motion client not available")
+ self._stop_listener_sub = rospy.Subscriber(
+ "/stop_listener/finished", Empty, self._stop_listener_cb
+ )
+
+ self._should_stop = False
+
+ self._waypoints = []
+
+ def _stop_listener_cb(self, msg: Empty) -> None:
+ self._should_stop = True
+
def _tf_pose(self, pose: PoseStamped, target_frame: str):
trans = self._buffer.lookup_transform(
target_frame, pose.header.frame_id, rospy.Time(0), rospy.Duration(1.0)
@@ -258,23 +282,19 @@ def _recover_vision(self, robot_pose: PoseStamped, prev_pose: PoseStamped) -> bo
):
continue
else:
- goal_pose = self._get_pose_on_path(
- self._tf_pose(robot_pose, "map"),
- self._tf_pose(
- PoseStamped(
- pose=Pose(
- position=Point(
- x=response.wave_position.point.x,
- y=response.wave_position.point.y,
- z=response.wave_position.point.z,
- ),
- orientation=Quaternion(0, 0, 0, 1),
+ goal_pose = self._tf_pose(
+ PoseStamped(
+ pose=Pose(
+ position=Point(
+ x=response.wave_position.point.x,
+ y=response.wave_position.point.y,
+ z=response.wave_position.point.z,
),
- header=prev_pose.header,
+ orientation=Quaternion(0, 0, 0, 1),
),
- "map",
+ header=prev_pose.header,
),
- self._stopping_distance,
+ "map",
)
if goal_pose is None:
rospy.logwarn("Could not find a path to the person")
@@ -322,7 +342,7 @@ def _cancel_goal(self) -> None:
def _check_finished(self) -> bool:
if self._tts_client_available:
- self._tts("Have we arrived?", wait=True)
+ self._tts("Have we arrived? Please say Yes We Have Arrived", wait=True)
if self._transcribe_speech_client_available:
self._transcribe_speech_client.send_goal_and_wait(
@@ -330,7 +350,7 @@ def _check_finished(self) -> bool:
)
transcription = self._transcribe_speech_client.get_result().sequence
- return "yes" in transcription.lower()
+ return "yes" in transcription.lower() or "arrived" in transcription.lower() or "arrive" in transcription.lower()
return True
def _get_pose_on_path(
@@ -361,11 +381,14 @@ def _get_pose_on_path(
return chosen_pose
- def _move_base(self, pose: PoseStamped) -> MoveBaseGoal:
+ def _move_base(self, pose: PoseStamped, wait: bool = False) -> MoveBaseGoal:
goal: MoveBaseGoal = MoveBaseGoal()
goal.target_pose = pose
self._move_base_client.send_goal(goal)
+ if wait:
+ self._move_base_client.wait_for_result()
+
return goal
def _tts(self, text: str, wait: bool) -> None:
@@ -378,7 +401,9 @@ def _tts(self, text: str, wait: bool) -> None:
else:
self._tts_client.send_goal(tts_goal)
- def follow(self) -> None:
+ def follow(self) -> FollowResult:
+
+ result = FollowResult()
person_trajectory: PoseArray = PoseArray()
person_trajectory.header.frame_id = "odom"
@@ -386,9 +411,11 @@ def follow(self) -> None:
prev_goal: Union[None, PoseStamped] = None
last_goal_time: Union[None, rospy.Time] = None
going_to_person: bool = False
- track_vels: List[float] = []
+ track_vels: List[Tuple[float, float]] = []
asking_time: rospy.Time = rospy.Time.now()
+ self._tts("I will follow you now. Please walk slowly!", wait=False)
+
while not rospy.is_shutdown():
tracks: PersonArray = rospy.wait_for_message("/people_tracked", PersonArray)
@@ -408,28 +435,7 @@ def follow(self) -> None:
rospy.loginfo("Lost track of person, recovering...")
person_trajectory = PoseArray()
ask_back: bool = False
-
- if prev_track is not None:
- robot_pose: PoseStamped = self._robot_pose_in_odom()
- if robot_pose:
- dist: float = self._euclidian_distance(
- robot_pose.pose, prev_track.pose
- )
- rospy.loginfo(f"Distance to last known position: {dist}")
- if dist >= MAX_VISION_DIST:
- ask_back = True
- else:
- ask_back = True
- else:
- ask_back = True
-
- if not ask_back:
- self._recover_track(
- say=not self._recover_vision(robot_pose, prev_goal)
- )
- else:
- self._recover_track(say=True)
-
+ self._recover_track(say=True)
prev_track = None
continue
@@ -475,6 +481,8 @@ def follow(self) -> None:
"map",
)
self._move_base(goal_pose)
+
+ self._waypoints.append(goal_pose)
prev_goal = goal_pose
prev_track = track
last_goal_time = rospy.Time.now()
@@ -503,9 +511,21 @@ def follow(self) -> None:
# clear velocity buffer
track_vels = []
+ # clear waypoints
+ self._waypoints = []
+
if self._check_finished():
rospy.loginfo("Finished following person")
break
+ elif self._move_base_client.get_state() == GoalStatus.SUCCEEDED:
+ goal_pose = self._tf_pose(
+ PoseStamped(pose=track.pose, header=tracks.header),
+ "map",
+ )
+ self._move_base(goal_pose)
+ prev_goal = goal_pose
+ prev_track = track
rospy.loginfo("")
rospy.loginfo(np.mean([np.linalg.norm(vel) for vel in track_vels]))
rospy.loginfo("")
+ return result
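`follow()` now returns a `FollowResult`, but in the hunks above the accumulated `self._waypoints` (map-frame `PoseStamped`s) are never copied into it, so presumably they are unwrapped into the result's bare `Pose[]` field wherever the result is finalised. A sketch of that conversion, under that assumption:

    # Sketch: converting the stored PoseStamped waypoints into the Pose[]
    # field of FollowResult (assumed to run where the result is finalised).
    from lasr_person_following.msg import FollowResult

    def build_result(waypoints):  # waypoints: List[PoseStamped], map frame
        result = FollowResult()
        result.waypoints = [ps.pose for ps in waypoints]
        return result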
diff --git a/common/vision/lasr_vision_cropped_detection/src/lasr_vision_cropped_detection/cropped_detection.py b/common/vision/lasr_vision_cropped_detection/src/lasr_vision_cropped_detection/cropped_detection.py
index e9360cc57..da05f805f 100644
--- a/common/vision/lasr_vision_cropped_detection/src/lasr_vision_cropped_detection/cropped_detection.py
+++ b/common/vision/lasr_vision_cropped_detection/src/lasr_vision_cropped_detection/cropped_detection.py
@@ -299,7 +299,7 @@ def process_single_detection_request(
depth_image_topic: str = "/xtion/depth_registered/points",
yolo_2d_service_name: str = "/yolov8/detect",
yolo_3d_service_name: str = "/yolov8/detect3d",
- robot_pose_topic: str = "/amcl_pose",
+ robot_pose_topic: str = "/robot_pose",
debug_topic: str = "/lasr_vision/cropped_detection/debug",
) -> CDResponse:
"""Dispatches a detection request to the appropriate bounding box/mask 2D or 3D cropped
@@ -311,7 +311,7 @@ def process_single_detection_request(
depth_image_topic (str, optional): The topic to get an RGBD image from. Defaults to "/xtion/depth_registered/points".
yolo_2d_service_name (str, optional): Name of the 2D Yolo detection service. Defaults to "/yolov8/detect".
yolo_3d_service_name (str, optional): Name of the 3D Yolo detection service. Defaults to "/yolov8/detect3d".
- robot_pose_topic (str, optional): Service to get the robot's current pose. Defaults to "/amcl_pose".
+ robot_pose_topic (str, optional): Topic to get the robot's current pose. Defaults to "/robot_pose".
debug_topic (str, optional): Topic to publish results to for debugging. Defaults to "/lasr_vision/cropped_detection/debug".
Returns:
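Both `/amcl_pose` and `/robot_pose` carry `geometry_msgs/PoseWithCovarianceStamped` (the state machine later in this patch reads the same topic), so the default swap is drop-in. A sketch of probing the new topic:

    # Sketch: read one robot pose from the new default topic.
    import rospy
    from geometry_msgs.msg import PoseWithCovarianceStamped

    rospy.init_node("robot_pose_probe")
    msg = rospy.wait_for_message("/robot_pose", PoseWithCovarianceStamped, timeout=5.0)
    pos = msg.pose.pose.position
    rospy.loginfo("robot at x=%.2f y=%.2f", pos.x, pos.y)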
diff --git a/skills/config/motions.yaml b/skills/config/motions.yaml
index 9409d156b..5107e056d 100644
--- a/skills/config/motions.yaml
+++ b/skills/config/motions.yaml
@@ -10,6 +10,11 @@ play_motion:
points:
- positions: [1.61, -0.93, -3.14, 1.83, -1.58, -0.53, 0.00]
time_from_start: 0.0
+    cml_arm_away:
+      joints: [arm_1_joint, arm_2_joint, arm_3_joint, arm_4_joint, arm_5_joint, arm_6_joint, arm_7_joint]
+      points:
+        - positions: [2.65, -1.36, -1.23, 1.86, -1.36, 1.39, -0.01]
+          time_from_start: 0.0
open_gripper:
joints: [gripper_left_finger_joint, gripper_right_finger_joint]
points:
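`cml_arm_away` is the tucked-arm posture used by the updated `ReceiveObject` skill below. A sketch for exercising it in isolation through the standard play_motion action:

    # Test sketch: trigger the new motion via the play_motion action server.
    import actionlib
    import rospy
    from play_motion_msgs.msg import PlayMotionAction, PlayMotionGoal

    rospy.init_node("cml_arm_away_test")
    client = actionlib.SimpleActionClient("play_motion", PlayMotionAction)
    client.wait_for_server()
    goal = PlayMotionGoal(motion_name="cml_arm_away", skip_planning=False)
    client.send_goal_and_wait(goal)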
diff --git a/skills/src/lasr_skills/receive_object.py b/skills/src/lasr_skills/receive_object.py
index 6343d8dc2..c87fe3dae 100755
--- a/skills/src/lasr_skills/receive_object.py
+++ b/skills/src/lasr_skills/receive_object.py
@@ -147,7 +147,7 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True):
smach.StateMachine.add(
"SAY_PLACE",
Say(
- text=f"Please place the {object_name} in my end-effector. I will wait for a few seconds.",
+ text=f"Please place the {object_name} in my hand. I will wait for a few seconds.",
),
transitions={
"succeeded": "WAIT_5",
@@ -159,7 +159,7 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True):
smach.StateMachine.add(
"SAY_PLACE",
Say(
- format_str="Please place the {} in my end-effector. I will wait for a few seconds.",
+ format_str="Please place the {} in my hand. I will wait for a few seconds.",
),
transitions={
"succeeded": "WAIT_5",
@@ -189,7 +189,7 @@ def __init__(self, object_name: Union[str, None] = None, vertical: bool = True):
)
smach.StateMachine.add(
"FOLD_ARM",
- PlayMotion(motion_name="home"),
+ PlayMotion(motion_name="cml_arm_away"),
transitions={
"succeeded": "succeeded",
"aborted": "failed",
diff --git a/tasks/carry_my_luggage/launch/setup.launch b/tasks/carry_my_luggage/launch/setup.launch
index 0b8004029..45b355fc9 100644
--- a/tasks/carry_my_luggage/launch/setup.launch
+++ b/tasks/carry_my_luggage/launch/setup.launch
@@ -14,9 +14,13 @@
[launch-file XML stripped in extraction: this hunk removes one entry and adds five lines, likely launching the new stop_listener node, but the original markup is not recoverable]
diff --git a/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py b/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py
index 0f8ae49b6..a04048e5b 100644
--- a/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py
+++ b/tasks/carry_my_luggage/src/carry_my_luggage/state_machine.py
@@ -7,6 +7,7 @@
DetectGesture,
ReceiveObject,
HandoverObject,
+ GoToLocation,
)
from lasr_skills.vision import GetCroppedImage
from lasr_skills import PlayMotion
@@ -17,8 +18,11 @@
)
from std_msgs.msg import Empty
+from pal_navigation_msgs.srv import Acknowledgment, AcknowledgmentRequest
from std_srvs.srv import Empty as EmptySrv
+from geometry_msgs.msg import Pose, Point, Quaternion, PoseWithCovarianceStamped
+
from pal_startup_msgs.srv import (
StartupStart,
StartupStop,
@@ -173,6 +177,7 @@ def wait_cb(ud, msg):
},
)
+ # TODO: ensure it doesn't get stuck here; maybe make this state concurrent
smach.StateMachine.add(
"CLEAR_COSTMAPS",
smach_ros.ServiceState(
@@ -180,41 +185,32 @@ def wait_cb(ud, msg):
EmptySrv,
),
transitions={
- "succeeded": "SAY_FOLLOW",
- "aborted": "SAY_FOLLOW",
- "preempted": "SAY_FOLLOW",
+ "succeeded": "GET_START_LOCATION",
+ "aborted": "GET_START_LOCATION",
+ "preempted": "GET_START_LOCATION",
},
)
+    def start_pose_cb(ud):
+        try:
+            ud.start_pose = rospy.wait_for_message(
+                "/robot_pose", PoseWithCovarianceStamped, timeout=rospy.Duration(5.0)
+            ).pose.pose
+        except rospy.ROSException:
+            rospy.logerr("Failed to get robot pose")
+            ud.start_pose = Pose(
+                position=Point(0.0, 0.0, 0.0),
+                orientation=Quaternion(0.0, 0.0, 0.0, 1.0),
+            )
+        return "succeeded"
smach.StateMachine.add(
- "SAY_FOLLOW",
- Say(text="I will follow you now."),
- transitions={
- "succeeded": "SAY_STEP",
- # "succeeded": "INITIALISE_PERSON_TRACK",
- "aborted": "failed",
- "preempted": "failed",
- },
+ "GET_START_LOCATION",
+ smach.CBState(
+ start_pose_cb,
+ outcomes=["succeeded"],
+ output_keys=["start_pose"],
+ ),
+ transitions={"succeeded": "SAY_STEP"}
)
+
smach.StateMachine.add(
"SAY_STEP",
Say(text="First walk slowly towards me and then I will follow you."),
- transitions={
- "succeeded": "FOLLOW",
- "aborted": "failed",
- "preempted": "failed",
- },
- )
- smach.StateMachine.add(
- "INITIALISE_PERSON_TRACK",
- smach_ros.ServiceState(
- "/cml/initialise_person_with_vision",
- InitialisePersonWithVision,
- request_cb=lambda ud, req: InitialisePersonWithVisionRequest(
- point=ud.person_point,
- ),
- input_keys=["person_point"],
- ),
transitions={
"succeeded": "FOLLOW",
"aborted": "FOLLOW",
@@ -253,3 +249,20 @@ def wait_cb(ud, msg):
"failed": "failed",
},
)
+
+ smach.StateMachine.add(
+ "SAY_GOING_BACK",
+ Say(text="I am going back to the start position."),
+ transitions={
+ "succeeded": "GO_TO_START",
+ "preempted": "GO_TO_START",
+ "aborted": "GO_TO_START",
+ },
+ )
+
+ smach.StateMachine.add(
+ "GO_TO_START", # todo: instead, get the start position within the state machine, and return to it at the end
+ GoToLocation(),
+ remapping={"location" : "start_pose"},
+ transitions={"succeeded": "succeeded", "failed": "succeeded"},
+ )