Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/gpsr-state-machine-factory' in…
Browse files Browse the repository at this point in the history
…to my-beautyful-implementation-with-fucking-celeb-dataset
  • Loading branch information
Benteng Ma committed Jul 15, 2024
2 parents 917e14f + 0773842 commit dc9ac55
Show file tree
Hide file tree
Showing 32 changed files with 1,819 additions and 475 deletions.
Empty file added (file mode 100644).
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def __init__(self) -> None:

def _execute_cb(self, _: FollowGoal) -> None:
print("Executing follow_person action")
while not self._follower.begin_tracking(ask=False):
while not self._follower.begin_tracking():
rospy.logwarn("No people found, retrying...")
rospy.sleep(rospy.Duration(1.0))
warnings.warn(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from play_motion_msgs.msg import PlayMotionAction
from play_motion_msgs.msg import PlayMotionGoal

from lasr_vision_msgs.srv import DetectWaveRequest
from lasr_vision_msgs.srv import DetectWaveRequest, DetectWaveResponse

from math import atan2
import numpy as np
Expand Down Expand Up @@ -79,17 +79,17 @@ def __init__(
self,
start_following_radius: float = 2.0,
start_following_angle: float = 45.0,
n_secs_static_finished: float = 8.0,
n_secs_static_plan_close: float = 10.0,
new_goal_threshold: float = 2.0,
stopping_distance: float = 1.0,
static_speed: float = 0.0015,
max_speed: float = 0.5,
):
self._start_following_radius = start_following_radius
self._start_following_angle = start_following_angle
self._new_goal_threshold = new_goal_threshold
self._stopping_distance = stopping_distance
self._static_speed = static_speed
self._max_speed = max_speed

self._track_id = None

Expand Down Expand Up @@ -147,7 +147,9 @@ def _tf_pose(self, pose: PoseStamped, target_frame: str):
def _robot_pose_in_odom(self) -> Union[PoseStamped, None]:
try:
current_pose: PoseWithCovarianceStamped = rospy.wait_for_message(
"/robot_pose", PoseWithCovarianceStamped
"/robot_pose",
PoseWithCovarianceStamped,
timeout=rospy.Duration.from_sec(2.0),
)
except rospy.ROSException:
return None
Expand Down Expand Up @@ -208,10 +210,10 @@ def begin_tracking(self) -> bool:

def _recover_track(self, say) -> bool:
if not say:
self._tts("I SAW A person waving", wait=True)
self._tts("I see you are waving", wait=True)

if self._tts_client_available and say:
self._tts("I lost track of you, please come back", wait=True)
self._tts("Please could you come back...", wait=True)

while not self.begin_tracking() and not rospy.is_shutdown():
rospy.loginfo("Recovering track...")
Expand All @@ -221,57 +223,62 @@ def _recover_track(self, say) -> bool:

return True

# recover with vision, look up and check if person is waving
def _recover_vision(self, prev_pose: PoseStamped) -> bool:
# look up with playmotion and detect wave service
# if detected, begin tracking

# use play motion to look up
self._cancel_goal()

goal = PlayMotionGoal()
goal.motion_name = "look_centre"
self._play_motion.send_goal_and_wait(goal)

self._tts("Can you wave at me so that i can try to find you easily", wait=True)

# detect wave
def _detect_waving_person(self) -> DetectWaveResponse:
    """Grab one point cloud from the head camera and ask the wave-detection
    service whether a person is waving in it.

    Returns:
        DetectWaveResponse: the service response (``wave_detected`` and
        ``wave_position`` fields are read by callers). On a service error an
        empty, default-constructed response is returned so callers can treat
        ``wave_detected`` as False instead of handling exceptions.
    """
    try:
        # Block until a fresh organised point cloud arrives from the camera.
        pcl = rospy.wait_for_message("/xtion/depth_registered/points", PointCloud2)
        req = DetectWaveRequest()
        req.pcl_msg = pcl
        req.dataset = "resnet50"
        # Low threshold: prefer false positives over missing a genuine wave,
        # since callers re-check the returned position for NaNs anyway.
        req.confidence = 0.1
        response = self._detect_wave(req)
        if response.wave_detected:
            rospy.loginfo("Wave detected, beginning tracking")
        return response
    except rospy.ServiceException as e:
        # Best-effort recovery aid: log and hand back an empty response
        # rather than propagating the failure to the follow loop.
        rospy.loginfo(f"Error detecting wave: {e}")
        return DetectWaveResponse()

# recover with vision, look around and check if person is waving
def _recover_vision(self, prev_pose: PoseStamped) -> bool:
    """Try to re-acquire the followed person by looking around for a wave.

    Cancels the current navigation goal, asks the person to wave, then cycles
    through the configured recovery head motions; after each motion the wave
    detector is polled up to ``self._vision_recovery_attempts`` times. On a
    valid detection the robot is sent to the waving person's position, facing
    set relative to ``prev_pose``.

    Args:
        prev_pose: last known pose of the person; its header frame is reused
            for the detected wave position and its position seeds the facing
            orientation.

    Returns:
        bool: True if a waving person was found and a goal was dispatched,
        False if every motion/attempt combination failed.
    """

    # cancel current goal
    self._cancel_goal()

    self._tts("Can you wave at me so that i can try to find you easily", wait=True)

    for motion in self._vision_recovery_motions:
        rospy.loginfo(f"Performing motion: {motion}")
        goal = PlayMotionGoal()
        goal.motion_name = motion
        self._play_motion.send_goal_and_wait(goal)
        for _ in range(self._vision_recovery_attempts):
            response = self._detect_waving_person()
            if response.wave_detected:
                # Detector can report a wave with an invalid (NaN) position;
                # retry in that case instead of navigating to garbage.
                if np.isnan(response.wave_position.point.x) or np.isnan(
                    response.wave_position.point.y
                ):
                    continue
                else:
                    # NOTE(review): the wave position comes from the camera
                    # point cloud, but it is stamped with prev_pose.header
                    # before transforming to "map" — confirm both are in the
                    # same frame, otherwise the goal lands in the wrong place.
                    goal_pose = self._tf_pose(
                        PoseStamped(
                            pose=Pose(
                                position=Point(
                                    x=response.wave_position.point.x,
                                    y=response.wave_position.point.y,
                                    z=response.wave_position.point.z,
                                ),
                                orientation=Quaternion(0, 0, 0, 1),
                            ),
                            header=prev_pose.header,
                        ),
                        "map",
                    )
                    # Face the detected person relative to where they last were.
                    goal_pose.pose.orientation = self._compute_face_quat(
                        prev_pose.pose, goal_pose.pose
                    )
                    rospy.loginfo(goal_pose.pose.position)
                    self._move_base(goal_pose)
                    return True

    return False

def _euclidian_distance(self, p1: Pose, p2: Pose) -> float:
return np.linalg.norm(
Expand Down Expand Up @@ -375,7 +382,7 @@ def follow(self) -> None:
prev_goal: Union[None, PoseStamped] = None
last_goal_time: Union[None, rospy.Time] = None
going_to_person: bool = False
track_vels: [float] = []
track_vels: List[float] = []

while not rospy.is_shutdown():

Expand All @@ -395,8 +402,27 @@ def follow(self) -> None:
if track is None:
rospy.loginfo("Lost track of person, recovering...")
person_trajectory = PoseArray()
recovery = self._recover_vision(prev_goal)
self._recover_track(say=not recovery)
ask_back: bool = False

if prev_track is not None:
robot_pose: PoseStamped = self._robot_pose_in_odom()
if robot_pose:
dist: float = self._euclidian_distance(
robot_pose.pose, prev_track.pose
)
rospy.loginfo(f"Distance to last known position: {dist}")
if dist >= MAX_VISION_DIST:
ask_back = True
else:
ask_back = True
else:
ask_back = True

if not ask_back:
self._recover_track(say=not self._recover_vision(prev_goal))
else:
self._recover_track(say=True)

prev_track = None
continue

Expand All @@ -418,6 +444,9 @@ def follow(self) -> None:
last_goal_time = rospy.Time.now()
prev_track = track

if np.mean([np.linalg.norm(vel) for vel in track_vels]) > self._max_speed:
self._tts("Please walk slower, I am struggling to keep up", wait=False)

# Distance to the previous pose
dist_to_prev = (
self._euclidian_distance(track.pose, prev_track.pose)
Expand Down Expand Up @@ -448,10 +477,51 @@ def follow(self) -> None:
elif (
np.mean([np.linalg.norm(vel) for vel in track_vels])
< self._static_speed
):

rospy.logwarn("Person has been static for too long, stopping")
) and len(track_vels) == 10:
rospy.logwarn(
"Person has been static for too long, going to them and stopping"
)
# cancel current goal
self._cancel_goal()

# clear velocity buffer
track_vels = []

robot_pose: PoseStamped = self._robot_pose_in_odom()
dist: float = self._euclidian_distance(track.pose, robot_pose.pose)
rospy.loginfo(f"Distance to person: {dist}")

# If the person is too far away, go to them
if dist > self._stopping_distance:
goal_pose = self._get_pose_on_path(
self._tf_pose(robot_pose, "map"),
self._tf_pose(
PoseStamped(
pose=track.pose,
header=tracks.header,
),
"map",
),
self._stopping_distance,
)
# If we can't find a path, face them
if goal_pose is None:
rospy.logwarn("Could not find a path to the person")
goal_pose = robot_pose
goal_pose.pose.orientation = self._compute_face_quat(
robot_pose.pose, track.pose
)
goal_pose = self._tf_pose(goal_pose, "map")
# Otherwise, face them
else:
goal_pose = robot_pose
goal_pose.pose.orientation = self._compute_face_quat(
robot_pose.pose, track.pose
)
goal_pose = self._tf_pose(goal_pose, "map")

self._move_base(goal_pose)

if self._check_finished():
rospy.loginfo("Finished following person")
break
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ def load_model(model_name: str = "all-MiniLM-L6-v2") -> SentenceTransformer:
Returns:
sentence_transformers.SentenceTransformer: the loaded model
"""
print(f"Loading model...")
return SentenceTransformer(model_name, device=DEVICE)


Expand Down
7 changes: 4 additions & 3 deletions common/vision/lasr_vision_clip/nodes/vqa
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
import rospy
from typing import List
from lasr_vision_clip.clip_utils import load_model, query_image_stream
from lasr_vision_clip.clip_utils import load_model, query_image
from lasr_vision_msgs.srv import VqaRequest, VqaResponse, Vqa
from sensor_msgs.msg import Image

Expand Down Expand Up @@ -29,8 +29,9 @@ class VqaService:
Returns:
VqaResult
"""
answer, cos_score, annotated_img = query_image_stream(
request.image, self._model, request.possible_answers, annotate=True
possible_answers = request.possible_answers
answer, cos_score, annotated_img = query_image(
self._model, request.image_raw, possible_answers, annotate=True
)

self._debug_pub.publish(annotated_img)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ def encode_img(model, img_msg: Image) -> np.ndarray:
def query_image(
img_msg: Image,
model: SentenceTransformer,
img_msg: Image,
answers: list[str],
annotate: bool = False,
) -> tuple[str, torch.Tensor, Image]:
Expand Down
Loading

0 comments on commit dc9ac55

Please sign in to comment.