
Commit

feat: FindPerson and FindNamedPerson.
jws-1 committed Apr 18, 2024
1 parent bab2d71 commit a94b357
Showing 3 changed files with 123 additions and 144 deletions.
7 changes: 4 additions & 3 deletions skills/src/lasr_skills/__init__.py
@@ -2,18 +2,19 @@
from .detect_3d import Detect3D
from .detect_3d_in_area import Detect3DInArea
from .wait_for_person import WaitForPerson
from .say import Say
from .wait_for_person_in_area import WaitForPersonInArea
from .describe_people import DescribePeople
from .look_to_point import LookToPoint
from .play_motion import PlayMotion
from .go_to_location import GoToLocation
from .go_to_person import GoToPerson
from .listen import Listen
from .listen_for import ListenFor
from .ask_and_listen import AskAndListen
from .find_person import FindPerson
from .find_named_person import FindNamedPerson
from .go_to_semantic_location import GoToSemanticLocation
from .say import Say
from .listen import Listen
from .listen_for import ListenFor
from .receive_object import ReceiveObject
from .handover_object import HandoverObject
from .ask_and_listen import AskAndListen
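With the updated exports, both new skills should be importable straight from the package (a one-line sketch, assuming the lasr_skills workspace is built and sourced):

from lasr_skills import FindPerson, FindNamedPerson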
130 changes: 54 additions & 76 deletions skills/src/lasr_skills/find_named_person.py
100644 → 100755
@@ -1,15 +1,12 @@
#!/usr/bin/env python3

import smach
from smach_ros import SimpleActionState

from geometry_msgs.msg import Pose, PoseStamped, PoseWithCovarianceStamped
from std_msgs.msg import Header
from geometry_msgs.msg import Pose, PoseWithCovarianceStamped

from typing import List

from lasr_skills import Detect3D, LookToPoint, GoToPerson, GoToLocation, AskAndListen

from lasr_skills import Detect3D, GoToLocation, AskAndListen

import navigation_helpers

Expand All @@ -18,23 +15,48 @@

class FindNamedPerson(smach.StateMachine):

    class GetPose(smach.State):
        def __init__(self):
            smach.State.__init__(
                self,
                outcomes=["succeeded", "failed"],
                output_keys=["current_pose"],
            )

        def execute(self, userdata):
            userdata.current_pose = rospy.wait_for_message(
                "/amcl_pose", PoseWithCovarianceStamped
            ).pose.pose
            return "succeeded"

    class ComputePath(smach.State):
        def __init__(self, waypoints: List[Pose]):
            smach.State.__init__(
                self,
                outcomes=["succeeded", "failed"],
                input_keys=["current_pose"],
                output_keys=["waypoints"],
            )
            self._waypoints = waypoints

        def execute(self, userdata):
            userdata.waypoints = navigation_helpers.min_hamiltonian_path(
                userdata.current_pose, self._waypoints
            )
            return "succeeded"

    class HandleDetections(smach.State):
        def __init__(self):
            smach.State.__init__(
                self,
                outcomes=["succeeded", "failed"],
                input_keys=["detections_3d"],
                output_keys=["person_point", "person_max_point"],
                output_keys=["person_point"],
            )

        def execute(self, userdata):
            if len(userdata.detections_3d.detected_objects) == 0:
                return "failed"

            userdata.person_point = userdata.detections_3d.detected_objects[0].point
            userdata.person_max_point = userdata.detections_3d.detected_objects[
                0
            ].max_point
            return "succeeded"

    class HandleResponse(smach.State):
Expand All @@ -46,26 +68,36 @@ def __init__(self):
            )

        def execute(self, userdata):
            # TODO: make this smarter, e.g. Levenshtein distance
            if "yes" in userdata.transcribed_speech.lower():
                return "succeeded"
            return "failed"

    def __init__(self, waypoints: List[Pose], name: str):
        smach.StateMachine.__init__(self, outcomes=["succeeded", "failed"])
        current_point = rospy.wait_for_message(
            "/amcl_pose", PoseWithCovarianceStamped
        ).pose.pose
        self.waypoints = navigation_helpers.min_hamiltonian_path(
            current_point, waypoints
        smach.StateMachine.__init__(
            self, outcomes=["succeeded", "failed"], input_keys=["person_point"]
        )

        with self:

            smach.StateMachine.add(
                "GET_POSE",
                self.GetPose(),
                transitions={"succeeded": "COMPUTE_PATH", "failed": "failed"},
            )

            smach.StateMachine.add(
                "COMPUTE_PATH",
                self.ComputePath(waypoints),
                transitions={"succeeded": "WAYPOINT_ITERATOR", "failed": "failed"},
            )

            waypoint_iterator = smach.Iterator(
                outcomes=["succeeded", "failed"],
                it=self.waypoints,
                it=self.userdata.waypoints,
                it_label="location",
                input_keys=[],
                output_keys=[],
                output_keys=["person_point"],
                exhausted_outcome="failed",
            )

Expand All @@ -74,6 +106,7 @@ def __init__(self, waypoints: List[Pose], name: str):
                container_sm = smach.StateMachine(
                    outcomes=["succeeded", "failed", "continue"],
                    input_keys=["location"],
                    output_keys=["person_point"],
                )

                with container_sm:
Expand All @@ -83,8 +116,7 @@ def __init__(self, waypoints: List[Pose], name: str):
                        GoToLocation(),
                        transitions={
                            "succeeded": "DETECT3D",
                            "preempted": "continue",
                            "aborted": "continue",
                            "failed": "failed",
                        },
                    )
                    smach.StateMachine.add(
Expand All @@ -99,28 +131,10 @@ def __init__(self, waypoints: List[Pose], name: str):
"HANDLE_DETECTIONS",
self.HandleDetections(),
transitions={
"succeeded": "GO_TO_PERSON",
"succeeded": "CHECK_NAME",
"failed": "continue",
},
)
smach.StateMachine.add(
"GO_TO_PERSON",
GoToPerson(),
transitions={"succeeded": "LOOK_AT_PERSON", "failed": "failed"},
remapping={"point": "person_point"},
)
smach.StateMachine.add(
"LOOK_AT_PERSON",
LookToPoint(),
transitions={
"succeeded": "succeeded",
"aborted": "failed",
"preempted": "failed",
},
remapping={
"point": "person_max_point",
},
)
smach.StateMachine.add(
"CHECK_NAME",
AskAndListen(f"I'm looking for {name}. Are you {name}?"),
Expand All @@ -143,39 +157,3 @@ def __init__(self, waypoints: List[Pose], name: str):
                waypoint_iterator,
                {"succeeded": "succeeded", "failed": "failed"},
            )


if __name__ == "__main__":
    import rospy
    from smach_ros import IntrospectionServer
    from geometry_msgs.msg import Pose, Point, Quaternion

    rospy.init_node("find_person")

    sm = FindPerson(
        [
            Pose(
                position=Point(1.446509335239015, -2.2460076897430974, 0.0),
                orientation=Quaternion(
                    0.0, 0.0, -0.6459923698644408, 0.763343866207703
                ),
            ),
            Pose(
                position=Point(-2.196456229125565, -0.27387058028873024, 0.0),
                orientation=Quaternion(
                    0.0, 0.0, 0.9778384065708362, 0.20936105329073992
                ),
            ),
            Pose(
                position=Point(-0.8129574905602319, -5.8536586556997445, 0.0),
                orientation=Quaternion(
                    0.0, 0.0, -0.9982013547068731, 0.05995044171116081
                ),
            ),
        ]
    )
    sis = IntrospectionServer("find_person", sm, "/FIND_PERSON")
    sis.start()
    sm.execute()
    rospy.spin()
    sis.stop()
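
Since the in-file demo under if __name__ == "__main__" is removed here, exercising the skill now takes a separate script. A minimal sketch, not part of the commit, assuming a running ROS master with AMCL publishing /amcl_pose, a sourced workspace providing lasr_skills and navigation_helpers, and a made-up person name:

#!/usr/bin/env python3
# Minimal usage sketch (not part of this commit); names and waypoints are placeholders.
import rospy
import smach
from geometry_msgs.msg import Pose, Point, Quaternion
from lasr_skills import FindNamedPerson

rospy.init_node("find_named_person_demo")  # hypothetical node name

# Waypoints reused from the removed demo block; substitute poses from your own map.
waypoints = [
    Pose(
        position=Point(1.446509335239015, -2.2460076897430974, 0.0),
        orientation=Quaternion(0.0, 0.0, -0.6459923698644408, 0.763343866207703),
    ),
    Pose(
        position=Point(-2.196456229125565, -0.27387058028873024, 0.0),
        orientation=Quaternion(0.0, 0.0, 0.9778384065708362, 0.20936105329073992),
    ),
]

sm = FindNamedPerson(waypoints, "Alice")  # "Alice" is a made-up name

# The state machine declares person_point as an input key, so seed it before executing.
init_ud = smach.UserData()
init_ud.person_point = None

outcome = sm.execute(init_ud)
rospy.loginfo("FindNamedPerson finished with outcome: %s", outcome)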
