Merge pull request SUTURO#51 from Julislz/MS2
[Receptionist] alternate gaze during introduction and expanded Human …
Julislz authored Mar 21, 2024
2 parents 30ec7c4 + 4be2c49 commit a4b3b81
Showing 5 changed files with 104 additions and 96 deletions.
60 changes: 29 additions & 31 deletions demos/pycram_receptionist_demo/Ms3_receptionist_demo.py
@@ -1,7 +1,6 @@
import rospy

from pycram.designators.action_designator import DetectAction, NavigateAction
from demos.pycram_receptionist_demo.utils.misc import *
from pycram.designators.action_designator import DetectAction, NavigateAction, LookAtAction
from pycram.designators.motion_designator import TalkingMotion
import demos.pycram_receptionist_demo.utils.misc as misc
from pycram.process_module import real_robot
import pycram.external_interfaces.giskard as giskardpy
from pycram.ros.robot_state_updater import RobotStateUpdater
@@ -10,7 +9,6 @@
from pycram.designators.object_designator import *
from pycram.bullet_world import BulletWorld, Object
from std_msgs.msg import String, Bool
import pycram.external_interfaces.navigate as moveBase

world = BulletWorld("DIRECT")
v = VizMarkerPublisher()
@@ -27,19 +25,20 @@

with real_robot:

#wait for doorbell
#bell_subscriber = rospy.Subscriber("doorbell", Bool, doorbell_cb)
#while not doorbell:
# # TODO: spin or sleep better?
# rospy.spin()
# wait for doorbell
bell_subscriber = rospy.Subscriber("doorbell", Bool, misc.doorbell_cb)
while not misc.doorbell:
# TODO: spin or sleep better?
# TODO: Failure Handling, when no bell is heard for a longer period of time
rospy.spin()

# subscriber not needed anymore
#bell_subscriber.unregister()
bell_subscriber.unregister()

# question: is the hsr standing in front of the door already??
# NavigateAction([misc.pose_door]).resolve().perform()
# giskardpy.opendoor()

TalkingMotion("Welcome, please step in").resolve().perform()
TalkingMotion("Welcome, please come in").resolve().perform()

# look for human
DetectAction(technique='human', state='start').resolve().perform()
@@ -54,33 +53,31 @@
pub_nlp.publish("start listening")

# receives name and drink via topic
rospy.Subscriber("nlp_out", String, talk_request_nlp)
rospy.Subscriber("nlp_out", String, misc.talk_request_nlp)

# failure handling
rospy.Subscriber("nlp_feedback", Bool, talk_error)
rospy.Subscriber("nlp_feedback", Bool, misc.talk_error)

while guest1.name == "guest1":
while misc.guest1.name == "guest1":
rospy.sleep(1)
print(guest1.name)
print(understood)

TalkingMotion("it is so noisy here, please confirm if i got your name right").resolve().perform()
rospy.sleep(1)
TalkingMotion("is your name " + guest1.name + "?").resolve().perform()
TalkingMotion("is your name " + misc.guest1.name + "?").resolve().perform()
pub_nlp.publish("start")

rospy.Subscriber("nlp_confirmation", Bool, name_cb)
rospy.Subscriber("nlp_confirmation", Bool, misc.name_cb)

while not guest1.understood:
while not misc.understood:
rospy.sleep(1)
print(str(guest1.understood))


# stop looking
TalkingMotion("i will show you the living room now").resolve().perform()
rospy.sleep(1)
TalkingMotion("please step out of the way and follow me").resolve().perform()
rospy.loginfo("stop looking now")
# TODO: look in direction of navigation maybe?
giskardpy.stop_looking()

# stop perceiving human
@@ -90,21 +87,22 @@
# lead human to living room
# TODO: check if rospy.sleep is needed and how long
rospy.sleep(2)
#NavigateAction([pose_kitchen_to_couch]).resolve().perform()
#NavigateAction([pose_couch]).resolve().perform()
#NavigateAction([misc.pose_kitchen_to_couch]).resolve().perform()
#NavigateAction([misc.pose_couch]).resolve().perform()
TalkingMotion("Welcome to the living room").resolve().perform()
rospy.sleep(1)

# search for free place to sit and host
# DetectAction(technique='human', state='stop').resolve().perform()
# TODO: get pose of host that sits in living room
# TODO: Failure Handling: scan room if no human detected on couch
human_pose = DetectAction(technique='human', state='start').resolve().perform()
print(human_pose)
misc.host.set_pose(human_pose)
# TODO: HSR looks to his right??
misc.guest1.set_pose()

# point to free place
# giskardpy.point_to_seat

# introduce humans and look at them
# giskardpy.look()
introduce(guest1.name, guest1.fav_drink, host.name, host.fav_drink)




misc.introduce()
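One note on the doorbell wait above: rospy.spin() blocks until the node shuts down, so the body of the while-not-doorbell loop only runs once and the open TODO about failure handling stays unanswered. A sleep-based poll with a timeout is one alternative; the sketch below is only that, a sketch, with the 60-second timeout and 0.5-second poll interval being assumptions rather than values from the demo.

import rospy
from std_msgs.msg import Bool
import demos.pycram_receptionist_demo.utils.misc as misc


def wait_for_doorbell(timeout=60.0):
    """Poll the flag set by misc.doorbell_cb instead of blocking in rospy.spin()."""
    # subscribe to the same topic the demo uses
    bell_subscriber = rospy.Subscriber("doorbell", Bool, misc.doorbell_cb)
    start = rospy.get_time()
    try:
        while not misc.doorbell and not rospy.is_shutdown():
            if rospy.get_time() - start > timeout:
                return False  # no bell heard in time, the caller decides how to recover
            rospy.sleep(0.5)
        return misc.doorbell
    finally:
        # subscriber not needed anymore once the wait is over
        bell_subscriber.unregister()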
58 changes: 7 additions & 51 deletions demos/pycram_receptionist_demo/tests/nlp_planning.py
@@ -1,60 +1,16 @@
import rospy

from pycram.designators.action_designator import DetectAction, NavigateAction
from pycram.designators.motion_designator import TalkingMotion
from pycram.fluent import Fluent
from demos.pycram_receptionist_demo.utils.misc import *
from pycram.helper import axis_angle_to_quaternion
from pycram.process_module import real_robot
import pycram.external_interfaces.giskard as giskardpy
from pycram.external_interfaces.knowrob import get_guest_info
from pycram.ros.robot_state_updater import RobotStateUpdater
from pycram.ros.viz_marker_publisher import VizMarkerPublisher
from pycram.designators.location_designator import *
from pycram.designators.object_designator import *
from pycram.bullet_world import BulletWorld, Object
from std_msgs.msg import String, Bool
from demos.pycram_receptionist_demo.deprecated import talk_actions
import pycram.external_interfaces.navigate as moveBase

world = BulletWorld("DIRECT")
# /pycram/viz_marker topic with a MarkerArray
v = VizMarkerPublisher()

robot = Object("hsrb", "robot", "../../resources/" + robot_description.name + ".urdf")
robot_desig = ObjectDesignatorDescription(names=["hsrb"]).resolve()
robot.set_color([0.5, 0.5, 0.9, 1])

# careful that you spawn the correct kitchen
kitchen = Object("kitchen", "environment", "kitchen.urdf")
giskardpy.init_giskard_interface()
RobotStateUpdater("/tf", "/giskard_joint_states")

pub_nlp = rospy.Publisher('/startListener', String, queue_size=10)


def test():
with real_robot:
print("start demo")
#TalkingMotion("Hello we will test NLP now").resolve().perform()
#rospy.sleep(1)
import demos.pycram_receptionist_demo.utils.misc as misc

TalkingMotion("Hello, i am Toya and my favorite drink is oil. What about you, talk to me?").resolve().perform()
rospy.sleep(6)
pub_nlp.publish("start listening")

# failure handling
rospy.loginfo("before Subscriber")
def doorbell_fct():

# failure handling
rospy.Subscriber("nlp_feedback", Bool, talk_error)
rospy.Subscriber("doorbell", Bool, misc.doorbell_cb)
while not misc.doorbell:
rospy.sleep(1)

# receives name and drink via topic
rospy.Subscriber("nlp_out", String, talk_request_nlp)
print("doorbell sound was heard!")

while True:
print("Hello")


if __name__ == '__main__':
test()
doorbell_fct()
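The while True: print("Hello") loop at the end of the new test spins at full speed just to keep the process alive. If that is all it is for, a throttled loop does the same with less console noise; the sketch below assumes the file's existing import rospy, and the 1 Hz rate and 10-second log period are assumed values, not taken from the test.

# Sketch only: keep the test node alive without busy-printing.
rate = rospy.Rate(1)
while not rospy.is_shutdown():
    rospy.loginfo_throttle(10, "doorbell test still running")
    rate.sleep()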
41 changes: 29 additions & 12 deletions demos/pycram_receptionist_demo/utils/misc.py
@@ -4,9 +4,10 @@

# Publisher for NLP
pub_nlp = rospy.Publisher('/startListener', String, queue_size=10)
pub_pose = rospy.Publisher('/human_pose', PoseStamped, queue_size=10)
understood = False
doorbell = False
name = False


# Declare variables for humans
host = HumanDescription("Yannis", fav_drink="ice tea")
@@ -16,6 +17,7 @@
# Pose variables
# Pose in front of the couch, HSR looks in direction of couch
pose_couch = Pose([3, 5, 0], [0, 0, 1, 0])
pose_door = Pose([1.4, 0.25, 0], [0, 0, 1, 0])

# Pose in the passage between kitchen and living room
robot_orientation = axis_angle_to_quaternion([0, 0, 1], 90)
@@ -64,9 +66,6 @@ def talk_request_nlp(data: String):
pub_nlp.publish("start")





def talk_error(data):
"""
callback function if no name/drink was heard
@@ -77,15 +76,33 @@ def talk_error(data):
pub_nlp.publish("start listening")


def introduce(name1, drink1, host_name, host_drink):
def introduce(name_a: Optional[str] = guest1.name, drink_a: Optional[str] = guest1.fav_drink, pose_a: Optional[PoseStamped] = None,
name_b: Optional[str] = host.name, drink_b: Optional[str] = host.fav_drink, pose_b: Optional[PoseStamped] = None):
"""
Text for robot to introduce two people to each other
Text for robot to introduce two people to each other and alternate gaze
:param name_a: name of the person that gets introduced to person b first
:param drink_a: favorite drink of person a
:param pose_a: position of person a where the hsr will look at
:param name_b: name of the person that person a is introduced to
:param drink_b: favorite drink of person b
:param pose_b: position of person b where the hsr will look at
"""
first = "Hey " + str(host_name) + " This is " + str(name1) + " and the favorite drink of your guest is " + str(drink1)
second = str(name1) + " This is " + str(host_name) + " his favorite drink is " + str(host_drink)
TalkingMotion(first).resolve().perform()
# TODO: needs to be tested!
if pose_b:
pub_pose.publish(pose_b)
TalkingMotion(f"Hey, {name_b}").resolve().perform()

if pose_a:
pub_pose.publish(pose_a)
TalkingMotion(f" This is {name_a} and their favorite drink is {drink_a}").resolve().perform()
rospy.sleep(2)
TalkingMotion(f"Hey, {name_a}").resolve().perform()

if pose_b:
pub_pose.publish(pose_b)
TalkingMotion(f" This is {name_b} and their favorite drink is {drink_b}").resolve().perform()

rospy.sleep(3)
TalkingMotion(second).resolve().perform()


def doorbell_cb(data):
@@ -102,10 +119,10 @@ def name_cb(data):
callback function for a subscriber to NLP script.
is called when name was correctly understood
"""
print("name cb:" + str(data))
global understood
if data.data:
TalkingMotion("perfect, nice to meet you").resolve().perform()
guest1.set_understood(True)
understood = True
else:
TalkingMotion("i am sorry, please repeat your name loud and clear").resolve().perform()
rospy.sleep(1)
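For orientation, a call site for the reworked introduce() could look like the sketch below. It assumes the demo has already stored the perceived poses on misc.guest1 and misc.host via the new HumanDescription.set_pose(); the names and drinks come from the defaults defined in misc.py.

import demos.pycram_receptionist_demo.utils.misc as misc

# Sketch only: introduce guest1 and the host to each other and let the robot
# alternate its gaze by publishing their stored poses to /human_pose.
misc.introduce(name_a=misc.guest1.name, drink_a=misc.guest1.fav_drink,
               pose_a=misc.guest1.pose,
               name_b=misc.host.name, drink_b=misc.host.fav_drink,
               pose_b=misc.host.pose)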
12 changes: 10 additions & 2 deletions src/pycram/designators/object_designator.py
@@ -187,7 +187,7 @@ class HumanDescription:
"""
Class that represents humans. This class does not spawn a human in a simulation.
"""
def __init__(self, name: String, fav_drink: Optional = None):
def __init__(self, name: String, fav_drink: Optional = None, pose: Optional = None):
"""
:param name: name of human
:param fav_drink: favorite drink of human
@@ -197,7 +197,8 @@ def __init__(self, name: String, fav_drink: Optional = None):
# characteristics to consider: height, hair color, and age.
# self.human_pose = Fluent()
self.name = name
self.fav_drink = fav_drink # self.shirt_color = shirt_color # self.gender = gender
self.fav_drink = fav_drink
self.pose = pose

# self.human_pose_sub = rospy.Subscriber("/human_pose", PoseStamped, self.human_pose_cb)

@@ -225,3 +226,10 @@ def set_drink(self, new_drink):
:param new_drink: name of drink
"""
self.fav_drink = new_drink

def set_pose(self, new_pose):
"""
function for changing pose of human
:param new_pose: new pose of human
"""
self.pose = new_pose
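A minimal usage sketch for the extended HumanDescription follows; the name, drink and pose values are placeholders, and the Pose import is assumed to match the class misc.py already builds its pose constants with.

from pycram.designators.object_designator import HumanDescription
from pycram.pose import Pose  # assumption: same Pose class used elsewhere in pycram

guest = HumanDescription("Sarah", fav_drink="water")
# the pose can be attached later, e.g. once perception has located the person
guest.set_pose(Pose([3, 5, 0], [0, 0, 1, 0]))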
29 changes: 29 additions & 0 deletions src/pycram/external_interfaces/giskard.py
@@ -472,6 +472,35 @@ def stop_looking():
rospy.loginfo("hsr looks forward instead of looking at human")


def move_head_to_pose(pose: PointStamped):
"""
moves head to given position
:param pose: pose that head will rotate to
"""
# TODO: necessary? there is a LookAtAction?
# TODO: needs to be tested!
p_axis = Vector3Stamped()
p_axis.vector.z = 1
giskard_wrapper.set_pointing_goal(goal_point=pose,
tip_link="head_center_camera_frame",
pointing_axis=p_axis,
root_link="base_footprint")


def move_arm_to_pose(pose: PointStamped):
"""
moves arm to given position
:param pose: pose that arm will point to
"""
# TODO: needs to be tested!
p_axis = Vector3Stamped()
p_axis.vector.z = 1
giskard_wrapper.set_pointing_goal(goal_point=pose,
tip_link="hand_gripper_tool_frame",
pointing_axis=p_axis,
root_link="map")


def spawn_kitchen():
env_urdf = rospy.get_param('kitchen_description')
kitchen_pose = tf.lookup_pose('map', 'iai_kitchen/urdf_main')
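Both new pointing helpers take a geometry_msgs PointStamped. A call might be assembled as in the sketch below; the frame id and coordinates are placeholders, and it presumes the giskard interface was initialised as in the demo scripts.

import rospy
from geometry_msgs.msg import PointStamped
import pycram.external_interfaces.giskard as giskardpy

# Sketch only: have the head point at an assumed seat position on the couch.
target = PointStamped()
target.header.frame_id = "map"  # placeholder frame
target.header.stamp = rospy.Time.now()
target.point.x, target.point.y, target.point.z = 3.0, 5.0, 0.9  # placeholder coordinates

giskardpy.move_head_to_pose(target)
# giskardpy.move_arm_to_pose(target)  # same pattern for pointing with the gripper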
