From 80719d1adfaaeeb3d378c1d494813c5af0364ff4 Mon Sep 17 00:00:00 2001
From: juschulz
Date: Thu, 29 Feb 2024 14:19:58 +0100
Subject: [PATCH] [Receptionist Code clean up]

---
 demos/pycram_receptionist_demo/M2_Demo.py      |  24 +---
 .../pycram_receptionist_demo/MS2_example-v.py  | 131 ------------------
 demos/pycram_receptionist_demo/School_Demo.py  |  31 +----
 .../tests/connection_knowledge.py              |   2 +-
 .../tests/nlp_planning.py                      |  27 +---
 .../tests/perception_humans.py                 |  30 +---
 demos/pycram_receptionist_demo/utils/misc.py   |  17 ++-
 launch/receptionist.launch                     |  14 ++
 launch/receptionist_starter.sh                 |  16 +++
 src/pycram/designators/object_designator.py    |   2 +-
 src/pycram/external_interfaces/knowrob.py      |   2 +-
 11 files changed, 65 insertions(+), 231 deletions(-)
 delete mode 100644 demos/pycram_receptionist_demo/MS2_example-v.py
 create mode 100644 launch/receptionist.launch
 create mode 100755 launch/receptionist_starter.sh

diff --git a/demos/pycram_receptionist_demo/M2_Demo.py b/demos/pycram_receptionist_demo/M2_Demo.py
index 03484729e..313abd823 100644
--- a/demos/pycram_receptionist_demo/M2_Demo.py
+++ b/demos/pycram_receptionist_demo/M2_Demo.py
@@ -45,7 +45,9 @@
 def demo_ms2(area):
+    with real_robot:
+        # declare variables for humans
         host = HumanDescription("Yannis", fav_drink="Tea")
         guest1 = HumanDescription("guest1")
@@ -58,23 +60,20 @@ def demo_ms2(area):
         # look at guest and introduce
         giskardpy.move_head_to_human()
         TalkingMotion("Hello, i am Toya and my favorite drink is oil. What about you, talk to me?").resolve().perform()
-        rospy.sleep(1)

         # signal to start listening
         pub_nlp.publish("start listening")

-        #wait for human to say something
-        rospy.sleep(13)
+        # wait for human to say something
+        rospy.sleep(15)

-        # guest_data format is = ["name", "drink"]
+        # guest_data format is = ['person infos: "name', 'drink"']
         guest_data = get_guest_info("1.0")
-        print("guest data: " + str(guest_data))

         while guest_data == ['person_infos: "No name saved under this ID!"']:
             talk_error("no name")
             rospy.sleep(13)
             guest_data = get_guest_info("1.0")
-            print("guest data: " + str(guest_data))

         # set heard name and drink of guest
         guest1.set_name(guest_data[0][13:])
@@ -98,12 +97,10 @@ def demo_ms2(area):
             rospy.sleep(5)
             NavigateAction([pose_kitchen_to_couch]).resolve().perform()
             NavigateAction([pose_couch]).resolve().perform()
-            rospy.sleep(2)
             TalkingMotion("Welcome to the living room").resolve().perform()
             rospy.sleep(1)
         elif area == 'from_couch':
             rospy.loginfo("Navigating now")
-            TalkingMotion("navigating to kitchen, please step away").resolve().perform()
             rospy.sleep(5)
             NavigateAction([pose_from_couch]).resolve().perform()
             NavigateAction([pose_home]).resolve().perform()
@@ -112,18 +109,7 @@ def demo_ms2(area):
         TalkingMotion("not navigating").resolve().perform()
         rospy.sleep(1)
-
     introduce(guest1.name, guest1.fav_drink, host.name, host.fav_drink)
-    #TalkingMotion("End of demo").resolve().perform()
-
-
-def nav_test():
-    with real_robot:
-        robot_orientation = axis_angle_to_quaternion([0, 0, 1], 90)
-        test_pose1 = Pose([4.2, 3, 0], robot_orientation)
-        test_pose = Pose([3, 5, 0], [0, 0, 0, 1])
-        moveBase.queryPoseNav(test_pose1)
-        moveBase.queryPoseNav(test_pose)


 # demo_test('from_couch')
diff --git a/demos/pycram_receptionist_demo/MS2_example-v.py b/demos/pycram_receptionist_demo/MS2_example-v.py
deleted file mode 100644
index 2e90e176d..000000000
--- a/demos/pycram_receptionist_demo/MS2_example-v.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import rospy
-
-from pycram.designators.action_designator import DetectAction, NavigateAction
-from pycram.designators.motion_designator import TalkingMotion
-from pycram.fluent import Fluent
-from pycram.helper import axis_angle_to_quaternion
-from pycram.process_module import real_robot
-import pycram.external_interfaces.giskard as giskardpy
-from pycram.ros.robot_state_updater import RobotStateUpdater
-from pycram.ros.viz_marker_publisher import VizMarkerPublisher
-from pycram.designators.location_designator import *
-from pycram.designators.object_designator import *
-from pycram.bullet_world import BulletWorld, Object
-from std_msgs.msg import String
-from demos.pycram_receptionist_demo.deprecated import talk_actions
-import pycram.external_interfaces.navigate as moveBase
-
-world = BulletWorld("DIRECT")
-# /pycram/viz_marker topic bei Marker Array
-v = VizMarkerPublisher()
-
-robot = Object("hsrb", "robot", "../../resources/" + robot_description.name + ".urdf")
-robot_desig = ObjectDesignatorDescription(names=["hsrb"]).resolve()
-robot.set_color([0.5, 0.5, 0.9, 1])
-
-# carefull that u spawn the correct kitchen
-kitchen = Object("kitchen", "environment", "kitchen.urdf")
-giskardpy.init_giskard_interface()
-RobotStateUpdater("/tf", "/giskard_joint_states")
-
-robot_orientation_couch = axis_angle_to_quaternion([0, 0, 1], 0)
-pose_couch = Pose([3, 5, 0], robot_orientation_couch)
-
-robot_orientation_from_couch = axis_angle_to_quaternion([0, 0, 1], -90)
-pose_from_couch = Pose([4.2, 3.8, 0], robot_orientation_from_couch)
-
-robot_orientation = axis_angle_to_quaternion([0, 0, 1], 90)
-pose_kitchen_to_couch = Pose([4.2, 3, 0], robot_orientation)
-
-pose_home = Pose([3, 1.7, 0], robot_orientation)
-
-pub_nlp = rospy.Publisher('/startListener', String, queue_size=10)
-
-
-class HumanDescription:
-
-    def __init__(self, name, fav_drink):
-        # TODO: coordinate with Perception on what is easy to implement
-        # characteristics to consider: height, hair color, and age.
-        self.human_pose = Fluent()
-        self.name = name
-        self.fav_drink = fav_drink  # self.shirt_color = shirt_color  # self.gender = gender
-
-        self.human_pose_sub = rospy.Subscriber("/human_pose", String, self.human_pose_cb)
-
-    def human_pose_cb(self, HumanPoseMsg):
-        self.human_pose.set_value(HumanPoseMsg.data)
-
-
-def demo_test(area):
-    with real_robot:
-        # Guest arrives:
-        TalkingMotion("Hello, i am Toya let me open my eyes for you").resolve().perform()
-        # Perception, detect first guest -> First detect guest, then listen
-        DetectAction(technique='default', state='start').resolve().perform()
-
-        host = HumanDescription("Bob", fav_drink="Coffee")
-
-        # While loop, human is detected
-        while host.human_pose:
-            TalkingMotion("Please step in front of me").resolve.perform()
-            rospy.sleep(5)
-
-        rospy.loginfo("human detected")
-
-        # look at guest and introduction
-        giskardpy.move_head_to_human()
-        TalkingMotion("Hello, i am Toya and my favorite drink is oil. What about you, talk to me?").resolve().perform()
-
-        # reicht sleep 1?
-        rospy.sleep(1)
-
-        # signal to start listening
-        pub_nlp.publish("start listening")
-        # TODO: How to erst weiter machen, wenn Knowledge Daten geschickt hat
-
-        TalkingMotion("Hey i will stop looking now").resolve().perform()
-
-        rospy.loginfo("stop looking now")
-        giskardpy.stop_looking()
-        rospy.loginfo("Navigating now")
-        TalkingMotion("navigating to couch area now, pls step away").resolve().perform()
-
-        if area == 'to_couch':
-            NavigateAction([pose_kitchen_to_couch]).resolve().perform()
-            NavigateAction([pose_couch]).resolve().perform()
-        elif area == 'from_couch':
-            NavigateAction([pose_from_couch]).resolve().perform()
-            NavigateAction([pose_home]).resolve().perform()
-
-        # failure handling
-        # rospy.Subscriber("nlp_feedback", Bool, talk_error)
-
-        # receives name and drink via topic
-        # rospy.Subscriber("nlp_out", String, talk_request)
-
-
-def nav_test():
-    with real_robot:
-        robot_orientation = axis_angle_to_quaternion([0, 0, 1], 90)
-        test_pose1 = Pose([4.2, 3, 0], robot_orientation)
-        test_pose = Pose([3, 5, 0], [0, 0, 0, 1])
-        moveBase.queryPoseNav(test_pose1)
-        moveBase.queryPoseNav(test_pose)
-
-# demo_test('from_couch')
-# demo_test('to_couch')
-
-# receives name and drink via topic
-# rospy.Subscriber("nlp_out", String, talk_request)
-
-
-# 1. rasa run --enable-api -> start Rasa Server
-# 2. python3 activate_language_processing.py -> NLP
-# 3. roslaunch suturo_bringup suturo_bringup.launch -> Map
-# 4. roslaunch_hsr_velocity_controller unloas_my_controller.launch
-# 5. roslaunch giskardpy giskardpy_hsr_real_vel.launch -> Giskard
-# starten
-# 6. rosrun robokudo main.py _ae=humandetection_demo_ros_pkg=milestone1 -> Perception
-# 7. run demo in Pycharm -> Planning
diff --git a/demos/pycram_receptionist_demo/School_Demo.py b/demos/pycram_receptionist_demo/School_Demo.py
index e87e00212..52bf27043 100644
--- a/demos/pycram_receptionist_demo/School_Demo.py
+++ b/demos/pycram_receptionist_demo/School_Demo.py
@@ -1,6 +1,6 @@
 from pycram.designators.action_designator import DetectAction, NavigateAction
 from pycram.designators.motion_designator import TalkingMotion
-# from demos.pycram_receptionist_demo.utils.misc import *
+from demos.pycram_receptionist_demo.utils.misc import *
 from pycram.helper import axis_angle_to_quaternion
 from pycram.process_module import real_robot
 import pycram.external_interfaces.giskard as giskardpy
@@ -42,7 +42,7 @@
 data_received = False


-def talk_request(data: String):
+def talk_request_school(data: String):
     """
     callback function that takes the data from nlp (name and drink) and lets the robot talk
     :param data: String "name drink"
@@ -55,30 +55,19 @@
     data_received = True


-def talk_error(data):
-    """
-    callback function if no name/drink was heard
-    """
-
-    error_msgs = "i could not hear you, please repeat"
-    TalkingMotion(error_msgs).resolve().perform()
-    pub_nlp.publish("start listening")
-
-
 def demo_test(area):
     with real_robot:
         global data_received
         data_received = False
         print("start demo")
+        # look for human
         DetectAction(technique='human', state='start').resolve().perform()
-        rospy.loginfo("human detected")

         # look at guest and introduction
         giskardpy.move_head_to_human()
         TalkingMotion("Hello, i am Toya and my favorite drink is oil. What about you, talk to me?").resolve().perform()
What about you, talk to me?").resolve().perform() - rospy.sleep(1) # signal to start listening @@ -90,7 +79,7 @@ def demo_test(area): # get name and drink rospy.Subscriber("nlp_feedback", Bool, talk_error) - rospy.Subscriber("nlp_out", String, talk_request) + rospy.Subscriber("nlp_out", String, talk_request_school) while not data_received: rospy.sleep(0.5) @@ -119,13 +108,14 @@ def demo_test(area): rospy.sleep(1) TalkingMotion("Welcome to the living room").resolve().perform() TalkingMotion("take a seat").resolve().perform() + elif area == 'from_couch': rospy.loginfo("Navigating now") rospy.sleep(3) NavigateAction([pose_from_couch]).resolve().perform() NavigateAction([pose_home]).resolve().perform() + else: - rospy.loginfo("in else") TalkingMotion("not navigating").resolve().perform() rospy.sleep(3) print("end") @@ -133,14 +123,5 @@ def demo_test(area): TalkingMotion("End of demo").resolve().perform() -def nav_test(): - with real_robot: - robot_orientation = axis_angle_to_quaternion([0, 0, 1], 90) - test_pose1 = Pose([4.2, 3, 0], robot_orientation) - test_pose = Pose([3, 5, 0], [0, 0, 0, 1]) - moveBase.queryPoseNav(test_pose1) - moveBase.queryPoseNav(test_pose) - - # demo_test('from_couch') demo_test('to_couch') diff --git a/demos/pycram_receptionist_demo/tests/connection_knowledge.py b/demos/pycram_receptionist_demo/tests/connection_knowledge.py index 8556f9e09..ecc35fe41 100644 --- a/demos/pycram_receptionist_demo/tests/connection_knowledge.py +++ b/demos/pycram_receptionist_demo/tests/connection_knowledge.py @@ -58,7 +58,7 @@ def demo_test(): pub_nlp.publish("start listening") rospy.sleep(5) #asyncron?? - # TODO: test on real HSR + guest_data = get_guest_info("3.0") print("guest data: " + str(guest_data)) while guest_data == "No name saved under this ID!": diff --git a/demos/pycram_receptionist_demo/tests/nlp_planning.py b/demos/pycram_receptionist_demo/tests/nlp_planning.py index b6f51fe71..b8fcdd3d1 100644 --- a/demos/pycram_receptionist_demo/tests/nlp_planning.py +++ b/demos/pycram_receptionist_demo/tests/nlp_planning.py @@ -3,7 +3,7 @@ from pycram.designators.action_designator import DetectAction, NavigateAction from pycram.designators.motion_designator import TalkingMotion from pycram.fluent import Fluent -#from demos.pycram_receptionist_demo.utils.misc import * +from demos.pycram_receptionist_demo.utils.misc import * from pycram.helper import axis_angle_to_quaternion from pycram.process_module import real_robot import pycram.external_interfaces.giskard as giskardpy @@ -32,27 +32,6 @@ pub_nlp = rospy.Publisher('/startListener', String, queue_size=10) -def talk_error(data): - """ - callback function if no name/drink was heard - """ - rospy.loginfo(data) - rospy.loginfo("in callback error") - error_msgs = "i could not hear you, please repeat" - TalkingMotion(error_msgs).resolve().perform() - rospy.sleep(3) - pub_nlp.publish("start listening") - -def talk_request(data: String): - """ - callback function that takes the data from nlp (name and drink) and lets the robot talk - :param data: String "name drink" - """ - rospy.loginfo("in callback success") - name_drink = data.data.split(" ") - talk_actions.name_drink_talker(name_drink) - rospy.loginfo("nlp data:" + name_drink[0] + " " + name_drink[1]) - def test(): with real_robot: @@ -71,10 +50,10 @@ def test(): rospy.Subscriber("nlp_feedback", Bool, talk_error) # receives name and drink via topic - rospy.Subscriber("nlp_out", String, talk_request) + rospy.Subscriber("nlp_out", String, talk_request_nlp) while True: - print("HEllo") + 
print("Hello") if __name__ == '__main__': diff --git a/demos/pycram_receptionist_demo/tests/perception_humans.py b/demos/pycram_receptionist_demo/tests/perception_humans.py index 59a2298ba..4a47e6cdf 100644 --- a/demos/pycram_receptionist_demo/tests/perception_humans.py +++ b/demos/pycram_receptionist_demo/tests/perception_humans.py @@ -26,30 +26,14 @@ # carefull that u spawn the correct kitchen kitchen = Object("kitchen", "environment", "kitchen.urdf") giskardpy.init_giskard_interface() -#RobotStateUpdater("/tf", "/giskard_joint_states") + + def test(): with real_robot: - guest1 = HumanDescription("x") - # print("...................... nothing" ) - guest1.human_pose.set_value(False) - if not guest1.human_pose.get_value(): - print("wrong? " + str(guest1.human_pose)) - if guest1.human_pose.get_value(): - print("right? " + str(guest1.human_pose)) - - print("----------------------------------") DetectAction(technique='human', state='start').resolve().perform() - if not guest1.human_pose.get_value(): - print("wrong? " + str(guest1.human_pose)) - - if guest1.human_pose.get_value(): - print("right? " + str(guest1.human_pose)) - - - rospy.loginfo("human detected") print("---------- " + "start") @@ -67,13 +51,5 @@ def test(): print("end") - - - - - - - - if __name__ == '__main__': - test() \ No newline at end of file + test() diff --git a/demos/pycram_receptionist_demo/utils/misc.py b/demos/pycram_receptionist_demo/utils/misc.py index 9e5c233e6..ae0a8d68b 100644 --- a/demos/pycram_receptionist_demo/utils/misc.py +++ b/demos/pycram_receptionist_demo/utils/misc.py @@ -22,7 +22,21 @@ def talk_request(data: list): TalkingMotion(toyas_text).resolve().perform() rospy.sleep(1) TalkingMotion("nice to meet you").resolve().perform() - rospy.loginfo("data from Knowledge -> data[0] = " + data[0] + ", data[1] = " + data[1]) + + +def talk_request_nlp(data: str): + """ + callback function that takes the data from nlp (name and drink) and lets the robot talk + :param data: String "name drink" + """ + + rospy.loginfo("in callback success") + data = data.split(",") + toyas_text = "Hey " + data[0] + " your favorite drink is " + data[ + 1] + TalkingMotion(toyas_text).resolve().perform() + rospy.sleep(1) + TalkingMotion("nice to meet you").resolve().perform() def talk_error(data): @@ -48,4 +62,3 @@ def introduce(name1, drink1, host_name, host_drink): TalkingMotion(second).resolve().perform() - diff --git a/launch/receptionist.launch b/launch/receptionist.launch new file mode 100644 index 000000000..c133f03ec --- /dev/null +++ b/launch/receptionist.launch @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/launch/receptionist_starter.sh b/launch/receptionist_starter.sh new file mode 100755 index 000000000..d4dd44bb1 --- /dev/null +++ b/launch/receptionist_starter.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# Wechsle zum angegebenen Verzeichnis +#cd /home/suturo/suturo23_24/nlp_ws/src/suturo_nlp/activate_language_processing/scripts + +cd /home/jule/nlp_ws/src/suturo_nlp/activate_language_processing/scripts + +source /home/jule/nlp_ws/rasa_venv/bin/activate + + +# Führe das Python-Skript aus +python3 activate_language_processing.py + +# Rufe die Funktion run_rasa aus der .bashrc-Datei auf +source ~/.bashrc # Lädt die .bashrc-Datei, um sicherzustellen, dass run_rasa verfügbar ist +#run_rasa diff --git a/src/pycram/designators/object_designator.py b/src/pycram/designators/object_designator.py index 0e77408b1..b25e39333 100644 --- a/src/pycram/designators/object_designator.py +++ b/src/pycram/designators/object_designator.py 
@@ -187,7 +187,7 @@ class HumanDescription:
     """
     Class that represents humans. this class does not spawn a human in a simulation.
     """
-    def __init__(self, name: String, fav_drink: Optional = None):
+    def __init__(self, name: str, fav_drink: Optional = None):
         """
         :param name: name of human
         :param fav_drink: favorite drink of human
diff --git a/src/pycram/external_interfaces/knowrob.py b/src/pycram/external_interfaces/knowrob.py
index ee82a2853..c356cfd22 100644
--- a/src/pycram/external_interfaces/knowrob.py
+++ b/src/pycram/external_interfaces/knowrob.py
@@ -133,7 +133,7 @@ def get_guest_info(id):
     """
     function that uses Knowledge Service to get Name and drink from new guest via ID
     :param id: integer for person
-    :return: ["name", " drink"]
+    :return: ['person_infos: "name', 'drink"']
     """
     rospy.wait_for_service('name_server')