diff --git a/demos/pycram_receptionist_demo/Ms3_receptionist_demo.py b/demos/pycram_receptionist_demo/Ms3_receptionist_demo.py
index c06dda0cd..72e7e5744 100644
--- a/demos/pycram_receptionist_demo/Ms3_receptionist_demo.py
+++ b/demos/pycram_receptionist_demo/Ms3_receptionist_demo.py
@@ -1,4 +1,4 @@
-from pycram.designators.action_designator import DetectAction, NavigateAction
+from pycram.designators.action_designator import DetectAction, NavigateAction, OpenAction
 from demos.pycram_receptionist_demo.utils.new_misc import *
 from pycram.process_module import real_robot
 import pycram.external_interfaces.giskard as giskardpy
@@ -19,14 +19,16 @@
 kitchen = Object("kitchen", "environment", "kitchen.urdf")
 giskardpy.init_giskard_interface()
 RobotStateUpdater("/tf", "/giskard_joint_states")
+kitchen_desig = ObjectDesignatorDescription(names=["kitchen"])
 
 # variables for communcation with nlp
 pub_nlp = rospy.Publisher('/startListener', String, queue_size=10)
 response = ""
 callback = False
+doorbell = True
 
 # Declare variables for humans
-host = HumanDescription("Yannis", fav_drink="ice tea")
+host = HumanDescription("Bob", fav_drink="coffee")
 guest1 = HumanDescription("guest1")
 guest2 = HumanDescription("guest2")
 seat_number = 2
@@ -48,22 +50,24 @@ def data_cb(data):
 
     while not doorbell:
         # TODO: spin or sleep better?
-        # TODO: Failure Handling, when no bell is heard for a longer period of time
        rospy.spin()
 
+    # TODO: find name of door handle
+    # link in rviz: iai_kitchen:arena:door_handle_inside
+    # door_handle_desig = ObjectPart(names=["door_handle_inside"], part_of=kitchen_desig.resolve())
+    # OpenAction(object_designator_description=door_handle_desig, arms=["left"]).resolve().perform()
 
     # NavigateAction([pose_door]).resolve().perform()
-    # giskardpy.opendoor()
 
-    TalkingMotion("Welcome, please come in").resolve().perform()
+    TalkingMotion("Welcome, please step in").resolve().perform()
 
     # look for human
-    human_desig = DetectAction(technique='attributes', state='start').resolve().perform()
+    attr_list = DetectAction(technique='attributes', state='start').resolve().perform()
     rospy.loginfo("human detected")
 
-    # TODO: check what perception returns exactly
-    attr_list = human_desig.attribute
+    guest1.set_attributes(attr_list)
     print(attr_list)
-    guest1.set_attributes(human_desig.attribute)
+
+    DetectAction(technique='human').resolve().perform()
 
     # look at guest and introduce
     giskardpy.move_head_to_human()
@@ -80,7 +84,7 @@ def data_cb(data):
    if response[0] == "":
        # success a name and intent was understood
        if response[1] != "":
-            TalkingMotion("it is so noisy here, please confirm if i got your name right").resolve().perform()
+            TalkingMotion("please confirm if i got your name right").resolve().perform()
            guest1.set_drink(response[2])
            rospy.sleep(1)
            guest1.set_name(name_confirm(response[1]))
@@ -132,37 +136,22 @@ def data_cb(data):
    NavigateAction([pose_couch]).resolve().perform()
    TalkingMotion("Welcome to the living room").resolve().perform()
    rospy.sleep(1)
+    TalkingMotion("please take a seat next to your host").resolve().perform()
 
    # search for free place to sit and host
    # TODO: Failure Handling: scan room if no human detected on couch
-    for i in range(seat_number):
-        state = "seat" + str(i)
-        seat_desig = DetectAction(technique='location', state=state).resolve().perform()
-        print(seat_desig)
-        # TODO get pose of occupied seat
-        if not seat_desig.occupied:
-            guest1.set_pose(seat_desig.pose)
-            # point to free place
-            # giskardpy.point_to_seat
-            TalkingMotion("please sit over there").resolve().perform()
-
-        # failure handling if all seats are taken
-        elif i+1 == seat_number:
-            guest1.set_pose(seat_desig.pose)
-            # point to free place
-            # giskardpy.point_to_seat
-            TalkingMotion("please sit over there").resolve().perform()
+    # TODO: is it ok to seat guest before introducing??
 
    pose_host = PoseStamped()
-    pose_host.header.frame_id = 'map'
+    pose_host.header.frame_id = '/map'
    pose_host.pose.position.x = 1.0
    pose_host.pose.position.y = 5.9
    pose_host.pose.position.z = 0.9
 
    pose_guest = PoseStamped()
-    pose_guest.header.frame_id = 'map'
+    pose_guest.header.frame_id = '/map'
    pose_guest.pose.position.x = 1.0
    pose_guest.pose.position.y = 4.7
    pose_guest.pose.position.z = 1.0
@@ -173,4 +162,5 @@ def data_cb(data):
 
    # introduce humans and look at them
    introduce(host, guest1)
+    describe(guest1)
 
diff --git a/demos/pycram_receptionist_demo/tests/MS3_testable.py b/demos/pycram_receptionist_demo/tests/MS3_testable.py
index f2c98eb8a..daa1fb9fe 100644
--- a/demos/pycram_receptionist_demo/tests/MS3_testable.py
+++ b/demos/pycram_receptionist_demo/tests/MS3_testable.py
@@ -1,4 +1,5 @@
 import rospy
+from geometry_msgs.msg import PointStamped
 from pycram.designators.action_designator import DetectAction, NavigateAction
 from demos.pycram_receptionist_demo.utils.new_misc import *
 
@@ -27,8 +28,9 @@
 response = ""
 callback = False
 
+
 # Declare variables for humans
-host = HumanDescription("Yannis", fav_drink="ice tea")
+host = HumanDescription("Angel", fav_drink="ice tea")
 guest1 = HumanDescription("guest1")
 guest2 = HumanDescription("guest2")
 seat_number = 2
@@ -41,6 +43,7 @@ def data_cb(data):
    response.append("None")
    callback = True
 
+
 def demo_tst():
    """
    testing HRI and introduction and navigating
    """
@@ -50,13 +53,18 @@
        global response
        test_all = True
 
+        TalkingMotion("Hello").resolve().perform()
+        giskardpy.move_head_to_human()
+        rospy.Subscriber("nlp_out", String, data_cb)
+        desig = DetectAction(technique='attributes').resolve().perform()
+        guest1.set_attributes(desig)
+
        DetectAction(technique='human', state='start').resolve().perform()
        rospy.loginfo("human detected")
 
-        giskardpy.move_head_to_human()
-        TalkingMotion("Hello, i am Toya and my favorite drink is oil. What about you, talk to me?").resolve().perform()
-        rospy.sleep(0.9)
+        TalkingMotion("I am Toya and my favorite drink is oil. What about you, talk to me loud and clear?").resolve().perform()
+        rospy.sleep(1.2)
 
        # signal to start listening
        pub_nlp.publish("start listening")
@@ -67,7 +75,7 @@ def demo_tst():
 
        if response[0] == "":
            if response[1] != "":
-                TalkingMotion("it is so noisy here, please confirm if i got your name right").resolve().perform()
+                TalkingMotion("please confirm if i got your name right").resolve().perform()
                guest1.set_drink(response[2])
                rospy.sleep(1)
                guest1.set_name(name_confirm(response[1]))
@@ -116,10 +124,18 @@ def demo_tst():
        TalkingMotion("Welcome to the living room").resolve().perform()
        rospy.sleep(1)
 
+        # hard coded poses for seat1 as PointStamped
+        pose_seat = PointStamped()
+        pose_seat.header.frame_id = "/map"
+        pose_seat.point.x = 1.1
+        pose_seat.point.y = 4.7
+        pose_seat.point.z = 1
+
+        giskardpy.move_arm_to_pose(pose_seat)
        TalkingMotion("please take a seat next to your host").resolve().perform()
        rospy.sleep(2)
 
-        # hard coded poses for seat1 and seat2
+        # hard coded poses for seat1 and seat2 as PoseStamped
        pose_host = PoseStamped()
        pose_host.header.frame_id = "/map"
        pose_host.pose.position.x = 1
@@ -147,45 +163,57 @@ def demo_tst2():
    """
    just testing the gazing between humans -> introduce function
    """
    with real_robot:
+        jule = False
        TalkingMotion("Welcome, please come in").resolve().perform()
 
-        # look for human
-        # TODO: test new technique
-        #DetectAction(technique='human', state='start').resolve().perform()
-        rospy.loginfo("human detected")
-        #giskardpy.move_head_to_human()
-        #rospy.sleep(7)
-        #DetectAction(technique='human', state='stop').resolve().perform()
+        pose_seat = PointStamped()
+        pose_seat.header.frame_id = "/map"
+        pose_seat.point.x = 1.1
+        pose_seat.point.y = 4.7
+        pose_seat.point.z = 1
 
-        pose_host = PoseStamped()
-        pose_host.header.frame_id = 'map'
-        pose_host.pose.position.x = 1.0
-        pose_host.pose.position.y = 5.9
-        pose_host.pose.position.z = 0.9
+        giskardpy.move_arm_to_pose(pose_seat)
+        TalkingMotion("please take a seat next to your host").resolve().perform()
+        rospy.sleep(2)
 
-        pose_guest = PoseStamped()
-        pose_guest.header.frame_id = 'map'
-        pose_guest.pose.position.x = 1.0
-        pose_guest.pose.position.y = 4.7
-        pose_guest.pose.position.z = 1.0
+        if jule:
+            # look for human
+            # TODO: test new technique
+            #DetectAction(technique='human', state='start').resolve().perform()
+            rospy.loginfo("human detected")
+            #giskardpy.move_head_to_human()
+            #rospy.sleep(7)
+            #DetectAction(technique='human', state='stop').resolve().perform()
 
-        host.set_pose(pose_host)
+            pose_host = PoseStamped()
+            pose_host.header.frame_id = 'map'
+            pose_host.pose.position.x = 1.0
+            pose_host.pose.position.y = 5.9
+            pose_host.pose.position.z = 0.9
 
-        guest1.set_pose(pose_guest)
+            pose_guest = PoseStamped()
+            pose_guest.header.frame_id = 'map'
+            pose_guest.pose.position.x = 1.0
+            pose_guest.pose.position.y = 4.7
+            pose_guest.pose.position.z = 1.0
 
-        # introduce humans and look at them
-        giskardpy.move_head_to_human()
-        rospy.sleep(3)
-        introduce(host, guest1)
+            host.set_pose(pose_host)
 
-        rospy.sleep(2)
-        TalkingMotion("Introducing again").resolve().perform()
-        rospy.sleep(2)
-        introduce(host, guest1)
+            guest1.set_pose(pose_guest)
 
+            # introduce humans and look at them
+            giskardpy.move_head_to_human()
+            rospy.sleep(3)
+            introduce(host, guest1)
+
+            rospy.sleep(2)
+            TalkingMotion("Introducing again").resolve().perform()
+            rospy.sleep(2)
+            introduce(host, guest1)
 
-demo_tst()
+demo_tst()
+# demo_tst2()
diff --git a/demos/pycram_receptionist_demo/tests/perception_humans.py b/demos/pycram_receptionist_demo/tests/perception_humans.py
index 803cd56ea..41c27b8ee 100644
--- a/demos/pycram_receptionist_demo/tests/perception_humans.py
+++ b/demos/pycram_receptionist_demo/tests/perception_humans.py
@@ -1,19 +1,13 @@
 import rospy
 from pycram.designators.action_designator import DetectAction, NavigateAction
-from pycram.designators.motion_designator import TalkingMotion
-from pycram.fluent import Fluent
-from pycram.helper import axis_angle_to_quaternion
 from pycram.process_module import semi_real_robot, real_robot
 import pycram.external_interfaces.giskard as giskardpy
-from pycram.ros.robot_state_updater import RobotStateUpdater
 from pycram.ros.viz_marker_publisher import VizMarkerPublisher
 from pycram.designators.location_designator import *
 from pycram.designators.object_designator import *
 from pycram.bullet_world import BulletWorld, Object
-from std_msgs.msg import String
-from demos.pycram_receptionist_demo.deprecated import talk_actions
-import pycram.external_interfaces.navigate as moveBase
+from demos.pycram_receptionist_demo.utils.new_misc import *
 
 world = BulletWorld("DIRECT")
 # /pycram/viz_marker topic bei Marker Array
@@ -26,42 +20,53 @@
 # careful that u spawn the correct kitchen
 kitchen = Object("kitchen", "environment", "kitchen.urdf")
 giskardpy.init_giskard_interface()
+guest1 = HumanDescription("guest1")
 
 
 def p():
    with real_robot:
-        seat = True
-        attributes = False
+        seat = False
+        attributes = True
 
        if attributes:
            # to signal the start of demo
            # TalkingMotion("Hello, i am ready for the test").resolve().perform()
 
            # does the code work with the manipulation feature?
-            giskardpy.move_head_to_human()
+            #
+
-            # new Query to detect attributes of a human
            TalkingMotion("detecting attributes now").resolve().perform()
+            giskardpy.move_head_to_human()
+
+
            desig = DetectAction(technique='attributes').resolve().perform()
-            rospy.loginfo("Attributes: " + str(desig))
-            print("#####################")
-            print(desig[1].res[0].attribute)
-            gender = desig[1].res[0].attribute[0][13:19]
-            clothes = desig[1].res[0].attribute[2][20:]
-            brightness_clothes = desig[1].res[0].attribute[1]
-            hat = desig[1].res[0].attribute[3][20:]
-
-            print("#####################")
-            print(gender)
-            print("#####################")
-            print(clothes)
-            print("#####################")
-            print(brightness_clothes)
-            print("#####################")
-            print(hat)
-
-            # rospy.sleep(5)
+            print("msgs from PPP: " + str(desig[1]))
+            guest1.set_attributes(desig)
+
+
+
+            DetectAction(technique='human').resolve().perform()
+
+            describe(guest1)
+
+
+
+            rospy.sleep(3)
+
+            giskardpy.stop_looking()
+            DetectAction(technique='human', state='stop').resolve().perform()
+
+            rospy.sleep(3)
+
+            TalkingMotion("looking again").resolve().perform()
+            DetectAction(technique='human').resolve().perform()
+            giskardpy.move_head_to_human()
+
+            rospy.sleep(6)
+
+            TalkingMotion("demo over").resolve().perform()
 
        if seat:
            # new Query for free seat
diff --git a/demos/pycram_receptionist_demo/utils/new_misc.py b/demos/pycram_receptionist_demo/utils/new_misc.py
index 081efb7e1..b26caf9df 100644
--- a/demos/pycram_receptionist_demo/utils/new_misc.py
+++ b/demos/pycram_receptionist_demo/utils/new_misc.py
@@ -1,4 +1,6 @@
 import rospy
+
+from pycram.designators.action_designator import LookAtAction
 from pycram.designators.object_designator import Pose, PoseStamped, HumanDescription
 from pycram.designators.motion_designator import TalkingMotion
 from pycram.helper import axis_angle_to_quaternion
@@ -12,7 +14,7 @@
 callback = False
 
 # TODO: set to False when NLP has implemented that feature
-doorbell = True
+
 
 # Pose variables
 # Pose in front of the couch, HSR looks in direction of couch
@@ -143,11 +145,13 @@ def introduce(human1: HumanDescription, human2: HumanDescription):
    """
    if human1.pose:
+        LookAtAction(targets=[human1.pose]).resolve().perform()
        pub_pose.publish(human1.pose)
        rospy.sleep(3.5)
        TalkingMotion(f"Hey, {human1.name}").resolve().perform()
 
    if human2.pose:
+        LookAtAction(targets=[human2.pose]).resolve().perform()
        pub_pose.publish(human2.pose)
        rospy.sleep(3.5)
        TalkingMotion(f" This is {human2.name} and their favorite drink is {human2.fav_drink}").resolve().perform()
@@ -155,6 +159,7 @@ def introduce(human1: HumanDescription, human2: HumanDescription):
    TalkingMotion(f"Hey, {human2.name}").resolve().perform()
 
    if human1.pose:
+        LookAtAction(targets=[human1.pose]).resolve().perform()
        pub_pose.publish(human1.pose)
        rospy.sleep(3.5)
        TalkingMotion(f" This is {human1.name} and their favorite drink is {human1.fav_drink}").resolve().perform()
@@ -167,9 +172,12 @@ def describe(human: HumanDescription):
    the following will be stated: gender, headgear, clothing, brightness of clothes
    :param human: human to be described
    """
+    if human.pose:
+        LookAtAction(targets=[human.pose]).resolve().perform()
+        pub_pose.publish(human.pose)
 
-    # TalkingMotion(f"I will describe {human.name} further now").resolve().perform()
-    # rospy.sleep(1)
+    TalkingMotion(f"I will describe {human.name} further now").resolve().perform()
+    rospy.sleep(1)
 
    # gender
    TalkingMotion(f"i think your gender is {human.attributes[0]}").resolve().perform()
diff --git a/src/pycram/designators/object_designator.py b/src/pycram/designators/object_designator.py
index 30d274b4c..c3afef5b9 100644
--- a/src/pycram/designators/object_designator.py
+++ b/src/pycram/designators/object_designator.py
@@ -244,4 +244,4 @@ def set_attributes(self, attribute_list):
        function for setting attributes
        :param attribute_list: list with attributes: gender, headgear, kind of clothes, bright/dark clothes
        """
-        self.attributes = attribute_list
+        self.attributes = attribute_list[1]
diff --git a/src/pycram/external_interfaces/giskard.py b/src/pycram/external_interfaces/giskard.py
index fa5b1b29d..7da9ab3b6 100644
--- a/src/pycram/external_interfaces/giskard.py
+++ b/src/pycram/external_interfaces/giskard.py
@@ -480,6 +480,8 @@ def stop_looking():
    stops the move_head_to_human function so that hsr looks forward
    """
+    # cancels all goals in giskard
+    # giskard_wrapper.cancel_all_goals()
    # moves hsr in standard position
    giskard_wrapper.take_pose("park")
    giskard_wrapper.plan_and_execute(wait=False)
@@ -493,7 +495,7 @@ def move_head_to_pose(pose: PointStamped):
    """
    # TODO: necessary? there is a LookAtAction?
    # TODO: needs to be tested!
-    p_axis = Vector3Stamped
+    p_axis = Vector3Stamped()
    p_axis.vector = (0, 0, 1)
    giskard_wrapper.set_pointing_goal(goal_point=pose,
                                      tip_link="head_center_camera_frame",
@@ -507,8 +509,12 @@ def move_arm_to_pose(pose: PointStamped):
    :param pose: pose that arm will point to
    """
    # TODO: needs to be tested!
-    p_axis = Vector3Stamped
-    p_axis.vector = (0, 0, 1)
+    print("in move arm")
+    p_axis = Vector3Stamped()
+    #p_axis.header.frame_id = "/map"
+    p_axis.vector.x = 0
+    p_axis.vector.y = 1
+    p_axis.vector.z = 1
    giskard_wrapper.set_pointing_goal(goal_point=pose,
                                      tip_link="hand_gripper_tool_frame",
                                      pointing_axis=p_axis,
diff --git a/src/pycram/external_interfaces/robokudo.py b/src/pycram/external_interfaces/robokudo.py
index 35908e9a5..34bfb2d79 100644
--- a/src/pycram/external_interfaces/robokudo.py
+++ b/src/pycram/external_interfaces/robokudo.py
@@ -148,6 +148,8 @@ def feedback_callback(msg):
 def queryHuman() -> Any:
    """
    Sends a query to RoboKudo to look for a Human
+    returns a PoseStamped of the pose where the human is and keeps publishing it
+    onto the topic /human_pose
    """
    init_robokudo_interface()
    from robokudo_msgs.msg import QueryAction, QueryGoal, QueryResult
@@ -189,21 +191,11 @@ def callback(pose):
 
    rospy.Subscriber("/human_pose", PoseStamped, callback)
 
    while not human_bool:
-        rospy.loginfo_throttle(3, "Waiting for human to be detected")
-        pub = rospy.Publisher('/talk_request', Voice, queue_size=10)
-        texttospeech = Voice()
-        texttospeech.language = 1
-        texttospeech.sentence = "please step in front of me"
-        if waiting_human:
-            pub.publish(texttospeech)
-        waiting_human = True
-        rospy.sleep(5)
-        pass
+        rospy.sleep(0.5)
 
    return human_pose
 
-
 def stop_queryHuman() -> Any:
    """
    Sends a query to RoboKudo to stop human detection
diff --git a/src/pycram/process_modules/hsrb_process_modules.py b/src/pycram/process_modules/hsrb_process_modules.py
index 0eae1f14a..1566c0bdf 100644
--- a/src/pycram/process_modules/hsrb_process_modules.py
+++ b/src/pycram/process_modules/hsrb_process_modules.py
@@ -346,30 +346,41 @@ class HSRBDetectingReal(ProcessModule):
    """
 
    def _execute(self, desig: DetectingMotion.Motion) -> Any:
+        """
+        specifies the query sent to robokudo
+        :param desig.technique: if this is set to 'human', the hsr searches for a human and publishes the pose
+        to /human_pose and returns a PoseStamped of the human.
+        this value can also be set to 'attributes' or 'location' to get the attributes and pose of a human or a bool
+        that states whether a seat specified in the semantic map is taken
+        """
+
        # todo at the moment perception ignores searching for a specific object type so we do as well on real
        if desig.technique == 'human' and (desig.state == 'start' or desig.state == None):
            human_pose = queryHuman()
-            pose = Pose.from_pose_stamped(human_pose)
-            pose.position.z = 0
-            human = [Object("human", ObjectType.HUMAN, "human_male.stl", pose=pose)]
-            object_dict = {}
-
-            # Iterate over the list of objects and store each one in the dictionary
-            for i, obj in enumerate(human):
-                object_dict[obj.name] = obj
-            return object_dict
+            return human_pose
 
        elif desig.state == "stop":
            stop_queryHuman()
            return "stopped"
 
+
        elif desig.technique == 'location':
-            # TODO: test what and how Perception returns Query msg and make it fit rest of code
            seat = desig.state
            seat_human_pose = seat_queryHuman(seat)
            return seat_human_pose
 
+
        elif desig.technique == 'attributes':
            human_pose_attr = attributes_queryHuman()
-            return human_pose_attr
+
+            # extract information from query
+            gender = human_pose_attr.res[0].attribute[0][13:19]
+            if gender[0] != 'f':
+                gender = gender[:4]
+            clothes = human_pose_attr.res[0].attribute[2][20:]
+            brightness_clothes = human_pose_attr.res[0].attribute[1][5:]
+            hat = human_pose_attr.res[0].attribute[3][20:]
+            attr_list = [gender, hat, clothes, brightness_clothes]
+
+            return attr_list
 
        query_result = queryEmpty(ObjectDesignatorDescription(types=[desig.object_type]))
        perceived_objects = []