[Receptionist] tested everything together, fixed attribute detection, cleaned up perception human query, human query faster now -j
Celina1272001 committed Apr 17, 2024
1 parent 6f2c059 commit 5c062c6
Showing 8 changed files with 161 additions and 121 deletions.
48 changes: 19 additions & 29 deletions demos/pycram_receptionist_demo/Ms3_receptionist_demo.py
@@ -1,4 +1,4 @@
from pycram.designators.action_designator import DetectAction, NavigateAction
from pycram.designators.action_designator import DetectAction, NavigateAction, OpenAction
from demos.pycram_receptionist_demo.utils.new_misc import *
from pycram.process_module import real_robot
import pycram.external_interfaces.giskard as giskardpy
@@ -19,14 +19,16 @@
kitchen = Object("kitchen", "environment", "kitchen.urdf")
giskardpy.init_giskard_interface()
RobotStateUpdater("/tf", "/giskard_joint_states")
kitchen_desig = ObjectDesignatorDescription(names=["kitchen"])

# variables for communication with nlp
pub_nlp = rospy.Publisher('/startListener', String, queue_size=10)
response = ""
callback = False
doorbell = True

# Declare variables for humans
host = HumanDescription("Yannis", fav_drink="ice tea")
host = HumanDescription("Bob", fav_drink="coffee")
guest1 = HumanDescription("guest1")
guest2 = HumanDescription("guest2")
seat_number = 2
@@ -48,22 +50,24 @@ def data_cb(data):

while not doorbell:
# TODO: spin or sleep better?
# TODO: Failure Handling, when no bell is heard for a longer period of time
rospy.spin()
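
The two TODOs above ask whether rospy.spin() is the right primitive here and what to do when no bell is heard. Note that doorbell is initialized to True further up, so this loop body currently never runs. A minimal sketch (not part of this commit) of a polled wait with a timeout; the timeout value and fallback behavior are assumptions:

def wait_for_doorbell(timeout_s=60.0, poll_hz=10):
    """Return True if the doorbell flag is set within timeout_s seconds (sketch)."""
    rate = rospy.Rate(poll_hz)
    deadline = rospy.Time.now() + rospy.Duration(timeout_s)
    while not rospy.is_shutdown() and rospy.Time.now() < deadline:
        if doorbell:  # flag toggled by a doorbell subscriber callback
            return True
        rate.sleep()  # yield instead of busy-waiting
    return False

Unlike rospy.spin(), which blocks until shutdown, a polled flag (or a pycram Fluent with a wait, as imported elsewhere in this repo) leaves room for a timeout.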

# TODO: find name of door handle
# link in rviz: iai_kitchen:arena:door_handle_inside
# door_handle_desig = ObjectPart(names=["door_handle_inside"], part_of=kitchen_desig.resolve())
# OpenAction(object_designator_description=door_handle_desig, arms=["left"]).resolve().perform()
# NavigateAction([pose_door]).resolve().perform()
# giskardpy.opendoor()

TalkingMotion("Welcome, please come in").resolve().perform()
TalkingMotion("Welcome, please step in").resolve().perform()

# look for human
human_desig = DetectAction(technique='attributes', state='start').resolve().perform()
attr_list = DetectAction(technique='attributes', state='start').resolve().perform()
rospy.loginfo("human detected")

# TODO: check what perception returns exactly
attr_list = human_desig.attribute
guest1.set_attributes(attr_list)
print(attr_list)
guest1.set_attributes(human_desig.attribute)

DetectAction(technique='human').resolve().perform()

# look at guest and introduce
giskardpy.move_head_to_human()
Expand All @@ -80,7 +84,7 @@ def data_cb(data):
if response[0] == "<GUEST>":
# success a name and intent was understood
if response[1] != "<None>":
TalkingMotion("it is so noisy here, please confirm if i got your name right").resolve().perform()
TalkingMotion("please confirm if i got your name right").resolve().perform()
guest1.set_drink(response[2])
rospy.sleep(1)
guest1.set_name(name_confirm(response[1]))
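
The branch above implies the NLP pipeline publishes a list like ["<GUEST>", name, drink] on nlp_out. A hedged sketch (not part of this commit) of the publish, wait, confirm handshake; the retry count and fallback utterance are assumptions:

def receive_guest_info(max_attempts=3):
    """Trigger listening and wait for data_cb to fill response (sketch)."""
    global callback
    for _ in range(max_attempts):
        callback = False
        pub_nlp.publish("start listening")  # signal NLP node to start listening
        while not callback and not rospy.is_shutdown():
            rospy.sleep(0.1)  # wait for data_cb to fire
        if response and response[0] == "<GUEST>" and response[1] != "<None>":
            return response[1], response[2]  # (name, favorite drink)
        TalkingMotion("I did not catch that, please repeat").resolve().perform()
    return None, None
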
@@ -132,37 +136,22 @@ def data_cb(data):
NavigateAction([pose_couch]).resolve().perform()
TalkingMotion("Welcome to the living room").resolve().perform()
rospy.sleep(1)
TalkingMotion("please take a seat next to your host").resolve().perform()

# search for free place to sit and host
# TODO: Failure Handling: scan room if no human detected on couch
for i in range(seat_number):
state = "seat" + str(i)
seat_desig = DetectAction(technique='location', state=state).resolve().perform()
print(seat_desig)
# TODO get pose of occupied seat
if not seat_desig.occupied:
guest1.set_pose(seat_desig.pose)
# point to free place
# giskardpy.point_to_seat
TalkingMotion("please sit over there").resolve().perform()

# failure handling if all seats are taken
elif i+1 == seat_number:
guest1.set_pose(seat_desig.pose)
# point to free place
# giskardpy.point_to_seat
TalkingMotion("please sit over there").resolve().perform()


# TODO: is it ok to seat the guest before introducing?

pose_host = PoseStamped()
pose_host.header.frame_id = 'map'
pose_host.header.frame_id = '/map'
pose_host.pose.position.x = 1.0
pose_host.pose.position.y = 5.9
pose_host.pose.position.z = 0.9

pose_guest = PoseStamped()
pose_guest.header.frame_id = 'map'
pose_guest.header.frame_id = '/map'
pose_guest.pose.position.x = 1.0
pose_guest.pose.position.y = 4.7
pose_guest.pose.position.z = 1.0
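
The hard-coded PoseStamped blocks above repeat the same boilerplate. A small helper sketch (not part of this commit); the coordinates and frame name are taken from the surrounding code:

from geometry_msgs.msg import PoseStamped

def make_pose(x, y, z, frame_id='/map'):
    """Build a PoseStamped at (x, y, z) in frame_id (sketch)."""
    pose = PoseStamped()
    pose.header.frame_id = frame_id
    pose.pose.position.x = x
    pose.pose.position.y = y
    pose.pose.position.z = z
    return pose

pose_host = make_pose(1.0, 5.9, 0.9)
pose_guest = make_pose(1.0, 4.7, 1.0)
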
@@ -173,4 +162,5 @@ def data_cb(data):

# introduce humans and look at them
introduce(host, guest1)
describe(guest1)

96 changes: 62 additions & 34 deletions demos/pycram_receptionist_demo/tests/MS3_testable.py
@@ -1,4 +1,5 @@
import rospy
from geometry_msgs.msg import PointStamped

from pycram.designators.action_designator import DetectAction, NavigateAction
from demos.pycram_receptionist_demo.utils.new_misc import *
@@ -27,8 +28,9 @@
response = ""
callback = False


# Declare variables for humans
host = HumanDescription("Yannis", fav_drink="ice tea")
host = HumanDescription("Angel", fav_drink="ice tea")
guest1 = HumanDescription("guest1")
guest2 = HumanDescription("guest2")
seat_number = 2
@@ -41,6 +43,7 @@ def data_cb(data):
response.append("None")
callback = True


def demo_tst():
"""
testing HRI, introduction, and navigation
@@ -50,13 +53,18 @@ def demo_tst():
global response
test_all = True

TalkingMotion("Hello").resolve().perform()
giskardpy.move_head_to_human()

rospy.Subscriber("nlp_out", String, data_cb)
desig = DetectAction(technique='attributes').resolve().perform()
guest1.set_attributes(desig)

DetectAction(technique='human', state='start').resolve().perform()
rospy.loginfo("human detected")

giskardpy.move_head_to_human()
TalkingMotion("Hello, i am Toya and my favorite drink is oil. What about you, talk to me?").resolve().perform()
rospy.sleep(0.9)
TalkingMotion("I am Toya and my favorite drink is oil. What about you, talk to me loud and clear?").resolve().perform()
rospy.sleep(1.2)

# signal to start listening
pub_nlp.publish("start listening")
@@ -67,7 +75,7 @@ def demo_tst():

if response[0] == "<GUEST>":
if response[1] != "<None>":
TalkingMotion("it is so noisy here, please confirm if i got your name right").resolve().perform()
TalkingMotion("please confirm if i got your name right").resolve().perform()
guest1.set_drink(response[2])
rospy.sleep(1)
guest1.set_name(name_confirm(response[1]))
@@ -116,10 +124,18 @@ def demo_tst():
TalkingMotion("Welcome to the living room").resolve().perform()
rospy.sleep(1)

# hard coded poses for seat1 as PointStamped
pose_seat = PointStamped()
pose_seat.header.frame_id = "/map"
pose_seat.point.x = 1.1
pose_seat.point.y = 4.7
pose_seat.point.z = 1

giskardpy.move_arm_to_pose(pose_seat)
TalkingMotion("please take a seat next to your host").resolve().perform()
rospy.sleep(2)
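
The build-point, gesture, announce sequence above recurs in demo_tst2 below. A hedged helper sketch (not part of this commit), reusing giskardpy.move_arm_to_pose and the PointStamped import from the top of this file; the helper name is hypothetical:

def point_to_seat(x, y, z, frame_id="/map"):
    """Point the arm at a seat and ask the guest to sit down (sketch)."""
    seat = PointStamped()
    seat.header.frame_id = frame_id
    seat.point.x = x
    seat.point.y = y
    seat.point.z = z
    giskardpy.move_arm_to_pose(seat)  # arm gesture toward the seat
    TalkingMotion("please take a seat next to your host").resolve().perform()
    rospy.sleep(2)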

# hard coded poses for seat1 and seat2
# hard coded poses for seat1 and seat2 as PoseStamped
pose_host = PoseStamped()
pose_host.header.frame_id = "/map"
pose_host.pose.position.x = 1
@@ -147,45 +163,57 @@ def demo_tst2():
just testing the gazing between humans -> introduce function
"""
with real_robot:
jule = False

TalkingMotion("Welcome, please come in").resolve().perform()

# look for human
# TODO: test new technique
#DetectAction(technique='human', state='start').resolve().perform()
rospy.loginfo("human detected")
#giskardpy.move_head_to_human()
#rospy.sleep(7)
#DetectAction(technique='human', state='stop').resolve().perform()
pose_seat = PointStamped()
pose_seat.header.frame_id = "/map"
pose_seat.point.x = 1.1
pose_seat.point.y = 4.7
pose_seat.point.z = 1

pose_host = PoseStamped()
pose_host.header.frame_id = 'map'
pose_host.pose.position.x = 1.0
pose_host.pose.position.y = 5.9
pose_host.pose.position.z = 0.9
giskardpy.move_arm_to_pose(pose_seat)
TalkingMotion("please take a seat next to your host").resolve().perform()
rospy.sleep(2)

pose_guest = PoseStamped()
pose_guest.header.frame_id = 'map'
pose_guest.pose.position.x = 1.0
pose_guest.pose.position.y = 4.7
pose_guest.pose.position.z = 1.0
if jule:
# look for human
# TODO: test new technique
#DetectAction(technique='human', state='start').resolve().perform()
rospy.loginfo("human detected")
#giskardpy.move_head_to_human()
#rospy.sleep(7)
#DetectAction(technique='human', state='stop').resolve().perform()

host.set_pose(pose_host)
pose_host = PoseStamped()
pose_host.header.frame_id = 'map'
pose_host.pose.position.x = 1.0
pose_host.pose.position.y = 5.9
pose_host.pose.position.z = 0.9

guest1.set_pose(pose_guest)
pose_guest = PoseStamped()
pose_guest.header.frame_id = 'map'
pose_guest.pose.position.x = 1.0
pose_guest.pose.position.y = 4.7
pose_guest.pose.position.z = 1.0

# introduce humans and look at them
giskardpy.move_head_to_human()
rospy.sleep(3)
introduce(host, guest1)
host.set_pose(pose_host)

rospy.sleep(2)
TalkingMotion("Introducing again").resolve().perform()
rospy.sleep(2)
introduce(host, guest1)
guest1.set_pose(pose_guest)

# introduce humans and look at them
giskardpy.move_head_to_human()
rospy.sleep(3)
introduce(host, guest1)

rospy.sleep(2)
TalkingMotion("Introducing again").resolve().perform()
rospy.sleep(2)
introduce(host, guest1)


demo_tst()

demo_tst()
# demo_tst2()

63 changes: 34 additions & 29 deletions demos/pycram_receptionist_demo/tests/perception_humans.py
@@ -1,19 +1,13 @@
import rospy

from pycram.designators.action_designator import DetectAction, NavigateAction
from pycram.designators.motion_designator import TalkingMotion
from pycram.fluent import Fluent
from pycram.helper import axis_angle_to_quaternion
from pycram.process_module import semi_real_robot, real_robot
import pycram.external_interfaces.giskard as giskardpy
from pycram.ros.robot_state_updater import RobotStateUpdater
from pycram.ros.viz_marker_publisher import VizMarkerPublisher
from pycram.designators.location_designator import *
from pycram.designators.object_designator import *
from pycram.bullet_world import BulletWorld, Object
from std_msgs.msg import String
from demos.pycram_receptionist_demo.deprecated import talk_actions
import pycram.external_interfaces.navigate as moveBase
from demos.pycram_receptionist_demo.utils.new_misc import *

world = BulletWorld("DIRECT")
# /pycram/viz_marker topic with MarkerArray
@@ -26,42 +20,53 @@
# careful that you spawn the correct kitchen
kitchen = Object("kitchen", "environment", "kitchen.urdf")
giskardpy.init_giskard_interface()
guest1 = HumanDescription("guest1")



def p():
with real_robot:
seat = True
attributes = False
seat = False
attributes = True

if attributes:
# to signal the start of demo
# TalkingMotion("Hello, i am ready for the test").resolve().perform()

# does the code work with the manipulation feature?
giskardpy.move_head_to_human()
#


# new Query to detect attributes of a human
TalkingMotion("detecting attributes now").resolve().perform()
giskardpy.move_head_to_human()


desig = DetectAction(technique='attributes').resolve().perform()
rospy.loginfo("Attributes: " + str(desig))
print("#####################")
print(desig[1].res[0].attribute)
gender = desig[1].res[0].attribute[0][13:19]
clothes = desig[1].res[0].attribute[2][20:]
brightness_clothes = desig[1].res[0].attribute[1]
hat = desig[1].res[0].attribute[3][20:]

print("#####################")
print(gender)
print("#####################")
print(clothes)
print("#####################")
print(brightness_clothes)
print("#####################")
print(hat)
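
The removed lines above slice the attribute strings at magic offsets ([13:19], [20:]), which silently breaks if perception rewords its output. A hedged alternative sketch (not part of this commit), assuming each entry is formatted like "label: value":

def parse_attributes(attribute_strings):
    """Split 'label: value' attribute strings into a dict (sketch)."""
    parsed = {}
    for entry in attribute_strings:
        if ":" in entry:
            key, value = entry.split(":", 1)  # split on first colon only
            parsed[key.strip()] = value.strip()
    return parsed

# e.g. attrs = parse_attributes(desig[1].res[0].attribute)
#      gender, hat = attrs.get("gender"), attrs.get("hat")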

# rospy.sleep(5)
print("msgs from PPP: " + str(desig[1]))
guest1.set_attributes(desig)



DetectAction(technique='human').resolve().perform()

describe(guest1)



rospy.sleep(3)

giskardpy.stop_looking()
DetectAction(technique='human', state='stop').resolve().perform()

rospy.sleep(3)

TalkingMotion("looking again").resolve().perform()
DetectAction(technique='human').resolve().perform()
giskardpy.move_head_to_human()

rospy.sleep(6)

TalkingMotion("demo over").resolve().perform()

if seat:
# new Query for free seat
