[Receptionist] describing a human demo for event tomorrow/demo
Celina1272001 committed Apr 8, 2024
1 parent 43c3f88 commit 6f2c059
Showing 3 changed files with 218 additions and 30 deletions.
169 changes: 169 additions & 0 deletions demos/pycram_receptionist_demo/Tab_Demo.py
@@ -0,0 +1,169 @@
from pycram.designators.action_designator import DetectAction, NavigateAction
from demos.pycram_receptionist_demo.utils.new_misc import *
from pycram.process_module import real_robot
import pycram.external_interfaces.giskard as giskardpy
from pycram.ros.robot_state_updater import RobotStateUpdater
from pycram.ros.viz_marker_publisher import VizMarkerPublisher
from pycram.designators.location_designator import *
from pycram.designators.object_designator import *
from pycram.bullet_world import BulletWorld, Object
import rospy
from std_msgs.msg import String, Bool

world = BulletWorld("DIRECT")
v = VizMarkerPublisher()

robot = Object("hsrb", "robot", "../../resources/" + robot_description.name + ".urdf")
robot_desig = ObjectDesignatorDescription(names=["hsrb"]).resolve()
robot.set_color([0.5, 0.5, 0.9, 1])

kitchen = Object("kitchen", "environment", "kitchen.urdf")
giskardpy.init_giskard_interface()
RobotStateUpdater("/tf", "/giskard_joint_states")

# variables for communication with NLP
pub_nlp = rospy.Publisher('/startListener', String, queue_size=10)
response = ""
callback = False
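# response stores the last parsed NLP message; callback flags that a new one has arrived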

# Declare variables for humans
host = HumanDescription("Yannis", fav_drink="ice tea")
guest1 = HumanDescription("guest1")
guest2 = HumanDescription("guest2")
seat_number = 2


def data_cb(data):
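    # the NLP node is expected to publish a comma-separated string
    # (e.g. "<GUEST>, <name>, <drink>" -- layout assumed from the checks in hri() below)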
global response
global callback

response = data.data.split(",")
response.append("None")
callback = True


def describe_human():
"""
testing HRI and introduction and navigating
"""
with real_robot:

giskardpy.move_head_to_human()
while True:
desig = DetectAction(technique='attributes').resolve().perform()

# extract information from query
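            # the attribute entries are plain strings with fixed wording; the slice
            # offsets below assume that exact format and will break if it changes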
gender = desig[1].res[0].attribute[0][13:19]
if gender[0] != 'f':
gender = gender[:4]
print("gender: " + str(gender))
clothes = desig[1].res[0].attribute[2][20:]
brightness_clothes = desig[1].res[0].attribute[1][5:]
hat = desig[1].res[0].attribute[3][20:]

TalkingMotion("Hello, i am Toya and i learned to describe humans better this week").resolve().perform()
DetectAction(technique='human', state='start').resolve().perform()

TalkingMotion("let me show you. I will try to describe you").resolve().perform()

# gender, headgear, clothing, brightness of clothes
guest1.set_attributes([gender, hat, clothes, brightness_clothes])
rospy.sleep(2)

describe(guest1)
rospy.sleep(2)

TalkingMotion("i hope that was accurate").resolve().perform()
rospy.sleep(1)

while True:
                x = input("start again? (type 'y'): ")
if x == 'y':
break




def lead_to_room():
"""
short 'demo' where Toya leads someone to the living room
"""
with real_robot:
TalkingMotion("i can show you the living room, if you like").resolve().perform()
rospy.sleep(2)
TalkingMotion("Just follow me").resolve().perform()
rospy.sleep(1)

# lead human to living room
NavigateAction([pose_kitchen_to_couch]).resolve().perform()
NavigateAction([pose_couch]).resolve().perform()

rospy.sleep(1)
TalkingMotion("Welcome to the living room").resolve().perform()


def hri():
"""
testing HRI and introduction and navigating
"""

with real_robot:

global callback
global response
test_all = True

rospy.Subscriber("nlp_out", String, data_cb)
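        # data_cb fills the global response/callback variables with parsed NLP output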
DetectAction(technique='human', state='start').resolve().perform()
rospy.loginfo("human detected")

giskardpy.move_head_to_human()
TalkingMotion("Hello, i am Toya and my favorite drink is oil. What about you, talk to me?").resolve().perform()
rospy.sleep(0.9)

# signal to start listening
pub_nlp.publish("start listening")

while not callback:
rospy.sleep(1)
callback = False

if response[0] == "<GUEST>":
if response[1] != "<None>":
TalkingMotion("it is so noisy here, please confirm if i got your name right").resolve().perform()
guest1.set_drink(response[2])
rospy.sleep(1)
guest1.set_name(name_confirm(response[1]))

else:
# save heard drink
guest1.set_drink(response[2])

# ask for name again once
guest1.set_name(name_repeat())

# confirm favorite drink
guest1.set_drink(drink_confirm(guest1.fav_drink))

else:
# two chances to get name and drink
i = 0
while i < 2:
TalkingMotion("please repeat your name and drink loud and clear").resolve().perform()
pub_nlp.publish("start")

while not callback:
rospy.sleep(1)
callback = False

if response[0] == "<GUEST>":
guest1.set_name(response[1])
guest1.set_drink(response[2])
break
else:
i += 1


if __name__ == '__main__':
describe_human()
# lead_to_room()
# hri()
67 changes: 43 additions & 24 deletions demos/pycram_receptionist_demo/tests/perception_humans.py
@@ -31,30 +31,49 @@

def p():
with real_robot:

# to signal the start of demo
TalkingMotion("Hello, i am ready for the test").resolve().perform()

# does the code work with the manipulation feature?
giskardpy.move_head_to_human()

# new Query to detect attributes of a human
# TalkingMotion("detecting attributes now").resolve().perform()
# attributes = DetectAction(technique='attributes').resolve().perform()
# rospy.loginfo("Attributes: " + str(attributes))

# rospy.sleep(5)

# new Query for free seat
TalkingMotion("detecting free seat now").resolve().perform()
seat = DetectAction(technique='location', state='seat2').resolve().perform()
rospy.loginfo("seat bool: " + str(seat))
rospy.sleep(1)

TalkingMotion("detecting free seat number 2 now").resolve().perform()
seat = DetectAction(technique='location', state='seat1').resolve().perform()
rospy.loginfo("seat bool: " + str(seat))
rospy.sleep(3)
seat = True
attributes = False
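        # simple switches for which part of the perception test is run below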

if attributes:
# to signal the start of demo
# TalkingMotion("Hello, i am ready for the test").resolve().perform()

# does the code work with the manipulation feature?
giskardpy.move_head_to_human()

# new Query to detect attributes of a human
TalkingMotion("detecting attributes now").resolve().perform()
desig = DetectAction(technique='attributes').resolve().perform()
rospy.loginfo("Attributes: " + str(desig))
print("#####################")
print(desig[1].res[0].attribute)
gender = desig[1].res[0].attribute[0][13:19]
clothes = desig[1].res[0].attribute[2][20:]
brightness_clothes = desig[1].res[0].attribute[1]
hat = desig[1].res[0].attribute[3][20:]

print("#####################")
print(gender)
print("#####################")
print(clothes)
print("#####################")
print(brightness_clothes)
print("#####################")
print(hat)

# rospy.sleep(5)

if seat:
# new Query for free seat
TalkingMotion("detecting free seat now").resolve().perform()
seat = DetectAction(technique='location', state='seat2').resolve().perform()
rospy.loginfo("seat bool: " + str(seat))
rospy.sleep(1)

TalkingMotion("detecting free seat number 2 now").resolve().perform()
seat = DetectAction(technique='location', state='seat1').resolve().perform()
rospy.loginfo("seat bool: " + str(seat))
rospy.sleep(3)


print("end")
12 changes: 6 additions & 6 deletions demos/pycram_receptionist_demo/utils/new_misc.py
@@ -161,28 +161,28 @@ def introduce(human1: HumanDescription, human2: HumanDescription):

rospy.sleep(3)

def describe(human: HumanDescription):
def describe(human: HumanDescription):
"""
HRI-function for describing a human more detailed.
the following will be stated: gender, headgear, clothing, brightness of clothes
:param human: human to be described
"""

TalkingMotion(f"I will describe {human.name} further now").resolve().perform()
rospy.sleep(1)
# TalkingMotion(f"I will describe {human.name} further now").resolve().perform()
# rospy.sleep(1)

# gender
TalkingMotion(f"i think your gender is {human.attributes[0]}").resolve().perform()
rospy.sleep(1)

# headgear or not
TalkingMotion(f"you are wearing {human.attributes[1]}").resolve().perform()
TalkingMotion(f"you are {human.attributes[1]}").resolve().perform()
rospy.sleep(1)

# kind of clothes
TalkingMotion(f"you are wearing {human.attributes[2]}").resolve().perform()
TalkingMotion(f"you are {human.attributes[2]}").resolve().perform()
rospy.sleep(1)

# brightness of clothes
TalkingMotion(f"your clothes are {human.attributes[3]}").resolve().perform()
TalkingMotion(f"you are wearing {human.attributes[3]}").resolve().perform()
rospy.sleep(1)
