
Commit

Merge branch 'MS2' of github.com:SUTURO/pycram into MS2
Celina1272001 committed Feb 1, 2024
2 parents c52c714 + 594799b commit f7ebbc2
Showing 40 changed files with 440 additions and 1,005 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/reusable-sphinx-build.yml
@@ -3,7 +3,7 @@ name: Build Sphinx documentation 📖
on:
push:
branches:
- dev
- MS2

# ----------------------------------------------------------------------------------------------------------------------

@@ -40,4 +40,4 @@ jobs:
working-directory: ./doc
run: |
sudo apt install pandoc
make html
make html
24 changes: 0 additions & 24 deletions demos/hsrb-real-test-demos/gripper-real.py

This file was deleted.

49 changes: 0 additions & 49 deletions demos/hsrb-real-test-demos/rigid-body-test.py

This file was deleted.

88 changes: 0 additions & 88 deletions demos/hsrb_test_demos/pick-up-semi-real.py

This file was deleted.

File renamed without changes.
File renamed without changes.
@@ -1,20 +1,15 @@
import rospy
from geometry_msgs.msg import PoseStamped
from robokudo_msgs.msg import QueryActionGoal
from pycram.designators.action_designator import DetectAction, LookAtAction, NavigateAction
from pycram.designators.motion_designator import TalkingMotion, MoveMotion
from pycram.external_interfaces import robokudo
from pycram.process_module import simulated_robot, with_simulated_robot, real_robot, with_real_robot, semi_real_robot
from pycram.designators.action_designator import DetectAction, NavigateAction
from pycram.designators.motion_designator import TalkingMotion
from pycram.process_module import real_robot
import pycram.external_interfaces.giskard as giskardpy
from pycram.ros.robot_state_updater import RobotStateUpdater
from pycram.ros.viz_marker_publisher import VizMarkerPublisher
from pycram.designators.location_designator import *
from pycram.designators.object_designator import *
from pycram.enums import ObjectType
from pycram.bullet_world import BulletWorld, Object
from pycram.external_interfaces.knowrob import instances_of, get_guest_info
from std_msgs.msg import String, Bool
import talk_actions
from demos.pycram_receptionist_demo.deprecated import talk_actions

world = BulletWorld("DIRECT")
# publishes a MarkerArray on the /pycram/viz_marker topic
131 changes: 131 additions & 0 deletions demos/pycram_receptionist_demo/MS2_example-v.py
@@ -0,0 +1,131 @@
import rospy

from pycram.designators.action_designator import DetectAction, NavigateAction
from pycram.designators.motion_designator import TalkingMotion
from pycram.fluent import Fluent
from pycram.helper import axis_angle_to_quaternion
from pycram.process_module import real_robot
import pycram.external_interfaces.giskard as giskardpy
from pycram.ros.robot_state_updater import RobotStateUpdater
from pycram.ros.viz_marker_publisher import VizMarkerPublisher
from pycram.designators.location_designator import *
from pycram.designators.object_designator import *
from pycram.bullet_world import BulletWorld, Object
from std_msgs.msg import String
from demos.pycram_receptionist_demo.deprecated import talk_actions
import pycram.external_interfaces.navigate as moveBase

world = BulletWorld("DIRECT")
# publishes a MarkerArray on the /pycram/viz_marker topic
v = VizMarkerPublisher()

robot = Object("hsrb", "robot", "../../resources/" + robot_description.name + ".urdf")
robot_desig = ObjectDesignatorDescription(names=["hsrb"]).resolve()
robot.set_color([0.5, 0.5, 0.9, 1])

# careful that you spawn the correct kitchen
kitchen = Object("kitchen", "environment", "kitchen.urdf")
giskardpy.init_giskard_interface()
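# keep the robot object in the BulletWorld in sync with the real robot via TF and the Giskard joint states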
RobotStateUpdater("/tf", "/giskard_joint_states")

robot_orientation_couch = axis_angle_to_quaternion([0, 0, 1], 0)
pose_couch = Pose([3, 5, 0], robot_orientation_couch)

robot_orientation_from_couch = axis_angle_to_quaternion([0, 0, 1], -90)
pose_from_couch = Pose([4.2, 3.8, 0], robot_orientation_from_couch)

robot_orientation = axis_angle_to_quaternion([0, 0, 1], 90)
pose_kitchen_to_couch = Pose([4.2, 3, 0], robot_orientation)

pose_home = Pose([3, 1.7, 0], robot_orientation)

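# publisher that tells the NLP pipeline to start listening (used in demo_test below)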
pub_nlp = rospy.Publisher('/startListener', String, queue_size=10)


class HumanDescription:

    def __init__(self, name, fav_drink):
        # TODO: coordinate with Perception on what is easy to implement
        # characteristics to consider: height, hair color, and age.
        self.human_pose = Fluent()
        self.name = name
        self.fav_drink = fav_drink
        # self.shirt_color = shirt_color
        # self.gender = gender

        self.human_pose_sub = rospy.Subscriber("/human_pose", String, self.human_pose_cb)

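    # callback: store the latest human pose message in the Fluent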
    def human_pose_cb(self, HumanPoseMsg):
        self.human_pose.set_value(HumanPoseMsg.data)



def demo_test(area):
    with real_robot:
        # Guest arrives:
        TalkingMotion("Hello, I am Toya, let me open my eyes for you").resolve().perform()
        # Perception: detect the first guest (detect first, then listen)
        DetectAction(technique='default', state='start').resolve().perform()

        host = HumanDescription("Bob", fav_drink="Coffee")

        # loop until a human is detected
        while not host.human_pose.get_value():
            TalkingMotion("Please step in front of me").resolve().perform()
            rospy.sleep(5)

        rospy.loginfo("human detected")

        # look at the guest and introduce the robot
        giskardpy.move_head_to_human()
        TalkingMotion("Hello, I am Toya and my favorite drink is oil. What about you, talk to me?").resolve().perform()

        # is a sleep of 1 second enough?
        rospy.sleep(1)

        # signal to start listening
        pub_nlp.publish("start listening")
        # TODO: only continue once Knowledge has sent the data

        TalkingMotion("Hey, I will stop looking now").resolve().perform()

        rospy.loginfo("stop looking now")
        giskardpy.stop_looking()
        rospy.loginfo("Navigating now")
        TalkingMotion("navigating to the couch area now, please step away").resolve().perform()

        if area == 'to_couch':
            NavigateAction([pose_kitchen_to_couch]).resolve().perform()
            NavigateAction([pose_couch]).resolve().perform()
        elif area == 'from_couch':
            NavigateAction([pose_from_couch]).resolve().perform()
            NavigateAction([pose_home]).resolve().perform()

        # failure handling
        # rospy.Subscriber("nlp_feedback", Bool, talk_error)

        # receives name and drink via topic
        # rospy.Subscriber("nlp_out", String, talk_request)


def nav_test():
    with real_robot:
        robot_orientation = axis_angle_to_quaternion([0, 0, 1], 90)
        test_pose1 = Pose([4.2, 3, 0], robot_orientation)
        test_pose = Pose([3, 5, 0], [0, 0, 0, 1])
        moveBase.queryPoseNav(test_pose1)
        moveBase.queryPoseNav(test_pose)

# demo_test('from_couch')
# demo_test('to_couch')

# receives name and drink via topic
# rospy.Subscriber("nlp_out", String, talk_request)


# 1. rasa run --enable-api -> start Rasa Server
# 2. python3 activate_language_processing.py -> NLP
# 3. roslaunch suturo_bringup suturo_bringup.launch -> Map
# 4. roslaunch hsr_velocity_controller unload_my_controller.launch
# 5. roslaunch giskardpy giskardpy_hsr_real_vel.launch -> start Giskard
# 6. rosrun robokudo main.py _ae=humandetection_demo_ros_pkg=milestone1 -> Perception
# 7. run demo in Pycharm -> Planning
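
The TODO in demo_test leaves open how to continue only once the NLP/Knowledge side has sent its data. Below is a minimal sketch of one possible way to block on that answer with plain rospy, assuming the result arrives as a single String on the "nlp_out" topic referenced in the commented-out subscriber; the comma-separated message format and the wait_for_guest_info helper are illustrative assumptions, not part of this commit.

import rospy
from std_msgs.msg import String

def wait_for_guest_info(timeout=20.0):
    # Block until the NLP side publishes its result on "nlp_out".
    # Assumed message format: one comma-separated String, e.g. "Bob,Coffee".
    try:
        msg = rospy.wait_for_message("nlp_out", String, timeout=timeout)
    except rospy.ROSException:
        # no answer within the timeout -> let the caller decide to ask again
        return None, None
    parts = [part.strip() for part in msg.data.split(",", 1)]
    name = parts[0]
    drink = parts[1] if len(parts) > 1 else None
    return name, drink

# possible usage inside demo_test, right after pub_nlp.publish("start listening"):
#   name, drink = wait_for_guest_info()
#   if name:
#       TalkingMotion("Nice to meet you " + name).resolve().perform()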
