Skip to content

Commit

Permalink
tested try_pick_up and added comments
Browse files Browse the repository at this point in the history
  • Loading branch information
Celina1272001 committed Mar 7, 2024
1 parent 1a58151 commit 2f4d869
Show file tree
Hide file tree
Showing 6 changed files with 43 additions and 24 deletions.
4 changes: 2 additions & 2 deletions demos/pycram_hsrb_real_test_demos/pick-up-real.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
CUTLERY = ["Spoon", "Fork", "Knife", "Plasticknife"]

# Wished objects for the Demo
#wished_sorted_obj_list = ["Metalplate", "Bowl", "Metalmug", "Fork", "Cerealbox"]
wished_sorted_obj_list = ["Milkpack"]
wished_sorted_obj_list = ["Metalplate", "Bowl", "Metalmug", "Fork", "Cerealbox"]


# length of wished list for failure handling
LEN_WISHED_SORTED_OBJ_LIST = len(wished_sorted_obj_list)
Expand Down
23 changes: 12 additions & 11 deletions demos/pycram_hsrb_real_test_demos/utils/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,27 +78,28 @@ def try_pick_up(robot: BulletWorld.robot, obj: ObjectDesignatorDescription, gras
PickUpAction(obj, ["left"], [grasps]).resolve().perform()
except (EnvironmentUnreachable, GripperClosedCompletely):
print("try pick up again")
TalkingMotion("Try pick up again")
TalkingMotion("Try pick up again").resolve().perform()
# after failed attempt to pick up the object, the robot moves 30cm back on x pose
# TODO: x-pose und orentation sollten allgemein sein
NavigateAction(
[Pose([robot.get_pose().position.x + 0.3, robot.get_pose().position.y,
robot.get_pose().position.z])]).resolve().perform()
robot.get_pose().position.z], [0, 0, 1, 0])]).resolve().perform()
ParkArmsAction([Arms.LEFT]).resolve().perform()
# try to detect the object again
object_desig = DetectAction(technique='default').resolve().perform()
# TODO nur wenn key (name des vorherigen objektes) in object_desig enthalten ist
# TODO umschreiben, geht so nicht mehr, da das dict in einem tupel ist
new_object = object_desig[1][obj.type]
print(new_object)
# when the robot just grasped next to the object
# TODO wieso unterscheiden wir hier überhaupt, wenn er daneben gegriffen hat, hat er das objekt
# TODO wahrscheinlich verschoben und sollte auch nochmal perceiven
new_object = sort_objects(robot, object_desig, [obj.type])[0]

# second try to pick up the object
try:
TalkingMotion("try again").resolve().perform()
PickUpAction(new_object, ["left"], [grasps]).resolve().perform()
# ask for human interaction if it fails a second time
except:
TalkingMotion(f"Can you pleas give me the {obj.type} on the table?")
except (EnvironmentUnreachable, GripperClosedCompletely):
NavigateAction(
[Pose([robot.get_pose().position.x + 0.3, robot.get_pose().position.y,
robot.get_pose().position.z], [0, 0, 1, 0])]).resolve().perform()
ParkArmsAction([Arms.LEFT]).resolve().perform()
TalkingMotion(f"Can you pleas give me the {obj.type} on the table?").resolve().perform()
MoveGripperMotion("open", "left").resolve().perform()
time.sleep(4)
MoveGripperMotion("close", "left").resolve().perform()
15 changes: 6 additions & 9 deletions demos/pycram_receptionist_demo/M2_Demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
RobotStateUpdater("/tf", "/giskard_joint_states")

robot_orientation_couch = axis_angle_to_quaternion([0, 0, 1], 180)
pose_couch = Pose([3, 5, 0])
pose_couch = Pose([3, 5, 0], [0, 0, 1, 0])

robot_orientation_from_couch = axis_angle_to_quaternion([0, 0, 1], -90)
pose_from_couch = Pose([4.2, 3.8, 0], robot_orientation_from_couch)
Expand All @@ -53,7 +53,6 @@ def demo_ms2(area):
guest1 = HumanDescription("guest1")

# look for human
# TODO: only continue when human stands for longer than 3s in front of robot
DetectAction(technique='human', state='start').resolve().perform()
rospy.loginfo("human detected")

Expand All @@ -68,18 +67,17 @@ def demo_ms2(area):
# wait for human to say something
rospy.sleep(15)

# guest_data format is = ['person infos: "name', 'drink"']
# guest_data format is = [ "name", "drink"]
guest_data = get_guest_info("1.0")
print(get_guest_info("1.0"))
while guest_data == ['person_infos: "No name saved under this ID!"']:
while guest_data == [' "No name saved under this ID!"']:
talk_error("no name")
rospy.sleep(13)
guest_data = get_guest_info("1.0")

# set heard name and drink of guest
guest1.set_name(guest_data[0][13:])
guest1.set_name(guest_data[0])
guest1.set_drink(guest_data[1])
rospy.loginfo("after while")
talk_request(guest_data)
rospy.sleep(1)

Expand All @@ -96,7 +94,7 @@ def demo_ms2(area):

if area == 'to_couch':
# wait for human to step out of way
rospy.sleep(5)
rospy.sleep(3)
NavigateAction([pose_kitchen_to_couch]).resolve().perform()
NavigateAction([pose_couch]).resolve().perform()
TalkingMotion("Welcome to the living room").resolve().perform()
Expand All @@ -114,5 +112,4 @@ def demo_ms2(area):
introduce(guest1.name, guest1.fav_drink, host.name, host.fav_drink)


# demo_test('from_couch')
demo_ms2('now')
demo_ms2('to_couch')
22 changes: 22 additions & 0 deletions demos/pycram_receptionist_demo/tests/fortoday.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Ad-hoc manual test script for the receptionist demo: queries the knowledge
# base (knowrob) for the guest stored under ID "1.0" and prints the first two
# fields of the reply, so the data format can be inspected by hand.
import rospy

from pycram.designators.action_designator import DetectAction, NavigateAction
from pycram.designators.motion_designator import TalkingMotion
from pycram.fluent import Fluent
from demos.pycram_receptionist_demo.utils.misc import *
from pycram.helper import axis_angle_to_quaternion
from pycram.process_module import real_robot
import pycram.external_interfaces.giskard as giskardpy
from pycram.external_interfaces.knowrob import get_guest_info
from pycram.ros.robot_state_updater import RobotStateUpdater
from pycram.ros.viz_marker_publisher import VizMarkerPublisher
from pycram.designators.location_designator import *
from pycram.designators.object_designator import *
from pycram.bullet_world import BulletWorld, Object
from std_msgs.msg import String, Bool
from demos.pycram_receptionist_demo.deprecated import talk_actions
import pycram.external_interfaces.navigate as moveBase

# Fetch guest data for ID "1.0"; result must be indexable with at least
# two elements — presumably [name, drink], TODO confirm against knowrob.
x = get_guest_info("1.0")
print(x[0])  # first field (presumably the guest's name — verify)
print(x[1])  # second field (presumably the favourite drink — verify)
2 changes: 1 addition & 1 deletion demos/pycram_receptionist_demo/utils/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def talk_request(data: list):
"""

rospy.loginfo("in callback success")
toyas_text = "Hey " + data[0][13:] + " your favorite drink is " + data[
toyas_text = "Hey " + data[0] + " your favorite drink is " + data[
1]
TalkingMotion(toyas_text).resolve().perform()
rospy.sleep(1)
Expand Down
1 change: 0 additions & 1 deletion src/pycram/designators/action_designator.py
Original file line number Diff line number Diff line change
Expand Up @@ -342,7 +342,6 @@ def perform(self) -> None:
# Execute Bool, because sometimes u only want to visualize the poses to test things
if execute:
MoveTCPMotion(oTmG, self.arm).resolve().perform()
raise EnvironmentUnreachable
# Calculate and apply any special knowledge offsets based on the robot and object type
# Note: This currently includes robot-specific logic that should be generalized
tool_frame = robot_description.get_tool_frame(self.arm)
Expand Down

0 comments on commit 2f4d869

Please sign in to comment.