Commit 2817e71
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Nov 26, 2024
1 parent 3e58cc1 commit 2817e71
Showing 2 changed files with 21 additions and 17 deletions.
2 changes: 0 additions & 2 deletions agimus_controller_ros/agimus_controller.py
@@ -28,5 +28,3 @@ def update_state_machine(self):
             self.elapsed_time >= 0.1 and self.last_elapsed_time < 0.1
         ):
             self.change_state()
-
-
36 changes: 21 additions & 15 deletions agimus_controller_ros/controller_base.py
@@ -182,13 +182,15 @@ def vision_callback(self, vision_msg: Detection2DArray):
         rot = pose.orientation
         pose_array = [trans.x, trans.y, trans.z, rot.w, rot.x, rot.y, rot.z]
         in_prev_world_M_object = pin.XYZQUATToSE3(pose_array)
-        if self.state_machine in [HPPStateMachine.WAITING_PICK_TRAJECTORY,
-                                  HPPStateMachine.RECEIVING_PICK_TRAJECTORY,
-                                  HPPStateMachine.WAITING_PLACE_TRAJECTORY,]:
+        if self.state_machine in [
+            HPPStateMachine.WAITING_PICK_TRAJECTORY,
+            HPPStateMachine.RECEIVING_PICK_TRAJECTORY,
+            HPPStateMachine.WAITING_PLACE_TRAJECTORY,
+        ]:
             self.in_world_M_object = self.in_world_M_prev_world * in_prev_world_M_object
             if self.init_in_world_M_object is None:
                 self.init_in_world_M_object = self.in_world_M_object

     def change_state(self):
         self.state_machine_timeline_idx = (self.state_machine_timeline_idx + 1) % len(
             self.state_machine_timeline
@@ -284,9 +286,13 @@ def solve(self, x0):
             self.effector_frame_id,
             x0[: self.rmodel.nq],
         ).translation
-        if np.linalg.norm(self.in_world_M_effector.translation - current_pose) < 0.01 and self.state_machine == HPPStateMachine.WAITING_PLACE_TRAJECTORY:
-            print("changing state for visual servoing")
-            self.change_state()
+        if (
+            np.linalg.norm(self.in_world_M_effector.translation - current_pose)
+            < 0.01
+            and self.state_machine == HPPStateMachine.WAITING_PLACE_TRAJECTORY
+        ):
+            print("changing state for visual servoing")
+            self.change_state()
         else:
             self.mpc.ocp.set_last_running_model_placement_weight(0)
             self.mpc.mpc_step(x0, new_x_ref, new_a_ref, self.in_world_M_effector)
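
Note: the condition that black split across lines above is a simple distance gate: visual servoing starts only once the effector is within 1 cm of the reference placement while the state machine waits for the place trajectory. A minimal standalone sketch of that test (the target and current values below are made up for illustration, not taken from the repository):

import numpy as np

# Illustrative stand-ins for self.in_world_M_effector.translation and the
# current end-effector position computed from x0.
target = np.array([0.40, 0.10, 0.25])
current = np.array([0.405, 0.098, 0.251])

# Switch to visual servoing only once the effector is within 1 cm of the
# reference placement (the real check also requires the state machine to be
# in WAITING_PLACE_TRAJECTORY).
if np.linalg.norm(target - current) < 0.01:
    print("changing state for visual servoing")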
@@ -324,9 +330,9 @@ def update_effector_placement_with_vision(self):
             self.in_world_M_object.translation
             + self.target_translation_object_to_effector
         )
-        #self.in_world_M_effector.rotation = (
+        # self.in_world_M_effector.rotation = (
         #    self.in_world_M_object.rotation * in_object_rot_effector
-        #)
+        # )

     def pick_traj_last_point_is_in_horizon(self):
         return (
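
Note: since the rotation update above is commented out, only the translation of the effector target is taken from vision: the object position plus a fixed object-to-effector offset. A small sketch of that computation with pinocchio, using illustrative values rather than the controller's actual parameters:

import numpy as np
import pinocchio as pin

# Illustrative values; in the controller these come from the vision callback
# (in_world_M_object) and a configured grasp offset.
in_world_M_object = pin.SE3(np.eye(3), np.array([0.5, 0.0, 0.1]))
target_translation_object_to_effector = np.array([0.0, 0.0, 0.15])

# Only the translation of the effector target is updated from vision; the
# rotation update is commented out in the hunk above.
in_world_M_effector = pin.SE3.Identity()
in_world_M_effector.translation = (
    in_world_M_object.translation + target_translation_object_to_effector
)
print(in_world_M_effector.translation)  # approximately [0.5, 0.0, 0.25]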
@@ -368,10 +374,10 @@ def pick_traj_last_point_is_near(self, x0):
             and np.linalg.norm(self.pick_traj_last_pose - current_pose)
             < self.params.start_visual_servoing_dist
         )

         if self.do_visual_servoing:
             self.start_visual_servoing_time = time.time()

         else:
             self.do_visual_servoing = False
         return self.do_visual_servoing
@@ -381,11 +387,11 @@ def get_sensor_msg(self):
         sensor_msg = deepcopy(self.sensor_msg)
         return sensor_msg

-    def set_control_inside_limits(self,u):
-        control_limits=[86]*4 + [11]*3
-        for idx,control_limit in enumerate(control_limits):
-            if np.abs(u[idx]) > control_limit :
-                u[idx] *= control_limit/np.abs(u[idx])
+    def set_control_inside_limits(self, u):
+        control_limits = [86] * 4 + [11] * 3
+        for idx, control_limit in enumerate(control_limits):
+            if np.abs(u[idx]) > control_limit:
+                u[idx] *= control_limit / np.abs(u[idx])
         return u

     def send(self, sensor_msg, u, k):
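
Note: the reformatted set_control_inside_limits scales any control component that exceeds its limit back onto that limit while keeping its sign; the limits [86]*4 + [11]*3 are the values hard-coded in this file. A standalone sketch of the same clamping logic (the free-function form and the sample input are illustrative):

import numpy as np


def set_control_inside_limits(u, control_limits=(86, 86, 86, 86, 11, 11, 11)):
    """Scale any control entry that exceeds its limit back to the limit,
    preserving its sign."""
    u = np.asarray(u, dtype=float)
    for idx, control_limit in enumerate(control_limits):
        if np.abs(u[idx]) > control_limit:
            u[idx] *= control_limit / np.abs(u[idx])
    return u


print(set_control_inside_limits([100.0, -90.0, 10.0, 50.0, 15.0, -5.0, 11.0]))
# [ 86. -86.  10.  50.  11.  -5.  11.]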
