Commit

Small optimization in MinEffort. Tested and working
EnricoMingo committed Feb 13, 2024
1 parent 9e80f03 commit 5e986eb
Showing 2 changed files with 15 additions and 13 deletions.
2 changes: 2 additions & 0 deletions include/OpenSoT/tasks/velocity/MinimumEffort.h
@@ -92,6 +92,8 @@
 
     ComputeGTauGradient _gTauGradientWorker;
     double _step;
+    Eigen::VectorXd _gradient;
+    Eigen::VectorXd _deltas;
 
 public:
 
26 changes: 13 additions & 13 deletions src/tasks/velocity/MinimumEffort.cpp
@@ -36,6 +36,9 @@ MinimumEffort::MinimumEffort(const XBot::ModelInterface& robot_model, const doub
     _A.resize(_x_size, _x_size);
     _A.setIdentity(_x_size, _x_size);
 
+    _gradient.resize(_model.getNv());
+    _deltas.resize(_model.getNv());
+
     /* first update. Setting desired pose equal to the actual pose */
     this->_update(Eigen::VectorXd(0));
 }
@@ -50,28 +53,25 @@ void MinimumEffort::_update(const Eigen::VectorXd &x) {
 
     /************************* COMPUTING TASK *****************************/
 
-
-    Eigen::VectorXd gradient(_model.getNv());
-    gradient.setZero();
-    Eigen::VectorXd deltas(_model.getNv());
-    deltas.setZero();
+    _gradient.setZero();
+    _deltas.setZero();
 
 
-    for(unsigned int i = 0; i < gradient.size(); ++i)
+    for(unsigned int i = 0; i < _gradient.size(); ++i)
     {
         if(this->getActiveJointsMask()[i])
         {
-            deltas[i] = _step;
-            double fun_a = _gTauGradientWorker.compute(_model.sum(_q, deltas));
-            double fun_b = _gTauGradientWorker.compute(_model.sum(_q, -deltas));
+            _deltas[i] = _step;
+            double fun_a = _gTauGradientWorker.compute(_model.sum(_q, _deltas));
+            double fun_b = _gTauGradientWorker.compute(_model.sum(_q, -_deltas));
 
-            gradient[i] = (fun_a - fun_b)/(2.0*_step);
-            deltas[i] = 0.0;
+            _gradient[i] = (fun_a - fun_b)/(2.0*_step);
+            _deltas[i] = 0.0;
         } else
-            gradient[i] = 0.0;
+            _gradient[i] = 0.0;
     }
 
-    _b = -1.0 * _lambda * gradient;
+    _b = -1.0 * _lambda * _gradient;
 
     /**********************************************************************/
 }
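For context, the change replaces the two Eigen::VectorXd temporaries that were constructed on every call to _update with member buffers allocated once in the constructor, while the central finite-difference gradient of the gravity-torque cost, grad_i ≈ (g(q + h*e_i) - g(q - h*e_i)) / (2h), stays the same. A minimal stand-alone sketch of that pattern follows; the class name, the cost callback and q_size are illustrative assumptions, not OpenSoT API. Note the sketch uses plain vector addition, whereas the commit goes through _model.sum() so the perturbation is applied correctly to the configuration representation.

// Minimal sketch (not from the commit): central finite-difference gradient of a
// scalar cost, using buffers preallocated once and reused across calls, instead
// of constructing two Eigen::VectorXd temporaries on every call.
#include <Eigen/Dense>
#include <functional>

class CentralDiffGradient
{
public:
    CentralDiffGradient(int q_size, double step):
        _step(step),
        _gradient(Eigen::VectorXd::Zero(q_size)),
        _deltas(Eigen::VectorXd::Zero(q_size))
    {}

    // cost: scalar function of the configuration, e.g. squared gravity torques
    const Eigen::VectorXd& compute(const Eigen::VectorXd& q,
                                   const std::function<double(const Eigen::VectorXd&)>& cost)
    {
        _gradient.setZero();
        _deltas.setZero();
        for(int i = 0; i < _gradient.size(); ++i)
        {
            _deltas[i] = _step;                              // perturb joint i only
            double fun_a = cost(q + _deltas);                // g(q + h*e_i)
            double fun_b = cost(q - _deltas);                // g(q - h*e_i)
            _gradient[i] = (fun_a - fun_b) / (2.0 * _step);  // central difference
            _deltas[i] = 0.0;                                // restore for the next joint
        }
        return _gradient;
    }

private:
    double _step;
    Eigen::VectorXd _gradient;  // reused across calls: no per-call allocation
    Eigen::VectorXd _deltas;    // single-entry perturbation vector
};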
