Merge pull request #190 from IOHprofiler/minor-patches
Minor patches
jacobdenobel authored Nov 4, 2023
2 parents 6ce9a3d + 9c4d0d5 commit 2250f72
Showing 10 changed files with 174 additions and 56 deletions.
11 changes: 9 additions & 2 deletions CMakeLists.txt
@@ -10,7 +10,7 @@ project(ioh
)

add_compile_definitions(PROJECT_VER="${CMAKE_PROJECT_VERSION}")
add_compile_definitions(HAS_JSON)
add_compile_definitions(HAS_JSON)
set(CMAKE_CXX_STANDARD 17)
set(EXTERNAL_DIR "${PROJECT_SOURCE_DIR}/external")
set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH})
@@ -33,7 +33,14 @@ if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
link_libraries(stdc++fs)
add_compile_definitions(FSEXPERIMENTAL)
endif()
endif()
endif()

# find_package(OpenMP)
# if (OPENMP_FOUND)
# set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}")
# set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}")
# set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${OpenMP_EXE_LINKER_FLAGS}")
# endif()



2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
0.3.12
0.3.13
3 changes: 2 additions & 1 deletion example/tutorial.ipynb
@@ -612,7 +612,8 @@
" iids = [1, 10], # the instances \n",
" dims = [2, 10], # the dimensions\n",
" reps = 3, # the number of runs,\n",
" zip_output = True \n",
" zip_output = True,\n",
" old_logger = True \n",
")"
]
},
31 changes: 31 additions & 0 deletions include/ioh/logger/triggers.hpp
@@ -277,6 +277,37 @@ namespace ioh
*/
inline OnImprovement on_improvement; // Uncomment if one wants a library.


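//! Trigger that fires when the best-so-far objective value improves by more than a fixed delta.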
struct OnDeltaImprovement: logger::Trigger {
double delta;
double best_so_far;

OnDeltaImprovement(const double delta = 1e-10): delta(delta) {
reset();
}

OnDeltaImprovement(const double delta, const double best_so_far): delta(delta), best_so_far(best_so_far) {
}

/** @returns true if a log event is to be triggered given the passed state. */
virtual bool operator()(const logger::Info &log_info, const problem::MetaData &pb_info){
if (std::isnan(best_so_far)){
best_so_far = log_info.y;
return true;
}

if (pb_info.optimization_type(best_so_far, log_info.y) && std::abs(best_so_far - log_info.y) > delta) {
best_so_far = log_info.y;
return true;
}
return false;
};

virtual void reset() {
best_so_far = std::numeric_limits<double>::signaling_NaN();
}
};

//! Trigger when there is constraint violation
struct OnViolation: logger::Trigger {
//! Track the number of violations
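A note on the semantics of the new trigger: OnDeltaImprovement fires unconditionally on the first evaluation (while best_so_far is still NaN) and afterwards only when the best-so-far value improves by strictly more than delta. The Python snippet below is an illustrative stand-alone re-implementation of that decision rule, not part of the library:

import math

class DeltaImprovementRule:
    """Illustrative stand-alone version of the OnDeltaImprovement logic."""

    def __init__(self, delta: float = 1e-10):
        self.delta = delta
        self.best_so_far = math.nan

    def __call__(self, y: float, minimize: bool = True) -> bool:
        # First evaluation: always trigger and remember the value.
        if math.isnan(self.best_so_far):
            self.best_so_far = y
            return True
        improved = y < self.best_so_far if minimize else y > self.best_so_far
        # Trigger only when the improvement is larger than delta.
        if improved and abs(self.best_so_far - y) > self.delta:
            self.best_so_far = y
            return True
        return False

For example, with delta = 0.5 and minimization, a step from 10.0 to 9.7 does not trigger, but the next step to 9.4 does, because the improvement over the stored best then exceeds delta.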
81 changes: 65 additions & 16 deletions include/ioh/problem/single.hpp
@@ -47,49 +47,98 @@ namespace ioh::problem
{
}

//! Main call interface
virtual double operator()(const std::vector<T> &x) override
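//! Evaluate x and write the (transformed, penalized) result into the given state, without updating counters or logging.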
void evaluate_for_state(const std::vector<T> &x, State<T, SingleObjective> &state)
{
if (!this->check_input(x))
return std::numeric_limits<double>::signaling_NaN();

this->state_.current.x = x;
state.current.x = x;
if (this->constraintset_.hard_violation(x))
{
this->state_.current_internal.x = x;
this->state_.current_internal.y =
state.current_internal.x = x;
state.current_internal.y =
this->constraintset_.penalize(this->meta_data_.optimization_type.initial_value());
this->state_.y_unconstrained = this->state_.current_internal.y;
this->state_.current.y = this->state_.current_internal.y;
state.y_unconstrained = state.current_internal.y;
state.current.y = state.current_internal.y;
}
else
{
this->state_.current_internal.x = this->transform_variables(x);
this->state_.current_internal.y = this->evaluate(this->state_.current_internal.x);
this->state_.y_unconstrained = this->transform_objectives(this->state_.current_internal.y);
this->state_.current.y = this->constraintset_.penalize(this->state_.y_unconstrained);
state.current_internal.x = this->transform_variables(x);
state.current_internal.y = this->evaluate(state.current_internal.x);
state.y_unconstrained = this->transform_objectives(state.current_internal.y);
state.current.y = this->constraintset_.penalize(state.y_unconstrained);
}
}

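//! Advance the problem state (evaluation count, best-so-far) and emit a log event if a logger is attached.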
void update_state_and_log() {
this->state_.update(this->meta_data_, this->optimum_);

if (this->logger_ != nullptr)
{
this->log_info_.update(this->state_, this->constraintset_);
this->logger_->log(this->log_info());
}
}


//! Main call interface
virtual double operator()(const std::vector<T> &x) override
{
if (!this->check_input(x))
return std::numeric_limits<double>::signaling_NaN();

evaluate_for_state(x, this->state_);
update_state_and_log();

return this->state_.current.y;
}


#if defined(_OPENMP)
virtual std::vector<double> operator()(const std::vector<std::vector<T>> &X) override
{

const size_t n = X.size();
std::vector<int> checked(n, 0);
std::vector<State<T, SingleObjective>> states(n, this->state_);

//------------------------------------ //
//-----------[threaded code]---------- //
//------------------------------------ //

#pragma omp parallel for
for (size_t i = 0; i < n; i++)
{
if (this->check_input(X[i])) {
evaluate_for_state(X[i], states[i]);
checked[i] = 1;
}
}
//------------------------------------ //

std::vector<double> y(n);
for (size_t i = 0; i < n; i++)
{
if(checked[i]) {
this->state_.current.x = states[i].current.x;
this->state_.current_internal.x = states[i].current_internal.x;
this->state_.current_internal.y = states[i].current_internal.y;
this->state_.y_unconstrained = states[i].y_unconstrained;
this->state_.current.y = states[i].current.y;
update_state_and_log();
y[i] = states[i].current.y;
} else{
y[i] = std::numeric_limits<double>::signaling_NaN();
}
}
return y;
}
#else
virtual std::vector<double> operator()(const std::vector<std::vector<T>> &X) override
{
std::vector<double> y(X.size());
for (size_t i = 0; i < y.size(); i++)
{
y[i] = (*this)(X[i]);
}
return y;
}
#endif
};


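The refactor above separates pure evaluation (evaluate_for_state, which writes into an arbitrary State) from the sequential bookkeeping (update_state_and_log). That split is what allows the OpenMP overload to evaluate a whole batch in parallel and only afterwards merge the per-candidate states into the global state, log them in input order, and return NaN for inputs that fail check_input. The sketch below mirrors that control flow in Python with a thread pool; the helper callables are hypothetical and this is not the library API:

import math
from concurrent.futures import ThreadPoolExecutor

def evaluate_batch(evaluate, merge_and_log, check_input, X):
    """Evaluate candidates independently, then merge and log sequentially."""
    # Parallel section: each candidate is evaluated into its own result,
    # without touching any shared state.
    with ThreadPoolExecutor() as pool:
        results = list(pool.map(
            lambda x: evaluate(x) if check_input(x) else None, X))

    # Sequential section: merge results into the global state in input order
    # and emit one log event per valid candidate.
    y = []
    for result in results:
        if result is None:
            y.append(math.nan)  # invalid input, mirrors the NaN return value
        else:
            merge_and_log(result)
            y.append(result)
    return y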
2 changes: 1 addition & 1 deletion ioh/__init__.py
@@ -294,7 +294,7 @@ def __init__(
zip_output: bool = True,
remove_data: bool = False,
enforce_bounds: bool = False,
old_logger: bool = True
old_logger: bool = False
):
'''
Parameters
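This flips the default of old_logger in the Experiment helper from True to False, so experiments now use the new logger unless the old one is requested explicitly (which is why the tutorial cell above passes old_logger = True). A minimal sketch of opting back in, assuming the usual positional arguments (an algorithm callable that receives the problem instance, followed by the function ids) in addition to the keyword arguments visible in this diff:

import random
import ioh

def random_search(problem, budget=100):
    # Toy algorithm for illustration: uniform sampling in the BBOB domain.
    for _ in range(budget):
        x = [random.uniform(-5, 5) for _ in range(problem.meta_data.n_variables)]
        problem(x)

exp = ioh.Experiment(
    random_search,
    fids=[1],          # the functions
    iids=[1, 10],      # the instances
    dims=[2, 10],      # the dimensions
    reps=3,            # the number of runs
    zip_output=True,
    old_logger=True,   # keep using the old logger implementation (the previous default)
)
# exp()  # running the experiment iterates over all (function, instance, dimension, repetition) combinations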
78 changes: 44 additions & 34 deletions ioh/src/logger.cpp
@@ -9,17 +9,17 @@
namespace py = pybind11;
using namespace ioh;

// Trampoline
// Trampoline
struct AbstractProperty : logger::Property
{
AbstractProperty(const std::string& name): logger::Property(name) {}
AbstractProperty(const std::string &name) : logger::Property(name) {}

std::string call_to_string(const logger::Info &log_info, const std::string &nan = "") const override
{
PYBIND11_OVERRIDE(std::string, logger::Property, call_to_string, log_info, nan);
}

std::optional<double> operator()(const logger::Info & info) const override
std::optional<double> operator()(const logger::Info &info) const override
{
PYBIND11_OVERRIDE_PURE_NAME(std::optional<double>, logger::Property, "__call__", operator(), info);
}
@@ -52,18 +52,22 @@ class PyProperty : public logger::Property
}
};

// Trampoline
struct AbstractWatcher: logger::Watcher {
// Trampoline
struct AbstractWatcher : logger::Watcher
{

using logger::Watcher::Watcher;

void attach_problem(const problem::MetaData& problem) override {
void attach_problem(const problem::MetaData &problem) override
{
PYBIND11_OVERRIDE(void, logger::Watcher, attach_problem, problem);
}
void attach_suite(const std::string& suite_name) override {
void attach_suite(const std::string &suite_name) override
{
PYBIND11_OVERRIDE_PURE(void, logger::Watcher, attach_suite, suite_name);
}
void call(const logger::Info& log_info) override {
void call(const logger::Info &log_info) override
{
PYBIND11_OVERRIDE_PURE_NAME(void, logger::Watcher, "__call__", call, log_info);
}
};
@@ -82,7 +86,7 @@

void watch(const py::object &container, const std::string &attribute)
{
auto p = std::make_unique<PyProperty>(container, attribute);
auto p = std::make_unique<PyProperty>(container, attribute);
watch(*p);
property_ptrs_.push_back(std::move(p));
}
@@ -107,9 +111,8 @@
};



// Python spec. implementation
template<typename A>
template <typename A>
class PyAnalyzer : public PyWatcher<A>
{
std::unordered_map<std::string, std::unique_ptr<double>> double_ptrs_;
@@ -119,14 +122,9 @@
using AnalyzerType = PyWatcher<A>;
using AnalyzerType::AnalyzerType;

virtual void close() override
{
AnalyzerType::close();
}
virtual void close() override { AnalyzerType::close(); }

virtual ~PyAnalyzer() {
close();
}
virtual ~PyAnalyzer() { close(); }

void add_run_attribute_python(const py::object &container, const std::string &name)
{
@@ -148,9 +146,7 @@
double_ptrs_[name] = std::move(ptr);
}

void set_run_attribute_python(const std::string &name, double value) {
*(double_ptrs_.at(name)) = value;
}
void set_run_attribute_python(const std::string &name, double value) { *(double_ptrs_.at(name)) = value; }

void set_run_attributes_python(const std::map<std::string, double> &attributes)
{
@@ -159,14 +155,15 @@
}


virtual void handle_last_eval() override {
for (auto& ptr : prop_ptrs_)
virtual void handle_last_eval() override
{
for (auto &ptr : prop_ptrs_)
set_run_attribute_python(ptr->name(), (*ptr)(logger::Info{}).value());
AnalyzerType::handle_last_eval();
}
};

template<typename A>
template <typename A>
void define_analyzer(py::module &m)
{
using namespace logger;
@@ -202,17 +199,17 @@
.def("add_experiment_attribute", &PyAnalyzer::add_experiment_attribute)
.def("set_experiment_attributes", &PyAnalyzer::set_experiment_attributes)

.def("add_run_attribute",
py::overload_cast<const std::string &, double>(&PyAnalyzer::add_run_attribute_python))
.def("add_run_attribute", py::overload_cast<const std::string &, double>(&PyAnalyzer::add_run_attribute_python))
.def("add_run_attribute",
py::overload_cast<const py::object &, const std::string &>(&PyAnalyzer::add_run_attribute_python))
.def("add_run_attributes",
py::overload_cast<const py::object &, const std::vector<std::string> &>(
&PyAnalyzer::add_run_attributes_python))

.def("set_run_attributes", &PyAnalyzer::set_run_attributes_python) // takes a map<str, double>
.def("set_run_attribute", &PyAnalyzer::set_run_attribute_python) // takes str, double>
.def_property_readonly("output_directory", [](const PyAnalyzer& self) {return self.output_directory().generic_string();})
.def("set_run_attribute", &PyAnalyzer::set_run_attribute_python) // takes str, double>
.def_property_readonly("output_directory",
[](const PyAnalyzer &self) { return self.output_directory().generic_string(); })
.def("close", &PyAnalyzer::close)
.def("watch", py::overload_cast<Property &>(&PyAnalyzer::watch))
.def("watch", py::overload_cast<const py::object &, const std::string &>(&PyAnalyzer::watch))
@@ -247,6 +244,18 @@
;
t.attr("ON_IMPROVEMENT") = py::cast(trigger::on_improvement);

py::class_<trigger::OnDeltaImprovement, logger::Trigger, std::shared_ptr<trigger::OnDeltaImprovement>>(
t, "OnDeltaImprovement",
"Trigger that evaluates to true when an improvement of the objective function of more than delta is "
"observed")
.def(py::init<double>(), py::arg("delta") = 1e-10)
.def(py::pickle([](const trigger::OnDeltaImprovement &t) { return py::make_tuple(t.delta, t.best_so_far); },
[](py::tuple t) {
return trigger::OnDeltaImprovement{t[0].cast<double>(), t[1].cast<double>()};
}));


py::class_<trigger::OnViolation, logger::Trigger, std::shared_ptr<trigger::OnViolation>>(
t, "OnViolation", "Trigger that evaluates to true when there is a constraint violation")
.def(py::init<>())
@@ -307,8 +316,9 @@
[](py::tuple t) { return trigger::During{t[0].cast<std::set<std::pair<size_t, size_t>>>()}; }));
}

template<typename P>
void define_property(py::module &m, std::string name, P predef){
template <typename P>
void define_property(py::module &m, std::string name, P predef)
{

py::class_<P, logger::Property, std::shared_ptr<P>>(m, name.c_str(), py::buffer_protocol())
.def(py::init<std::string, std::string>(), py::arg("name"), py::arg("format"),
@@ -317,7 +327,7 @@ void define_property(py::module &m, std::string name, P predef){
[](py::tuple t) {
return P{t[0].cast<std::string>(), t[1].cast<std::string>()};
}));

std::transform(name.begin(), name.end(), name.begin(), ::toupper);
m.attr(name.c_str()) = py::cast(predef);
}
@@ -378,8 +388,8 @@
.def_property_readonly("problem", &Logger::problem, "Reference to the currently attached problem");

using namespace logger;
py::class_<Watcher, AbstractWatcher, Logger, std::shared_ptr<Watcher>>(m, "AbstractLogger",
"Base class for loggers which track properties")
py::class_<Watcher, AbstractWatcher, Logger, std::shared_ptr<Watcher>>(
m, "AbstractLogger", "Base class for loggers which track properties")
.def(py::init<Triggers, Properties>(), py::arg("triggers") = Triggers{}, py::arg("properties") = Properties{})
.def("watch", &Watcher::watch);
}
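With the binding above, the new trigger becomes reachable from Python. A short usage sketch, assuming it is exposed as ioh.logger.trigger.OnDeltaImprovement alongside the existing ON_IMPROVEMENT constant and combined with the Analyzer logger:

import random
import ioh

problem = ioh.get_problem("Sphere", instance=1, dimension=5)

# Only record evaluations that improve the best-so-far value by more than 1e-3.
logger = ioh.logger.Analyzer(
    triggers=[ioh.logger.trigger.OnDeltaImprovement(delta=1e-3)],
    folder_name="delta_improvement_data",
)
problem.attach_logger(logger)

for _ in range(1000):
    x = [random.uniform(-5, 5) for _ in range(problem.meta_data.n_variables)]
    problem(x)

logger.close()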
