update irace/fastga to follow recent IOH refactoring

This commit is contained in:
Johann Dreo 2021-07-07 12:59:41 +02:00
commit 8e960b23f3
6 changed files with 423 additions and 434 deletions

View file

@ -1,5 +1,6 @@
# ParadiseO # ParadiseO
###################################################################################### ######################################################################################
### 0) Check the CMake version ### 0) Check the CMake version
###################################################################################### ######################################################################################

View file

@ -45,46 +45,23 @@ set(PARADISEO_LIBRARIES ga eoutils eo)
# IOH # IOH
set(IOH_ROOT "~/code/IOHexperimenter/" CACHE PATH "Where to find IOHexperimenter") set(IOH_ROOT "~/code/IOHexperimenter/" CACHE PATH "Where to find IOHexperimenter")
find_path(IOH_PROBLEM_H "IOHprofiler_problem.h" PATHS ${IOH_ROOT}/src/Template/) find_path(IOH_HPP "ioh.hpp" PATHS ${IOH_ROOT}/include/)
find_library(IOH_LIBRARY "IOH" PATHS ${IOH_ROOT} PATH_SUFFIXES release Release debug Debug build Build) # find_library(IOH_LIBRARY "IOH" PATHS ${IOH_ROOT} PATH_SUFFIXES release Release debug Debug build Build)
if(EXISTS ${IOH_PROBLEM_H} AND EXISTS ${IOH_LIBRARY}) if(EXISTS ${IOH_HPP}) # AND EXISTS ${IOH_LIBRARY})
message(STATUS "Found IOH in ${IOH_ROOT}") message(STATUS "Found IOH in ${IOH_ROOT}")
include_directories(${IOH_ROOT}/build/Cpp/src/) include_directories(${IOH_ROOT}/include/)
link_directories(${IOH_ROOT}/build/Cpp/bin/) include_directories(${IOH_ROOT}/external/fmt/include/)
include_directories(${IOH_ROOT}/external/clutchlog/)
# Workaround IOH's poorly designed headers inclusion scheme. link_directories(${IOH_ROOT}/release/external/fmt/)
SET(PROBLEMS_BBOB_DIR "src/Problems/BBOB")
SET(PROBLEMS_BBOB_COMMON_DIR "src/Problems/BBOB/bbob_common_used_functions")
SET(PROBLEMS_COMMON_DIR "src/Problems/common_used_functions")
SET(PROBLEMS_PBO_DIR "src/Problems/PBO")
SET(PROBLEMS_WMODEL_DIR "src/Problems/WModel")
SET(PROBLEMS_PYTHON_DIR "src/Problems/Python")
SET(SUITES_DIR "src/Suites")
SET(TEMPLATE_DIR "src/Template")
SET(TEMPLATE_EXPERIMENTS_DIR "src/Template/Experiments")
SET(TEMPLATE_LOGGERS_DIR "src/Template/Loggers")
SET(IOHEXPERIMENTER_DIR
"${IOH_ROOT}/${PROBLEMS_COMMON_DIR}"
"${IOH_ROOT}/${PROBLEMS_BBOB_DIR}"
"${IOH_ROOT}/${PROBLEMS_BBOB_COMMON_DIR}"
"${IOH_ROOT}/${PROBLEMS_PBO_DIR}"
"${IOH_ROOT}/${PROBLEMS_WMODEL_DIR}"
"${IOH_ROOT}/${PROBLEMS_PYTHON_DIR}"
"${IOH_ROOT}/${SUITES_DIR}"
"${IOH_ROOT}/${TEMPLATE_DIR}"
"${IOH_ROOT}/${TEMPLATE_EXPERIMENTS_DIR}"
"${IOH_ROOT}/${TEMPLATE_LOGGERS_DIR}"
)
include_directories(${IOHEXPERIMENTER_DIR})
else() else()
if(NOT EXISTS ${IOH_PROBLEM_H}) if(NOT EXISTS ${IOH_HPP})
message(FATAL_ERROR "Could not find `IOHprofiler_problem.h` in: ${IOH_ROOT}/src/Template/ (did you forget to compile it?)") message(FATAL_ERROR "Could not find `ioh.hpp` in: ${IOH_ROOT}/include/")
endif()
if(NOT EXISTS ${IOH_LIBRARIES})
message(FATAL_ERROR "Could not find `libIOH` in: ${IOH_ROOT}/[release|debug|build] (did you forget to compile it?)")
endif() endif()
# if(NOT EXISTS ${IOH_LIBRARIES})
# message(FATAL_ERROR "Could not find `libIOH` in: ${IOH_ROOT}/[release|debug|build] (did you forget to compile it?)")
# endif()
endif() endif()
@ -93,5 +70,6 @@ endif()
###################################################################################### ######################################################################################
add_executable(fastga fastga.cpp) add_executable(fastga fastga.cpp)
target_link_libraries(fastga ${PARADISEO_LIBRARIES} ${IOH_LIBRARY} stdc++fs) # target_link_libraries(fastga ${PARADISEO_LIBRARIES} ${IOH_LIBRARY} stdc++fs)
target_link_libraries(fastga ${PARADISEO_LIBRARIES} stdc++fs fmt)

Binary file not shown.

View file

@ -2,16 +2,19 @@
#include <iostream> #include <iostream>
#include <cstdlib> #include <cstdlib>
#include <string> #include <string>
#include <memory>
#include <eo> #include <eo>
#include <ga.h> #include <ga.h>
#include <utils/checkpointing> #include <utils/checkpointing>
#include <eoInt.h> #include <eoInt.h>
#include <problems/eval/eoEvalIOH.h> #include <problems/eval/eoEvalIOH.h>
#include <IOHprofiler_ecdf_logger.h> #include <ioh.hpp>
#include <IOHprofiler_csv_logger.h>
#include <IOHprofiler_observer_combine.h> /*****************************************************************************
#include <f_w_model_one_max.hpp> * ParadisEO algorithmic grammar definition.
*****************************************************************************/
// using Particle = eoRealParticle<eoMaximizingFitness>; // using Particle = eoRealParticle<eoMaximizingFitness>;
using Ints = eoInt<eoMaximizingFitnessT<int>, size_t>; using Ints = eoInt<eoMaximizingFitnessT<int>, size_t>;
@ -110,6 +113,10 @@ eoAlgoFoundryFastGA<Bits>& make_foundry(
return foundry; return foundry;
} }
/*****************************************************************************
* irace helper functions.
*****************************************************************************/
Bits::Fitness fake_func(const Bits&) { return 0; } Bits::Fitness fake_func(const Bits&) { return 0; }
void print_irace_full(const eoParam& param, const size_t slot_size, std::string type="i", std::ostream& out = std::cout) void print_irace_full(const eoParam& param, const size_t slot_size, std::string type="i", std::ostream& out = std::cout)
@ -210,12 +217,16 @@ std::ostream& operator<<(std::ostream& os, const Problem& pb)
return os; return os;
} }
/*****************************************************************************
* Command line interface.
*****************************************************************************/
int main(int argc, char* argv[]) int main(int argc, char* argv[])
{ {
/***** Global parameters. *****/ /***** Global parameters. *****/
enum { NO_ERROR = 0, ERROR_USAGE = 100 }; enum { NO_ERROR = 0, ERROR_USAGE = 100 };
std::map<size_t, Problem> problem_config_mapping { std::map<size_t, Problem> benchmark {
/* ┌ problem index in the map /* ┌ problem index in the map
* problem ID in IOH experimenter * problem ID in IOH experimenter
* dummy * dummy
@ -249,16 +260,22 @@ int main(int argc, char* argv[])
eoParser parser(argc, argv, "FastGA interface for iRace"); eoParser parser(argc, argv, "FastGA interface for iRace");
/***** Problem parameters *****/
auto problem_p = parser.getORcreateParam<size_t>(0, auto problem_p = parser.getORcreateParam<size_t>(0,
"problem", "Problem ID", "problem", "Problem ID",
'p', "Problem", /*required=*/true); 'p', "Problem", /*required=*/true);
const size_t problem = problem_p.value(); const size_t problem = problem_p.value();
assert(0 <= problem and problem < problem_config_mapping.size()); assert(0 <= problem and problem < benchmark.size());
// const size_t dimension = parser.getORcreateParam<size_t>(1000, // const size_t dimension = parser.getORcreateParam<size_t>(1000,
// "dimension", "Dimension size", // "dimension", "Dimension size",
// 'd', "Problem").value(); // 'd', "Problem").value();
const size_t dimension = problem_config_mapping[problem].dimension; const size_t dimension = benchmark[problem].dimension;
auto instance_p = parser.getORcreateParam<size_t>(0,
"instance", "Instance ID",
'i', "Instance", /*required=*/false);
const size_t instance = instance_p.value();
const size_t max_evals = parser.getORcreateParam<size_t>(5 * dimension, const size_t max_evals = parser.getORcreateParam<size_t>(5 * dimension,
"max-evals", "Maximum number of evaluations", "max-evals", "Maximum number of evaluations",
@ -268,6 +285,7 @@ int main(int argc, char* argv[])
"buckets", "Number of buckets for discretizing the ECDF", "buckets", "Number of buckets for discretizing the ECDF",
'b', "Performance estimation").value(); 'b', "Performance estimation").value();
/***** Generic options *****/
uint32_t seed = uint32_t seed =
parser.getORcreateParam<uint32_t>(0, parser.getORcreateParam<uint32_t>(0,
"seed", "Random number seed (0 = epoch)", "seed", "Random number seed (0 = epoch)",
@ -280,25 +298,31 @@ int main(int argc, char* argv[])
bool full_log = bool full_log =
parser.getORcreateParam<bool>(0, parser.getORcreateParam<bool>(0,
"full-log", "Log the full search in CSV files (using the IOH profiler format)", "full-log", "Log the full search in CSV files"/* (using the IOH profiler format)"*/,
'F').value(); 'F').value();
bool output_mat = bool output_mat =
parser.getORcreateParam<bool>(0, parser.getORcreateParam<bool>(0,
"output-mat", "Output the aggregated attainment matrix instead of its scalar sum.", "output-mat", "Output the aggregated attainment matrix instead of its scalar sum (fancy colormap on stderr, parsable CSV on stdout).",
'A').value(); 'A').value();
/***** populations sizes *****/
auto pop_size_p = parser.getORcreateParam<size_t>(5, auto pop_size_p = parser.getORcreateParam<size_t>(5,
"pop-size", "Population size", "pop-size", "Population size",
'P', "Operator Choice", /*required=*/false); 'P', "Operator Choice", /*required=*/false);
const size_t pop_size = pop_size_p.value(); const size_t pop_size = pop_size_p.value();
auto instance_p = parser.getORcreateParam<size_t>(0, auto offspring_size_p = parser.getORcreateParam<size_t>(0,
"instance", "Instance ID", "offspring-size", "Offsprings size (0 = same size than the parents pop, see --pop-size)",
'i', "Instance", /*required=*/false); 'O', "Operator Choice", /*required=*/false); // Single alternative, not required.
const size_t instance = instance_p.value(); const size_t offspring_size = offspring_size_p.value();
const size_t generations = static_cast<size_t>(std::floor(
static_cast<double>(max_evals) / static_cast<double>(pop_size)));
// const size_t generations = std::numeric_limits<size_t>::max();
eo::log << eo::debug << "Number of generations: " << generations << std::endl;
/***** operators / parameters *****/
auto continuator_p = parser.getORcreateParam<size_t>(0, auto continuator_p = parser.getORcreateParam<size_t>(0,
"continuator", "Stopping criterion", "continuator", "Stopping criterion",
'o', "Operator Choice", /*required=*/false); // Single alternative, not required. 'o', "Operator Choice", /*required=*/false); // Single alternative, not required.
@ -344,12 +368,6 @@ int main(int argc, char* argv[])
'r', "Operator Choice", /*required=*/true); 'r', "Operator Choice", /*required=*/true);
const size_t replacement = replacement_p.value(); const size_t replacement = replacement_p.value();
auto offspring_size_p = parser.getORcreateParam<size_t>(0,
"offspring-size", "Offsprings size (0 = same size than the parents pop, see --pop-size)",
'O', "Operator Choice", /*required=*/false); // Single alternative, not required.
const size_t offspring_size = offspring_size_p.value();
// Help + Verbose routines // Help + Verbose routines
make_verbose(parser); make_verbose(parser);
make_help(parser, /*exit_after*/false, std::clog); make_help(parser, /*exit_after*/false, std::clog);
@ -410,28 +428,19 @@ int main(int argc, char* argv[])
exit(NO_ERROR); exit(NO_ERROR);
} }
const size_t generations = static_cast<size_t>(std::floor( /*****************************************************************************
static_cast<double>(max_evals) / static_cast<double>(pop_size))); * IOH stuff.
// const size_t generations = std::numeric_limits<size_t>::max(); *****************************************************************************/
eo::log << eo::debug << "Number of generations: " << generations << std::endl;
/***** IOH logger *****/ /***** IOH logger *****/
auto max_target = benchmark[problem].max_target;
ioh::logger::eah::Log10Scale<double> target_range(0, max_target, buckets);
ioh::logger::eah::Log10Scale<size_t> budget_range(0, max_evals, buckets);
ioh::logger::EAH eah_logger(target_range, budget_range);
ioh::logger::Combine loggers(eah_logger);
auto max_target_para = problem_config_mapping[problem].max_target; std::shared_ptr<ioh::logger::FlatFile> csv_logger = nullptr;
IOHprofiler_RangeLinear<size_t> target_range(0, max_target_para, buckets);
IOHprofiler_RangeLinear<size_t> budget_range(0, max_evals, buckets);
IOHprofiler_ecdf_logger<int, size_t, size_t> ecdf_logger(
target_range, budget_range,
/*use_known_optimum*/false);
// ecdf_logger.set_complete_flag(true);
// ecdf_logger.set_interval(0);
ecdf_logger.activate_logger();
IOHprofiler_observer_combine<int> loggers(ecdf_logger);
std::shared_ptr<IOHprofiler_csv_logger<int>> csv_logger;
if(full_log) { if(full_log) {
// Build up an algorithm name from main parameters. // Build up an algorithm name from main parameters.
std::ostringstream name; std::ostringstream name;
@ -453,57 +462,54 @@ int main(int argc, char* argv[])
// Build up a problem description. // Build up a problem description.
std::ostringstream desc; std::ostringstream desc;
desc << "pb=" << problem << "_"; desc << "pb=" << problem << "_";
desc << problem_config_mapping[problem]; // Use the `operator<<` above. desc << benchmark[problem]; // Use the `operator<<` above.
std::clog << desc.str() << std::endl; std::clog << desc.str() << std::endl;
std::string dir(name.str()); std::filesystem::path folder = desc.str();
std::filesystem::path d = name.str(); std::filesystem::create_directories(folder);
std::filesystem::create_directory(d);
std::string folder(desc.str()); ioh::trigger::OnImprovement on_improvement;
std::filesystem::path f = desc.str(); ioh::watch::Evaluations evaluations;
ioh::watch::TransformedYBest transformed_y_best;
std::filesystem::create_directory(d); std::vector<std::reference_wrapper<ioh::logger::Trigger >> t = {std::ref(on_improvement)};
std::filesystem::create_directory(d/f); std::vector<std::reference_wrapper<ioh::logger::Property>> w = {std::ref(evaluations),std::ref(transformed_y_best)};
csv_logger = std::make_shared<ioh::logger::FlatFile>(
csv_logger = std::make_shared<IOHprofiler_csv_logger<int>>(dir, folder, d, f); // {std::ref(on_improvement)},
loggers.add(*csv_logger); // {std::ref(evaluations),std::ref(transformed_y_best)},
t, w,
name.str(),
folder
);
loggers.append(*csv_logger);
} }
/***** IOH problem *****/ /***** IOH problem *****/
double w_model_suite_dummy_para = problem_config_mapping[problem].dummy; double w_dummy = benchmark[problem].dummy;
int w_model_suite_epitasis_para = problem_config_mapping[problem].epistasis; int w_epitasis = benchmark[problem].epistasis;
int w_model_suite_neutrality_para = problem_config_mapping[problem].neutrality; int w_neutrality = benchmark[problem].neutrality;
int w_model_suite_ruggedness_para = problem_config_mapping[problem].ruggedness; int w_ruggedness = benchmark[problem].ruggedness;
W_Model_OneMax w_model_om; // std::string problem_name = "OneMax";
std::string problem_name = "OneMax"; // problem_name = problem_name
problem_name = problem_name // + "_D" + std::to_string((int)(w_dummy * dimension))
+ "_D" + std::to_string((int)(w_model_suite_dummy_para * dimension)) // + "_E" + std::to_string(w_epitasis)
+ "_E" + std::to_string(w_model_suite_epitasis_para) // + "_N" + std::to_string(w_neutrality)
+ "_N" + std::to_string(w_model_suite_neutrality_para) // + "_R" + std::to_string(w_ruggedness);
+ "_R" + std::to_string(w_model_suite_ruggedness_para);
ioh::problem::wmodel::WModelOneMax w_model_om(
/// This must be called to configure the w-model to be tested. instance,
w_model_om.set_w_setting(w_model_suite_dummy_para,w_model_suite_epitasis_para, dimension,
w_model_suite_neutrality_para,w_model_suite_ruggedness_para); w_dummy,
w_epitasis,
/// Set problem_name based on the configuration. w_neutrality,
w_model_om.IOHprofiler_set_problem_name(problem_name); w_ruggedness);
/// Set problem_id as 1
w_model_om.IOHprofiler_set_problem_id(problem); // FIXME check what that means
// w_model_om.IOHprofiler_set_instance_id(instance); // FIXME changing the instance seems to change the target upper bound.
/// Set dimension.
w_model_om.IOHprofiler_set_number_of_variables(dimension);
/***** Bindings *****/ /***** Bindings *****/
ecdf_logger.track_problem(w_model_om); w_model_om.attach_logger(loggers);
if(full_log) {
csv_logger->track_problem(w_model_om); /*****************************************************************************
} * Binding everything together.
*****************************************************************************/
eoEvalIOHproblem<Bits> onemax_pb(w_model_om, loggers); eoEvalIOHproblem<Bits> onemax_pb(w_model_om, loggers);
@ -545,40 +551,45 @@ int main(int argc, char* argv[])
// // Actually instantiate and run the algorithm. // // Actually instantiate and run the algorithm.
// eval_foundry(encoded_algo); // eval_foundry(encoded_algo);
/*****************************************************************************
* Run and output results.
*****************************************************************************/
eoPop<Bits> pop; eoPop<Bits> pop;
pop.append(pop_size, onemax_init); pop.append(pop_size, onemax_init);
onemax_eval(pop,pop); onemax_eval(pop,pop);
foundry(pop); // Actually run the selected algorithm. foundry(pop); // Actually run the selected algorithm.
/***** IOH perf stats *****/ /***** IOH perf stats *****/
IOHprofiler_ecdf_sum ecdf_sum; double perf = ioh::logger::eah::stat::under_curve::volume(eah_logger);
// iRace expects minimization
long perf = ecdf_sum(ecdf_logger.data());
// assert(0 < perf and perf <= buckets*buckets); if(perf == 0 or perf > max_target * max_evals * 1.0) {
if(perf <= 0 or buckets*buckets < perf) { std::cerr << "WARNING: illogical performance? " << perf
std::cerr << "WARNING: illogical performance: " << perf << " Check the bounds or the algorithm." << std::endl;
<< ", check the bounds or the algorithm." << std::endl;
} }
// std::clog << "After " << eval_count.getValue() << " / " << max_evals << " evaluations" << std::endl; // std::clog << "After " << eval_count.getValue() << " / " << max_evals << " evaluations" << std::endl;
if(output_mat) { if(output_mat) {
std::vector<std::vector<double>> mat = ioh::logger::eah::stat::distribution(eah_logger);
IOHprofiler_ecdf_aggregate agg; // Fancy color map on clog.
IOHprofiler_ecdf_aggregate::Mat mat = agg(ecdf_logger.data()); std::clog << ioh::logger::eah::colormap(mat) << std::endl;
std::clog << "Attainment matrix sum: " << std::endl;
// Parsable CSV on cout.
std::clog << "Attainment matrix distribution: " << std::endl;
assert(mat.size() > 0); assert(mat.size() > 0);
assert(mat[0].size() > 1); assert(mat[0].size() > 1);
for(int i = mat.size()-1; i >= 0; --i) { for(size_t i = mat.size()-1; i >= 0; --i) {
std::cout << mat[i][0]; std::cout << mat[i][0];
for(int j = 1; j < mat[i].size(); ++j) { for(size_t j = 1; j < mat[i].size(); ++j) {
std::cout << "," << mat[i][j]; std::cout << "," << mat[i][j];
} }
std::cout << std::endl; std::cout << std::endl;
} }
} else { } else {
// iRace expects minimization
std::cout << -1 * perf << std::endl; std::cout << -1 * perf << std::endl;
} }
} }

Binary file not shown.

View file

@ -2,10 +2,7 @@
#ifndef _eoEvalIOH_h #ifndef _eoEvalIOH_h
#define _eoEvalIOH_h #define _eoEvalIOH_h
#include <IOHprofiler_problem.h> #include <ioh.hpp>
#include <IOHprofiler_suite.h>
#include <IOHprofiler_observer.h>
#include <IOHprofiler_ecdf_logger.h>
/** Wrap an IOHexperimenter's problem class within an eoEvalFunc. /** Wrap an IOHexperimenter's problem class within an eoEvalFunc.
* *
@ -25,18 +22,19 @@ class eoEvalIOHproblem : public eoEvalFunc<EOT>
using Fitness = typename EOT::Fitness; using Fitness = typename EOT::Fitness;
using ScalarType = typename Fitness::ScalarType; using ScalarType = typename Fitness::ScalarType;
eoEvalIOHproblem(IOHprofiler_problem<ScalarType> & pb) : eoEvalIOHproblem(ioh::problem::Problem<ScalarType> & pb) :
_ioh_pb(&pb), _ioh_pb(&pb),
_has_log(false), _has_log(false),
_ioh_log(nullptr) _ioh_log(nullptr)
{ } { }
eoEvalIOHproblem(IOHprofiler_problem<ScalarType> & pb, IOHprofiler_observer<ScalarType> & log ) : eoEvalIOHproblem(ioh::problem::Problem<ScalarType> & pb, ioh::Logger & log ) :
_ioh_pb(&pb), _ioh_pb(&pb),
_has_log(true), _has_log(true),
_ioh_log(&log) _ioh_log(&log)
{ {
_ioh_log->track_problem(*_ioh_pb); // _ioh_log->track_problem(*_ioh_pb);
pb.attach_logger(log);
} }
virtual void operator()(EOT& sol) virtual void operator()(EOT& sol)
@ -55,322 +53,323 @@ class eoEvalIOHproblem : public eoEvalFunc<EOT>
* Instead of re-assembling your algorithm, * Instead of re-assembling your algorithm,
* just update the problem pointer. * just update the problem pointer.
*/ */
void problem(IOHprofiler_problem<ScalarType> & pb ) void problem(ioh::problem::Problem<ScalarType> & pb )
{ {
_ioh_pb = &pb; _ioh_pb = &pb;
_ioh_log->track_problem(pb); // _ioh_log->track_problem(pb);
_ioh_pb->attach_logger(_ioh_log);
} }
bool has_logger() const {return _has_log;} bool has_logger() const {return _has_log;}
IOHprofiler_observer<ScalarType> & observer() {return *_ioh_log;} ioh::Logger & logger() {return *_ioh_log;}
protected: protected:
IOHprofiler_problem<ScalarType> * _ioh_pb; ioh::problem::Problem<ScalarType> * _ioh_pb;
bool _has_log; bool _has_log;
IOHprofiler_observer<ScalarType> * _ioh_log; ioh::Logger * _ioh_log;
virtual Fitness call(EOT& sol) virtual Fitness call(EOT& sol)
{ {
Fitness f = _ioh_pb->evaluate(sol); Fitness f = (*_ioh_pb)(sol);
if(_has_log) { if(_has_log) {
_ioh_log->do_log(_ioh_pb->loggerInfo()); _ioh_log->log(_ioh_pb->log_info());
} }
return f; return f;
} }
}; };
/** Wrap an IOHexperimenter's suite class within an eoEvalFunc. Useful for algorithm selection. // /** Wrap an IOHexperimenter's suite class within an eoEvalFunc. Useful for algorithm selection.
* // *
* WARNING: only handle a suite of problems of A UNIQUE, SINGLE DIMENSION. // * WARNING: only handle a suite of problems of A UNIQUE, SINGLE DIMENSION.
* Because a given eoAlgo is bound to an instantiated eoInit (most probably an eoInitWithDim) // * Because a given eoAlgo is bound to an instantiated eoInit (most probably an eoInitWithDim)
* which is parametrized with a given dimension. // * which is parametrized with a given dimension.
* // *
* The idea is to run the given algorithm on a whole suite of problems // * The idea is to run the given algorithm on a whole suite of problems
* and output its aggregated performance. // * and output its aggregated performance.
* // *
* See https://github.com/IOHprofiler/IOHexperimenter // * See https://github.com/IOHprofiler/IOHexperimenter
* // *
* The main template EOT defines the interface of this functor, // * The main template EOT defines the interface of this functor,
* that is how the algorithm instance is encoded // * that is how the algorithm instance is encoded
* (e.g. an eoAlgoFoundry's integer vector). // * (e.g. an eoAlgoFoundry's integer vector).
* The SUBEOT template defines the encoding of the sub-problem, // * The SUBEOT template defines the encoding of the sub-problem,
* which the encoded algorithm have to solve // * which the encoded algorithm have to solve
* (e.g. a OneMax problem). // * (e.g. a OneMax problem).
* // *
* @note: This will not reset the given pop between two calls // * @note: This will not reset the given pop between two calls
* of the given algorithm on new problems. // * of the given algorithm on new problems.
* You most probably want to wrap your algorithm // * You most probably want to wrap your algorithm
* in an eoAlgoRestart to do that for you. // * in an eoAlgoRestart to do that for you.
* // *
* Handle only IOHprofiler `stat` classes which template type STAT // * Handle only IOH experimenter `stat` classes which template type STAT
* is explicitly convertible to the given fitness. // * is explicitly convertible to the given fitness.
* Any scalar is most probably already convertible, but compound classes // * Any scalar is most probably already convertible, but compound classes
* (i.e. for multi-objective problems) are most probably not. // * (i.e. for multi-objective problems) are most probably not.
* // *
* @note: You're responsible of adding a conversion operator // * @note: You're responsible of adding a conversion operator
* to the given STAT type, if necessary // * to the given STAT type, if necessary
* (this is checked by a static assert in the constructor). // * (this is checked by a static assert in the constructor).
* // *
* @note: You're also responsible of matching the fitness' encoding scalar type // * @note: You're also responsible of matching the fitness' encoding scalar type
* (IOH handle double and int, as of 2020-03-09). // * (IOH handle double and int, as of 2020-03-09).
* // *
* You will need to pass the IOH include directory to your compiler // * You will need to pass the IOH include directory to your compiler
* (e.g. IOHexperimenter/build/Cpp/src/). // * (e.g. IOHexperimenter/build/Cpp/src/).
*/ // */
template<class EOT, class SUBEOT, class STAT> // template<class EOT, class SUBEOT, class STAT>
class eoEvalIOHsuiteSingleDim : public eoEvalFunc<EOT> // class eoEvalIOHsuiteSingleDim : public eoEvalFunc<EOT>
{ // {
public: // public:
using EOType = EOT; // using EOType = EOT;
using Fitness = typename EOType::Fitness; // using Fitness = typename EOType::Fitness;
using ScalarType = typename Fitness::ScalarType; // using ScalarType = typename Fitness::ScalarType;
/** Takes an ecdf_logger that computes the base data structure // /** Takes an ecdf_logger that computes the base data structure
* on which a ecdf_stat will be called to compute an // * on which a ecdf_stat will be called to compute an
* aggregated performance measure, which will be the evaluated fitness. // * aggregated performance measure, which will be the evaluated fitness.
* // *
* As such, the logger and the stat are mandatory. // * As such, the logger and the stat are mandatory.
* // *
* @note: The given logger should be at least embedded // * @note: The given logger should be at least embedded
* in the logger bound with the given eval. // * in the logger bound with the given eval.
*/ // */
eoEvalIOHsuiteSingleDim( // eoEvalIOHsuiteSingleDim(
eoEvalIOHproblem<SUBEOT>& eval, // eoEvalIOHproblem<SUBEOT>& eval,
eoAlgoFoundry<SUBEOT>& algo, // eoAlgoFoundry<SUBEOT>& algo,
eoPop<SUBEOT>& pop, // eoPop<SUBEOT>& pop,
IOHprofiler_suite<ScalarType>& suite, // ioh::suite::Suite<ScalarType>& suite,
IOHprofiler_ecdf_logger<ScalarType>& log, // ioh::logger::ECDF<ScalarType>& log,
IOHprofiler_ecdf_stat<STAT>& stat // ioh::logger::ECDFStat<STAT>& stat
) : // ) :
_eval(eval), // _eval(eval),
_algo(algo), // _algo(algo),
_pop(pop), // _pop(pop),
_ioh_suite(&suite), // _ioh_suite(&suite),
_ioh_log(log), // _ioh_log(log),
_ioh_stat(stat) // _ioh_stat(stat)
{ // {
static_assert(std::is_convertible<STAT,Fitness>::value); // static_assert(std::is_convertible<STAT,Fitness>::value);
assert(eval.has_log()); // assert(eval.has_log());
_ioh_log.target_suite(suite); // _ioh_log.target_suite(suite);
} // }
virtual void operator()(EOType& sol) // virtual void operator()(EOType& sol)
{ // {
if(not sol.invalid()) { // if(not sol.invalid()) {
return; // return;
} // }
sol.fitness( call( sol ) ); // sol.fitness( call( sol ) );
} // }
/** Update the suite pointer for a new one. // /** Update the suite pointer for a new one.
* // *
* This is useful if you assembled a ParadisEO algorithm // * This is useful if you assembled a ParadisEO algorithm
* and call it several time in an IOHexperimenter's loop across several suites. // * and call it several time in an IOHexperimenter's loop across several suites.
* Instead of re-assembling your algorithm, // * Instead of re-assembling your algorithm,
* just update the suite pointer. // * just update the suite pointer.
*/ // */
void suite( IOHprofiler_suite<ScalarType> & suite ) // void suite( ioh::suite::Suite<ScalarType> & suite )
{ // {
_ioh_suite = &suite; // _ioh_suite = &suite;
_ioh_log.target_suite(suite); // _ioh_log.target_suite(suite);
} // }
protected: // protected:
//! Sub-problem @{ // //! Sub-problem @{
eoEvalIOHproblem<SUBEOT>& _eval; // eoEvalIOHproblem<SUBEOT>& _eval;
eoAlgoFoundry<SUBEOT>& _algo; // eoAlgoFoundry<SUBEOT>& _algo;
eoPop<SUBEOT>& _pop; // eoPop<SUBEOT>& _pop;
//! @} // //! @}
//! IOH @{ // //! IOH @{
IOHprofiler_suite<ScalarType> * _ioh_suite; // ioh::suite::Suite<ScalarType> * _ioh_suite;
IOHprofiler_observer<ScalarType> & _ioh_log; // ioh::logger::Observer<ScalarType> & _ioh_log;
IOHprofiler_ecdf_stat<STAT>& _ioh_stat; // ioh::logger::ECDFStat<STAT>& _ioh_stat;
//! @} // //! @}
virtual Fitness call(EOType& sol) // virtual Fitness call(EOType& sol)
{ // {
// Decode the algorithm encoded in sol. // // Decode the algorithm encoded in sol.
_algo = sol; // _algo = sol;
// Evaluate the performance of the encoded algo instance // // Evaluate the performance of the encoded algo instance
// on a whole IOH suite benchmark. // // on a whole IOH suite benchmark.
typename IOHprofiler_suite<ScalarType>::Problem_ptr pb; // typename ioh::suite::Suite<ScalarType>::Problem_ptr pb;
while( (pb = _ioh_suite->get_next_problem()) ) { // while( (pb = _ioh_suite->get_next_problem()) ) {
// Consider a new problem. // // Consider a new problem.
_eval.problem(*pb); // Will call logger's target_problem. // _eval.problem(*pb); // Will call logger's target_problem.
// Actually solve it. // // Actually solve it.
_algo(_pop); // Will call the logger's write_line. // _algo(_pop); // Will call the logger's write_line.
// There's no need to get back the best fitness from ParadisEO, // // There's no need to get back the best fitness from ParadisEO,
// because everything is captured on-the-fly by IOHprofiler. // // because everything is captured on-the-fly by IOH experimenter.
} // }
// Get back the evaluated performance. // // Get back the evaluated performance.
// The explicit cast from STAT to Fitness which should exists. // // The explicit cast from STAT to Fitness which should exists.
return static_cast<Fitness>(_ioh_stat(_ioh_log.data())); // return static_cast<Fitness>(_ioh_stat(_ioh_log.data()));
} // }
}; // };
/** Hook executed before the search starts on each problem of an IOH suite.
 *
 * Between two consecutive problems of a suite, some operators of your
 * algorithm most probably have to be re-instantiated — at the very least
 * the ones depending on the problem dimension, since it may change from
 * one call to the next.
 *
 * Implementing this interface gives you access to both the population and
 * the upcoming problem, i.e. all the information needed to perform that setup.
 *
 * NOTE(review): the IOH API has since been refactored into the `ioh::`
 * namespace (IOHprofiler_suite -> ioh::suite::Suite); this interface still
 * targets the pre-refactoring names — confirm against the IOH version in use.
 */
template<class EOT>
class eoIOHSetup : public eoFunctorBase
{
    public:
        // Scalar type of the solutions' encoding.
        using AtomType = typename EOT::AtomType;

        /** Called with the current population and the next problem to be solved. */
        virtual void operator()(eoPop<EOT>& pop, typename IOHprofiler_suite<AtomType>::Problem_ptr pb) = 0;
};
/** Wrap an IOHexperimenter suite within an eoEvalFunc. Useful for algorithm selection.
 *
 * The idea is to run the given (encoded) algorithm on a whole suite of problems
 * and output its aggregated performance as the fitness of the encoding.
 *
 * See https://github.com/IOHprofiler/IOHexperimenter
 *
 * The main template EOT defines the interface of this functor,
 * that is, how the algorithm instance is encoded
 * (e.g. an eoAlgoFoundry's integer vector).
 * The SUBEOT template defines the encoding of the sub-problem
 * which the encoded algorithm has to solve
 * (e.g. a OneMax problem).
 *
 * @note This will not reset the given pop between two calls
 *       of the given algorithm on new problems.
 *       You most probably want to wrap your algorithm
 *       in an eoAlgoRestart to do that for you.
 *
 * Handles only IOHprofiler `stat` classes whose template type STAT
 * is explicitly convertible to the given fitness.
 * Any scalar is most probably already convertible, but compound classes
 * (i.e. for multi-objective problems) are most probably not.
 *
 * @note You are responsible for adding a conversion operator
 *       to the given STAT type, if necessary
 *       (this is checked by a static assert in the constructor).
 *
 * @note You are also responsible for matching the fitness' encoding scalar type
 *       (IOH handles double and int, as of 2020-03-09).
 *
 * You will need to pass the IOH include directory to your compiler
 * (e.g. IOHexperimenter/build/Cpp/src/).
 *
 * NOTE(review): the IOH API has since been refactored into the `ioh::`
 * namespace (IOHprofiler_suite -> ioh::suite::Suite,
 * IOHprofiler_ecdf_logger -> ioh::logger::ECDF,
 * IOHprofiler_ecdf_stat -> ioh::logger::ECDFStat); this class still
 * targets the pre-refactoring names.
 */
template<class EOT, class SUBEOT, class STAT>
class eoEvalIOHsuite : public eoEvalFunc<EOT>
{
    public:
        // Fitness of the meta-level solution (the encoded algorithm instance).
        using Fitness = typename EOT::Fitness;
        using ScalarType = typename Fitness::ScalarType;
        // Scalar type of the sub-problem solutions' encoding.
        using SubAtomType = typename SUBEOT::AtomType;

        /** Takes an ecdf_logger that computes the base data structure
         * on which an ecdf_stat will be called to compute an
         * aggregated performance measure, which will be the evaluated fitness.
         *
         * As such, the logger and the stat are mandatory.
         *
         * @note The given logger should be at least embedded
         *       in the logger that is bound to the given eval.
         *
         * @param eval    Evaluator of sub-problem solutions; must have a logger bound
         *                (asserted below).
         * @param foundry Set of operators from which the encoded algorithm is assembled.
         * @param pop     Population used to solve each sub-problem (not reset here).
         * @param setup   Hook called before each problem, e.g. to re-instantiate operators.
         * @param suite   IOH suite of benchmark problems to run over.
         * @param log     ECDF logger aggregating the runs across the suite.
         * @param stat    Statistic reducing the logger's data to a single value.
         */
        eoEvalIOHsuite(
            eoEvalIOHproblem<SUBEOT>& eval,
            eoAlgoFoundry<SUBEOT>& foundry,
            eoPop<SUBEOT>& pop,
            eoIOHSetup<SUBEOT>& setup,
            IOHprofiler_suite<SubAtomType>& suite,
            IOHprofiler_ecdf_logger<SubAtomType>& log,
            IOHprofiler_ecdf_stat<STAT>& stat
        ) :
            _eval(eval),
            _foundry(foundry),
            _pop(pop),
            _setup(setup),
            _ioh_suite(&suite),
            _ioh_log(log),
            _ioh_stat(stat)
        {
            // The stat's result becomes the fitness, hence the convertibility requirement.
            static_assert(std::is_convertible<STAT,Fitness>::value);
            assert(_eval.has_logger());
            _ioh_log.track_suite(suite);
        }

        /** Evaluate the encoded algorithm, if its fitness is invalid. */
        virtual void operator()(EOT& sol)
        {
            // Do not re-evaluate an already-valid fitness.
            if(not sol.invalid()) {
                return;
            }
            sol.fitness( call( sol ) );
        }

        /** Update the suite pointer for a new one.
         *
         * This is useful if you assembled a ParadisEO algorithm
         * and call it several times in an IOHexperimenter loop across several suites.
         * Instead of re-assembling your algorithm,
         * just update the suite pointer.
         */
        void suite( IOHprofiler_suite<SubAtomType> & suite )
        {
            _ioh_suite = &suite;
            // NOTE(review): the constructor calls track_suite() on the same logger;
            // confirm that target_suite() is the intended method of the IOH logger API here.
            _ioh_log.target_suite(suite);
        }

    protected:
        eoEvalIOHproblem<SUBEOT>& _eval;
        eoAlgoFoundry<SUBEOT>& _foundry;
        eoPop<SUBEOT>& _pop;
        eoIOHSetup<SUBEOT>& _setup;
        // Pointer (not reference) so that suite() can rebind it.
        IOHprofiler_suite<SubAtomType> * _ioh_suite;
        IOHprofiler_ecdf_logger<SubAtomType> & _ioh_log;
        IOHprofiler_ecdf_stat<STAT>& _ioh_stat;

        /** Run the encoded algorithm over the whole suite and return its aggregated performance. */
        virtual Fitness call(EOT& sol)
        {
            // Select an algorithm in the foundry
            // from the given encoded solution
            // (each scalar is floored to an operator index).
            std::vector<size_t> encoding;
            std::transform(std::begin(sol), std::end(sol), std::back_inserter(encoding),
                [](const SubAtomType& v) -> size_t {return static_cast<size_t>(std::floor(v));} );
            _foundry.select(encoding);

            // Evaluate the performance of the encoded algo instance
            // on a whole IOH suite benchmark.
            typename IOHprofiler_suite<SubAtomType>::Problem_ptr pb;
            while( (pb = _ioh_suite->get_next_problem()) ) {
                // Setup selected operators (e.g. the dimension-dependent ones).
                _setup(_pop, pb);
                // Consider a new problem.
                _eval.problem(*pb); // Will call logger's target_problem.
                // Actually solve it.
                _foundry(_pop); // Will call the logger's write_line.
                // There's no need to get back the best fitness from ParadisEO,
                // because everything is captured on-the-fly by IOHprofiler.
            }
            // Get back the evaluated performance.
            // The explicit cast from STAT to Fitness, which should exist
            // (checked by the static_assert in the constructor).
            return static_cast<Fitness>(_ioh_stat(_ioh_log.data()));
        }
};
#endif // _eoEvalIOH_h #endif // _eoEvalIOH_h