@@ -60,7 +60,7 @@ void tokenize(const std::string &str,
60
60
61
61
std::mutex fmtx; // WARNING: bbob function calls are NOT thread-safe (learnt the hard way...).
62
62
63
- void MY_OPTIMIZER (double (*fitnessfunction)(double *), unsigned int dim, double ftarget, double maxfunevals, int alg, bool noisy, bool withnumgradient)
63
+ void MY_OPTIMIZER (double (*fitnessfunction)(double *), unsigned int dim, double ftarget, double maxfunevals, int alg, bool noisy, bool withnumgradient, bool withtpa )
64
64
{
65
65
// map fct to libcmaes FitFunc.
66
66
FitFunc ff = [&](const double *x, const int N)
@@ -88,6 +88,7 @@ void MY_OPTIMIZER(double(*fitnessfunction)(double*), unsigned int dim, double ft
88
88
cmaparams.set_algo (alg);
89
89
cmaparams.set_quiet (true );
90
90
cmaparams.set_gradient (withnumgradient);
91
+ cmaparams.set_tpa (withtpa);
91
92
cmaparams.set_mt_feval (true );
92
93
if (noisy)
93
94
cmaparams.set_noisy ();
@@ -103,6 +104,7 @@ DEFINE_string(comment,"","comment for the experiment. If using multiple algorith
103
104
DEFINE_double (maxfunevals,1e6 ," maximum number of function evaluations" );
104
105
DEFINE_double (minfunevals,-1 ," minimum number of function evaluations, -1 for automatic definition based on dimension" );
105
106
DEFINE_bool (with_num_gradient,false ," whether to use numerical gradient injection" );
107
+ DEFINE_bool (tpa,false ," whether to use two-point adaptation for step-size update" );
106
108
107
109
int main (int argc, char *argv[])
108
110
{
@@ -220,7 +222,7 @@ int main(int argc, char *argv[])
220
222
if (++independent_restarts > 0 )
221
223
fgeneric_restart (" independent restart" ); /* additional info */
222
224
MY_OPTIMIZER (&fgeneric_evaluate, dim[idx_dim], fgeneric_ftarget (),
223
- maxfunevals - fgeneric_evaluations (), (*mit).first , FLAGS_noisy, FLAGS_with_num_gradient);
225
+ maxfunevals - fgeneric_evaluations (), (*mit).first , FLAGS_noisy, FLAGS_with_num_gradient, FLAGS_tpa );
224
226
if (fgeneric_best () < fgeneric_ftarget ())
225
227
break ;
226
228
}
0 commit comments