Skip to content

Commit

Permalink
First genetic search implementation.
Browse files Browse the repository at this point in the history
  • Loading branch information
khuck committed Apr 12, 2024
1 parent ee30513 commit d8c73d2
Show file tree
Hide file tree
Showing 5 changed files with 211 additions and 10 deletions.
2 changes: 2 additions & 0 deletions src/apex/CMakeLists_standalone.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@ csv_parser.cpp
dependency_tree.cpp
event_listener.cpp
exhaustive.cpp
genetic_search.cpp
handler.cpp
memory_wrapper.cpp
nvtx_listener.cpp
Expand Down Expand Up @@ -266,6 +267,7 @@ SET(APEX_PUBLIC_HEADERS apex.h
apex_policies.hpp
exhaustive.hpp
dependency_tree.hpp
genetic_search.hpp
handler.hpp
memory_wrapper.hpp
profile.hpp
Expand Down
100 changes: 100 additions & 0 deletions src/apex/apex_policies.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -907,6 +907,35 @@ int apex_sa_policy(shared_ptr<apex_tuning_session> tuning_session,
return APEX_NOERROR;
}

/* Periodic policy callback driving the genetic search for one tuning
 * session: report the last measurement, then request new settings.
 * Returns APEX_NOERROR in all cases. */
int apex_genetic_policy(shared_ptr<apex_tuning_session> tuning_session,
    apex_context const context) {
    APEX_UNUSED(context);
    if (apex_final) return APEX_NOERROR; // we terminated
    std::unique_lock<std::mutex> l{shutdown_mutex};
    if (tuning_session->genetic_session.converged()) {
        /* Pin the tuned parameters at the best observed values on every
         * invocation after convergence. saveBestSettings() is idempotent,
         * so the former second call inside the first-convergence branch
         * was redundant and has been removed. */
        tuning_session->genetic_session.saveBestSettings();
        if (!tuning_session->converged_message) {
            // announce convergence exactly once per session
            tuning_session->converged_message = true;
            cout << "APEX: Tuning has converged for session " << tuning_session->id
                 << "." << endl;
            tuning_session->genetic_session.printBestSettings();
        }
        return APEX_NOERROR;
    }

    // get a measurement of our current setting
    double new_value = tuning_session->metric_of_interest();

    /* Report the performance we've just measured. */
    tuning_session->genetic_session.evaluate(new_value);

    /* Request new settings for next time */
    tuning_session->genetic_session.getNewSettings();

    return APEX_NOERROR;
}

int apex_exhaustive_policy(shared_ptr<apex_tuning_session> tuning_session,
apex_context const context) {
APEX_UNUSED(context);
Expand Down Expand Up @@ -1512,6 +1541,67 @@ inline int __sa_setup(shared_ptr<apex_tuning_session>
return APEX_NOERROR;
}

/* Set up the genetic search for a tuning request: iterate over the
 * request's parameters and register one search Variable per parameter,
 * enumerating every value that parameter can take. Returns APEX_ERROR
 * on an unknown parameter type, APEX_NOERROR otherwise.
 * (Previously this carried a copy-pasted "Simulated annealing" comment
 * and an APEX_UNUSED(tuning_session) even though tuning_session is
 * used below — both removed.) */
inline int __genetic_setup(shared_ptr<apex_tuning_session>
    tuning_session, apex_tuning_request & request) {
    using namespace apex::genetic;
    for(auto & kv : request.params) {
        auto & param = kv.second;
        // NOTE(review): if get_name() returns a std::string by value this
        // pointer dangles as soon as the temporary dies — confirm that
        // get_name() returns a reference.
        const char * param_name = param->get_name().c_str();
        switch(param->get_type()) {
        case apex_param_type::LONG: {
            auto param_long =
                std::static_pointer_cast<apex_param_long>(param);
            Variable v(VariableType::longtype, param_long->value.get());
            // Enumerate [min, max] in increments of step.
            // (Assumes step > 0; a non-positive step would loop forever.)
            long lvalue = param_long->min;
            do {
                v.lvalues.push_back(lvalue);
                lvalue = lvalue + param_long->step;
            } while (lvalue <= param_long->max);
            v.set_init();
            tuning_session->genetic_session.add_var(param_name, std::move(v));
            }
            break;
        case apex_param_type::DOUBLE: {
            auto param_double =
                std::static_pointer_cast<apex_param_double>(param);
            Variable v(VariableType::doubletype, param_double->value.get());
            // Enumerate [min, max] in increments of step (same caveat).
            double dvalue = param_double->min;
            do {
                v.dvalues.push_back(dvalue);
                dvalue = dvalue + param_double->step;
            } while (dvalue <= param_double->max);
            v.set_init();
            tuning_session->genetic_session.add_var(param_name, std::move(v));
            }
            break;
        case apex_param_type::ENUM: {
            auto param_enum =
                std::static_pointer_cast<apex_param_enum>(param);
            Variable v(VariableType::stringtype, param_enum->value.get());
            // Enumerations already list their possible values explicitly.
            for(const std::string & possible_value :
                param_enum->possible_values) {
                v.svalues.push_back(possible_value);
            }
            v.set_init();
            tuning_session->genetic_session.add_var(param_name, std::move(v));
            }
            break;
        default:
            cerr <<
                "ERROR: Attempted to register tuning parameter with unknown type."
                << endl;
            return APEX_ERROR;
        }
    }
    /* request initial settings */
    tuning_session->genetic_session.getNewSettings();

    return APEX_NOERROR;
}

inline int __exhaustive_setup(shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request) {
APEX_UNUSED(tuning_session);
Expand Down Expand Up @@ -1727,6 +1817,16 @@ inline int __common_setup_custom_tuning(shared_ptr<apex_tuning_session>
}
);
}
} else if (request.strategy == apex_ah_tuning_strategy::APEX_GENETIC) {
status = __genetic_setup(tuning_session, request);
if(status == APEX_NOERROR) {
apex::register_policy(
request.trigger,
[=](apex_context const & context)->int {
return apex_genetic_policy(tuning_session, context);
}
);
}
} else if (request.strategy == apex_ah_tuning_strategy::APEX_EXHAUSTIVE) {
status = __exhaustive_setup(tuning_session, request);
if(status == APEX_NOERROR) {
Expand Down
21 changes: 19 additions & 2 deletions src/apex/apex_policies.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,15 @@
#include "exhaustive.hpp"
// include the random class
#include "random.hpp"
// include the genetic_search class
#include "genetic_search.hpp"

enum class apex_param_type : int {NONE, LONG, DOUBLE, ENUM};
enum class apex_ah_tuning_strategy : int {EXHAUSTIVE, RANDOM, NELDER_MEAD,
PARALLEL_RANK_ORDER, SIMULATED_ANNEALING, APEX_EXHAUSTIVE, APEX_RANDOM};
enum class apex_ah_tuning_strategy : int {
EXHAUSTIVE, RANDOM, NELDER_MEAD,
PARALLEL_RANK_ORDER, SIMULATED_ANNEALING,
APEX_EXHAUSTIVE, APEX_RANDOM,
APEX_GENETIC};

struct apex_tuning_session;
class apex_tuning_request;
Expand Down Expand Up @@ -75,6 +80,8 @@ class apex_param {
tuning_session, apex_tuning_request & request);
friend int __random_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
friend int __genetic_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
};

class apex_param_long : public apex_param {
Expand Down Expand Up @@ -112,6 +119,8 @@ class apex_param_long : public apex_param {
tuning_session, apex_tuning_request & request);
friend int __random_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
friend int __genetic_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
};

class apex_param_double : public apex_param {
Expand Down Expand Up @@ -149,6 +158,8 @@ class apex_param_double : public apex_param {
tuning_session, apex_tuning_request & request);
friend int __random_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
friend int __genetic_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
};

class apex_param_enum : public apex_param {
Expand Down Expand Up @@ -185,6 +196,8 @@ class apex_param_enum : public apex_param {
tuning_session, apex_tuning_request & request);
friend int __random_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
friend int __genetic_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
};


Expand Down Expand Up @@ -325,6 +338,8 @@ class apex_tuning_request {
tuning_session, apex_tuning_request & request);
friend int __random_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
friend int __genetic_setup(std::shared_ptr<apex_tuning_session>
tuning_session, apex_tuning_request & request);
};


Expand All @@ -351,6 +366,8 @@ struct apex_tuning_session {
apex::exhaustive::Exhaustive exhaustive_session;
// if using exhaustive, this is the request.
apex::random::Random random_session;
// if using genetic, this is the request.
apex::genetic::GeneticSearch genetic_session;
bool converged_message = false;

// variables related to power throttling
Expand Down
79 changes: 77 additions & 2 deletions src/apex/genetic_search.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

namespace apex {

namespace random {
namespace genetic {

double inline myrand() {
return ((double) rand() / (RAND_MAX));
Expand Down Expand Up @@ -56,6 +56,75 @@ class log_wrapper {
}
};

/* OK, here's where ALL the book keeping is going to happen.
The basic approach is to do the following:
   1. Create N initial individuals with randomly chosen parameter combinations
2. Evaluate all N individuals
3. Sort individuals
4. Drop bottom 1/2 of individuals
5. Replace dropped individuals with "genes" crossed from each higher ranked
individuals, probabilities proportional to their rankings. That is, the
highest ranked individual is most likely to contribute their genes to
the next generation of individuals.
   6. Mutate some of the new individuals according to some probability
7. Go back to 2, iterate until we don't get a new highest ranked
individual for X generations.
Constants needed:
population size (N): probably some power of 2, dependent on the number of variables?
parent ratio
mutate probability
transfer ratio
crossover
*/

/* Return an integer drawn from the inclusive range [min, max].
 * Uses rand(), so the usual modulo-bias caveat applies when the
 * range size does not evenly divide RAND_MAX + 1. */
auto get_random_number(const std::size_t min, const std::size_t max)
{
    // number of distinct values in the inclusive range
    const std::size_t span = max - min + 1;
    return min + rand() % span;
}

/* Request the next set of parameter values to evaluate.
 * While bootstrapping we sample at random until a full population of
 * evaluated individuals exists; after that we cull the weakest half and
 * breed new individuals from the fittest survivors.
 * Bug fixes versus the first draft:
 *  - `bootstrapping` was never cleared: the unconditional early return
 *    made the cull block (and `bootstrapping = false`) unreachable, so
 *    the search randomly sampled forever.
 *  - get_random_number(0, crossover) is inclusive of `crossover`, which
 *    indexed one past the last survivor after the erase leaves exactly
 *    `crossover` elements; the upper bound is now `crossover - 1`. */
void GeneticSearch::getNewSettings() {
    static bool bootstrapping{true};
    if (bootstrapping) {
        if (population.size() < population_size) {
            // still filling the initial population: random sample
            for (auto& v : vars) { v.second.get_next_neighbor(); }
            return;
        }
        // the population is full — switch to breeding from here on
        bootstrapping = false;
    }
    // time to cull the herd?
    if (population.size() >= population_size) {
        std::cout << "Have population of " << population.size() << " to evaluate!" << std::endl;
        // we need to sort the population...
        sort(population.begin(), population.end(),
            [](const individual& lhs, const individual& rhs) {
                return lhs.cost < rhs.cost;
            });
        // ...then drop half of them - the "weakest" ones.
        population.erase(population.cbegin() + crossover, population.cend());
        std::cout << "Now have population of " << population.size() << std::endl;
    }
    // We want to generate a new individual using two "high quality" parents.
    // Survivor indexes are [0, crossover-1].
    individual& A = population[get_random_number(0, crossover - 1)];
    individual& B = population[get_random_number(0, crossover - 1)];
    // blend their variables into a new individual and maybe mutate?
    size_t i = 0;
    for (auto& v : vars) {
        // if mutating, just get a random value.
        if (get_random_number(0, 100) < mutate_probability) {
            v.second.get_next_neighbor();
        // otherwise, get a "gene" from a parent
        } else if (get_random_number(0, 100) < parent_ratio) {
            v.second.current_index = A.indexes[i];
        } else {
            v.second.current_index = B.indexes[i];
        }
        i++;
    }
}

void GeneticSearch::evaluate(double new_cost) {
static log_wrapper log(vars);
static size_t count{0};
Expand All @@ -73,12 +142,18 @@ void GeneticSearch::evaluate(double new_cost) {
}
cost = new_cost;
}
/* save our individual in the population */
individual i;
i.cost = new_cost;
for (auto& v : vars) { i.indexes.push_back(v.second.current_index); }
population.push_back(i);

//for (auto& v : vars) { v.second.choose_neighbor(); }
k++;
return;
}

} // random
} // genetic

} // apex

Expand Down
19 changes: 13 additions & 6 deletions src/apex/genetic_search.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

namespace apex {

namespace random {
namespace genetic {

enum class VariableType { doubletype, longtype, stringtype } ;

Expand Down Expand Up @@ -81,6 +81,11 @@ class Variable {
}
};

/* One member of the genetic population: the per-Variable value indexes
 * that were tried, plus the measured cost of that trial (lower is
 * fitter). `cost` is now brace-initialized so a default-constructed
 * individual never carries an indeterminate value into the sort. */
struct individual {
    std::vector<size_t> indexes;  // chosen value index for each Variable
    double cost{0.0};             // measured cost of this combination
};

class GeneticSearch {
private:
double cost;
Expand All @@ -90,6 +95,11 @@ class GeneticSearch {
std::map<std::string, Variable> vars;
const size_t max_iterations{1000};
const size_t min_iterations{100};
const size_t population_size{16};
const size_t crossover{8}; // half population
const size_t parent_ratio{50};
const size_t mutate_probability{5};
std::vector<individual> population;
public:
void evaluate(double new_cost);
GeneticSearch() :
Expand All @@ -102,10 +112,7 @@ class GeneticSearch {
}
double getEnergy() { return best_cost; }
bool converged() { return (k > kmax); }
void getNewSettings() {
/* Increment neighbour */
for (auto& v : vars) { v.second.get_next_neighbor(); }
}
void getNewSettings();
void saveBestSettings() {
for (auto& v : vars) { v.second.getBest(); }
}
Expand All @@ -127,6 +134,6 @@ class GeneticSearch {
}
};

} // random
} // genetic

} // apex

0 comments on commit d8c73d2

Please sign in to comment.