Commit

clean
RoberLopez committed Jan 15, 2025
1 parent f3adf01 commit 113c190
Showing 6 changed files with 26 additions and 59 deletions.
37 changes: 9 additions & 28 deletions opennn/genetic_algorithm.cpp
@@ -87,7 +87,7 @@ void GeneticAlgorithm::set_default()
     if (!training_strategy || !training_strategy->has_neural_network())
         return;
 
-    const Index genes_number = training_strategy->get_data_set()->get_variables_number(DataSet::VariableUse::Input);
+    const Index genes_number = get_genes_number();
 
     const Index individuals_number = 40;
 
@@ -129,7 +129,7 @@ void GeneticAlgorithm::set_population(const Tensor<bool, 2>& new_population)
 
 void GeneticAlgorithm::set_genes_number(const Index& new_genes_number)
 {
-    genes_number = new_genes_number;
+    // @todo
 }
 
 
@@ -141,28 +141,17 @@ void GeneticAlgorithm::set_maximum_epochs_number(const Index& new_maximum_epochs
 
 void GeneticAlgorithm::set_individuals_number(const Index& new_individuals_number)
 {
-    if(!training_strategy)
-        throw runtime_error("Training strategy is null");
-
-    const DataSet* data_set = training_strategy->get_data_set();
+    if (!training_strategy || !training_strategy->get_data_set())
+        throw runtime_error("Training strategy or data set is null");
 
-    if (!data_set)
-        throw runtime_error("Data set is null");
-
-    const Index new_genes_number = data_set->get_variables_number(DataSet::VariableUse::Input);
+    const Index new_genes_number = training_strategy->get_data_set()->get_variables_number(DataSet::VariableUse::Input);
 
     population.resize(new_individuals_number, new_genes_number);
-
     parameters.resize(new_individuals_number);
-
     training_errors.resize(new_individuals_number);
-
     selection_errors.resize(new_individuals_number);
-
     fitness.resize(new_individuals_number);
-
     fitness.setConstant(type(-1.0));
-
     selection.resize(new_individuals_number);
 
     elitism_size = min(elitism_size, new_individuals_number);
@@ -199,15 +188,9 @@ void GeneticAlgorithm::initialize_population_random()
 {
     DataSet* data_set = training_strategy->get_data_set();
-
-    const Index genes_number = data_set->get_variables_number(DataSet::VariableUse::Input);
-
+    const Index genes_number = get_genes_number();
     const Index individuals_number = get_individuals_number();
-
     population.resize(individuals_number, genes_number);
-
     original_input_raw_variable_indices = data_set->get_raw_variable_indices(DataSet::VariableUse::Input);
     original_target_raw_variable_indices = data_set->get_raw_variable_indices(DataSet::VariableUse::Target);
-
     const Index original_input_raw_variables_number = original_input_raw_variable_indices.size();
-
     const Index random_raw_variables_number = data_set->get_raw_variables_number(DataSet::VariableUse::Input);
@@ -432,8 +415,6 @@ void GeneticAlgorithm::evaluate_population()
         data_set->set_raw_variable_indices(original_input_raw_variable_indices, original_target_raw_variable_indices);
     }
 
-    // Mean generational selection and training error calculation (primitive way)
-
     const Tensor<type, 0> sum_training_errors = training_errors.sum();
     const Tensor<type, 0> sum_selection_errors = selection_errors.sum();
 
@@ -501,11 +482,11 @@ vector<Index> GeneticAlgorithm::get_selected_individuals_indices()
 {
     vector<Index> selection_indices(count(selection.data(), selection.data() + selection.size(), 1));
 
-    Index activated_index_count = 0;
+    Index count = 0;
 
     for(Index i = 0; i < selection.size(); i++)
         if(selection(i))
-            selection_indices[activated_index_count++] = i;
+            selection_indices[count++] = i;
 
     return selection_indices;
 }
@@ -899,7 +880,7 @@ Tensor<bool, 1> GeneticAlgorithm::get_individual_genes(const Tensor<bool, 1>& in
 {
     DataSet* data_set = training_strategy->get_data_set();
 
-    const Index genes_number = data_set->get_variables_number(DataSet::VariableUse::Input);
+    const Index genes_number = get_genes_number();
     const Index raw_variables_number = individual_raw_variables.size();
 
     Tensor<bool, 1> individual_raw_variables_to_variables(genes_number);
2 changes: 0 additions & 2 deletions opennn/genetic_algorithm.h
@@ -134,8 +134,6 @@ class GeneticAlgorithm : public InputsSelection
 
     Tensor<bool, 2> optimal_individuals_history;
 
-    Index genes_number;
-
     type mutation_rate;
 
     Index elitism_size;
4 changes: 2 additions & 2 deletions opennn/growing_neurons.cpp
@@ -249,8 +249,8 @@ void GrowingNeurons::from_XML(const XMLDocument& document)
     if(!root_element)
         throw runtime_error("GrowingNeurons element is nullptr.\n");
 
-    minimum_neurons = read_xml_index(root_element, "MinimumNeurons");
-    maximum_neurons = read_xml_index(root_element, "MaximumNeurons");
+    set_minimum_neurons(read_xml_index(root_element, "MinimumNeurons"));
+    set_maximum_neurons(read_xml_index(root_element, "MaximumNeurons"));
     set_neurons_increment(read_xml_index(root_element, "NeuronsIncrement"));
     set_trials_number(read_xml_index(root_element, "TrialsNumber"));
     set_selection_error_goal(read_xml_type(root_element, "SelectionErrorGoal"));
39 changes: 13 additions & 26 deletions opennn/neurons_selection.cpp
@@ -87,36 +87,24 @@ void NeuronsSelection::set_training_strategy(TrainingStrategy* new_training_stra
 
 void NeuronsSelection::set_default()
 {
-    if(!training_strategy)
-        return;
-
-    NeuralNetwork* neural_network = training_strategy->get_neural_network();
-
-    if(!neural_network)
+    if (!(training_strategy && training_strategy->get_neural_network()))
         return;
 
-    const Index inputs_number = neural_network->get_inputs_number();
-    const Index outputs_number = neural_network->get_outputs_number();
+    const Index inputs_number = training_strategy->get_neural_network()->get_inputs_number();
+    const Index outputs_number = training_strategy->get_neural_network()->get_outputs_number();
 
     minimum_neurons = 1;
-
-    // Heuristic value for the maximum_neurons
-
-    maximum_neurons = 2*(inputs_number + outputs_number);
+    maximum_neurons = 2 * (inputs_number + outputs_number);
     trials_number = 1;
 
     display = true;
-
-    // Stopping criteria
-
     selection_error_goal = type(0);
-
     maximum_epochs_number = 1000;
     maximum_time = type(3600);
 }
 
 
-void NeuronsSelection::set_maximum_neurons_number(const Index& new_maximum_neurons)
+void NeuronsSelection::set_maximum_neurons(const Index& new_maximum_neurons)
 {
     maximum_neurons = new_maximum_neurons;
 }
@@ -216,18 +204,17 @@ void NeuronsSelection::check() const
 
 string NeuronsSelection::write_time(const type& time) const
 {
-    const int hours = int(time) / 3600;
-    int seconds = int(time) % 3600;
-    const int minutes = seconds / 60;
-    seconds = seconds % 60;
+    const int total_seconds = static_cast<int>(time);
+    const int hours = total_seconds / 3600;
+    const int minutes = (total_seconds % 3600) / 60;
+    const int seconds = total_seconds % 60;
 
     ostringstream elapsed_time;
-    elapsed_time << setfill('0') << setw(2)
-                 << hours << ":"
-                 << minutes << ":"
-                 << seconds << endl;
-
+    elapsed_time << setfill('0') << setw(2)
+                 << hours << ":"
+                 << minutes << ":"
+                 << seconds << endl;
 
     return elapsed_time.str();
 }
 
2 changes: 1 addition & 1 deletion opennn/neurons_selection.h
@@ -44,7 +44,7 @@ class NeuronsSelection
 
     void set_default();
 
-    void set_maximum_neurons_number(const Index&);
+    void set_maximum_neurons(const Index&);
     void set_minimum_neurons(const Index&);
     void set_trials_number(const Index&);
 
1 change: 1 addition & 0 deletions opennn/perceptron_layer_3d.cpp
@@ -28,6 +28,7 @@ Index PerceptronLayer3D::get_inputs_number_xxx() const
     return inputs_number_xxx;
 }
 
+
 Index PerceptronLayer3D::get_inputs_depth() const
 {
     return synaptic_weights.dimension(0);
