16 #include "random_search.h"
407 if(new_warning_parameters_norm < 0.0)
409 std::ostringstream buffer;
411 buffer <<
"OpenNN Exception: RandomSearch class.\n"
412 <<
"void set_warning_parameters_norm(const double&) method.\n"
413 <<
"Warning parameters norm must be equal or greater than 0.\n";
415 throw std::logic_error(buffer.str());
439 if(new_warning_training_rate < 0.0)
441 std::ostringstream buffer;
443 buffer <<
"OpenNN Exception: RandomSearch class.\n"
444 <<
"void set_warning_training_rate(const double&) method.\n"
445 <<
"Warning training rate must be equal or greater than 0.\n";
447 throw std::logic_error(buffer.str());
468 if(new_error_parameters_norm < 0.0)
470 std::ostringstream buffer;
472 buffer <<
"OpenNN Exception: RandomSearch class.\n"
473 <<
"void set_error_parameters_norm(const double&) method.\n"
474 <<
"Error parameters norm must be equal or greater than 0.\n";
476 throw std::logic_error(buffer.str());
499 if(new_error_training_rate < 0.0)
501 std::ostringstream buffer;
503 buffer <<
"OpenNN Exception: RandomSearch class.\n"
504 <<
"void set_error_training_rate(const double&) method.\n"
505 <<
"Error training rate must be equal or greater than 0.\n";
507 throw std::logic_error(buffer.str());
565 if(new_maximum_time < 0.0)
567 std::ostringstream buffer;
569 buffer <<
"OpenNN Exception: RandomSearch class.\n"
570 <<
"void set_maximum_time(const double&) method.\n"
571 <<
"Maximum time must be equal or greater than 0.\n";
573 throw std::logic_error(buffer.str());
656 if(new_display_period <= 0)
658 std::ostringstream buffer;
660 buffer <<
"OpenNN Exception: RandomSearch class.\n"
661 <<
"void set_display_period(const double&) method.\n"
662 <<
"First training rate must be greater than 0.\n";
664 throw std::logic_error(buffer.str());
691 }
while(random_norm == 0.0);
693 return(random/random_norm);
748 std::ostringstream buffer;
752 if(!parameters_history.empty())
754 buffer <<
"% Parameters history:\n"
755 << parameters_history <<
"\n";
760 if(!parameters_norm_history.empty())
762 buffer <<
"% Parameters norm history:\n"
763 << parameters_norm_history <<
"\n";
768 if(!performance_history.empty())
770 buffer <<
"% performance history:\n"
771 << performance_history <<
"\n";
776 if(!generalization_performance_history.empty())
778 buffer <<
"% Generalization performance history:\n"
779 << generalization_performance_history <<
"\n";
784 if(!training_direction_history.empty())
786 if(!training_direction_history[0].empty())
788 buffer <<
"% Training direction history:\n"
789 << training_direction_history <<
"\n";
795 if(!training_rate_history.empty())
797 buffer <<
"% Training rate history:\n"
798 << training_rate_history <<
"\n";
803 if(!elapsed_time_history.empty())
805 buffer <<
"% Elapsed time history:\n"
806 << elapsed_time_history <<
"\n";
809 return(buffer.str());
817 std::ostringstream buffer;
824 names.push_back(
"Final parameters norm");
827 buffer << std::setprecision(precision) <<final_parameters_norm;
829 values.push_back(buffer.str());
833 names.push_back(
"Final performance");
836 buffer << std::setprecision(precision) << final_performance;
838 values.push_back(buffer.str());
846 names.push_back(
"Final generalization performance");
849 buffer << std::setprecision(precision) << final_generalization_performance;
851 values.push_back(buffer.str());
865 names.push_back(
"Iterations number");
868 buffer << iterations_number;
870 values.push_back(buffer.str());
874 names.push_back(
"Elapsed time");
877 buffer << elapsed_time;
879 values.push_back(buffer.str());
881 const size_t rows_number = names.size();
882 const size_t columns_number = 2;
889 return(final_results);
915 std::cout <<
"Training with random search...\n";
921 time_t beginning_time, current_time;
922 time(&beginning_time);
932 double parameters_norm;
936 double performance = 0.0;
937 double potential_performance = 1.0e99;
939 double generalization_performance = 0.0;
940 double old_generalization_performance = 0.0;
942 size_t generalization_failures = 0;
947 double training_rate = 1.0;
950 double potential_parameters_norm;
955 bool stop_training =
false;
967 std::cout <<
"OpenNN Warning: Parameters norm is " << parameters_norm <<
".\n";
979 if(iteration != 0 && generalization_performance > old_generalization_performance)
981 generalization_failures++;
995 parameters_increment = training_direction*training_rate;
998 potential_parameters = parameters + parameters_increment;
999 potential_parameters_norm = potential_parameters.
calculate_norm();
1001 time(¤t_time);
1002 elapsed_time = difftime(current_time, beginning_time);
1062 std::cout <<
"Iteration " << iteration <<
": Performance goal reached.\n";
1065 stop_training =
true;
1072 std::cout <<
"Iteration " << iteration <<
": Maximum number of iterations reached.\n";
1075 stop_training =
true;
1082 std::cout <<
"Iteration " << iteration <<
": Maximum training time reached.\n";
1085 stop_training =
true;
1088 if(iteration != 0 && iteration %
save_period == 0)
1097 std::cout <<
"Parameters norm: " << parameters_norm <<
"\n"
1098 <<
"Potential parameters norm: " << potential_parameters_norm <<
"\n"
1099 <<
"Performance: " << performance <<
"\n"
1101 <<
"Potential performance: " << potential_performance <<
"\n"
1102 <<
"Training rate: " << training_rate <<
"\n"
1103 <<
"Elapsed time: " << elapsed_time << std::endl;
1105 if(generalization_performance != 0)
1107 std::cout <<
"Generalization performance: " << generalization_performance << std::endl;
1128 std::cout <<
"Iteration " << iteration <<
";\n"
1129 <<
"Parameters norm: " << parameters_norm <<
"\n"
1130 <<
"Potential parameters norm: " << potential_parameters_norm <<
"\n"
1131 <<
"Performance: " << performance <<
"\n"
1133 <<
"Potential performance: " << potential_performance <<
"\n"
1134 <<
"Training rate: " << training_rate <<
"\n"
1135 <<
"Elapsed time: " << elapsed_time << std::endl;
1137 if(generalization_performance != 0)
1139 std::cout <<
"Generalization performance: " << generalization_performance << std::endl;
1145 if(potential_performance < performance)
1147 parameters = potential_parameters;
1151 performance = potential_performance;
1154 old_generalization_performance = generalization_performance;
1158 return(results_pointer);
1166 return(
"RANDOM_SEARCH");
1176 std::ostringstream buffer;
1183 labels.push_back(
"Performance goal");
1188 values.push_back(buffer.str());
1192 labels.push_back(
"Maximum generalization performance decreases");
1197 values.push_back(buffer.str());
1201 labels.push_back(
"Maximum iterations number");
1206 values.push_back(buffer.str());
1210 labels.push_back(
"Maximum time");
1215 values.push_back(buffer.str());
1219 labels.push_back(
"Reserve parameters norm history");
1224 values.push_back(buffer.str());
1228 labels.push_back(
"Reserve performance history");
1233 values.push_back(buffer.str());
1237 labels.push_back(
"Reserve generalization performance history");
1242 values.push_back(buffer.str());
1262 labels.push_back(
"Reserve elapsed time history");
1267 values.push_back(buffer.str());
1269 const size_t rows_number = labels.size();
1270 const size_t columns_number = 2;
1277 return(string_matrix);
1288 std::ostringstream buffer;
1290 tinyxml2::XMLDocument* document =
new tinyxml2::XMLDocument;
1294 tinyxml2::XMLElement* root_element = document->NewElement(
"RandomSearch");
1296 document->InsertFirstChild(root_element);
1298 tinyxml2::XMLElement* element = NULL;
1299 tinyxml2::XMLText* text = NULL;
1303 element = document->NewElement(
"TrainingRateReductionFactor");
1304 root_element->LinkEndChild(element);
1309 text = document->NewText(buffer.str().c_str());
1310 element->LinkEndChild(text);
1315 element = document->NewElement(
"TrainingRateReductionPeriod");
1316 root_element->LinkEndChild(element);
1321 text = document->NewText(buffer.str().c_str());
1322 element->LinkEndChild(text);
1327 element = document->NewElement(
"FirstTrainingRate");
1328 root_element->LinkEndChild(element);
1333 text = document->NewText(buffer.str().c_str());
1334 element->LinkEndChild(text);
1339 element = document->NewElement(
"WarningParametersNorm");
1340 root_element->LinkEndChild(element);
1345 text = document->NewText(buffer.str().c_str());
1346 element->LinkEndChild(text);
1351 element = document->NewElement(
"WarningTrainingRate");
1352 root_element->LinkEndChild(element);
1357 text = document->NewText(buffer.str().c_str());
1358 element->LinkEndChild(text);
1363 element = document->NewElement(
"ErrorParametersNorm");
1364 root_element->LinkEndChild(element);
1369 text = document->NewText(buffer.str().c_str());
1370 element->LinkEndChild(text);
1375 element = document->NewElement(
"ErrorTrainingRate");
1376 root_element->LinkEndChild(element);
1381 text = document->NewText(buffer.str().c_str());
1382 element->LinkEndChild(text);
1387 element = document->NewElement(
"PerformanceGoal");
1388 root_element->LinkEndChild(element);
1393 text = document->NewText(buffer.str().c_str());
1394 element->LinkEndChild(text);
1399 element = document->NewElement(
"MaximumGeneralizationPerformanceDecreases");
1400 root_element->LinkEndChild(element);
1405 text = document->NewText(buffer.str().c_str());
1406 element->LinkEndChild(text);
1411 element = document->NewElement(
"MaximumIterationsNumber");
1412 root_element->LinkEndChild(element);
1417 text = document->NewText(buffer.str().c_str());
1418 element->LinkEndChild(text);
1423 element = document->NewElement(
"MaximumTime");
1424 root_element->LinkEndChild(element);
1429 text = document->NewText(buffer.str().c_str());
1430 element->LinkEndChild(text);
1435 element = document->NewElement(
"ReserveParametersHistory");
1436 root_element->LinkEndChild(element);
1441 text = document->NewText(buffer.str().c_str());
1442 element->LinkEndChild(text);
1447 element = document->NewElement(
"ReserveParametersNormHistory");
1448 root_element->LinkEndChild(element);
1453 text = document->NewText(buffer.str().c_str());
1454 element->LinkEndChild(text);
1459 element = document->NewElement(
"ReservePerformanceHistory");
1460 root_element->LinkEndChild(element);
1465 text = document->NewText(buffer.str().c_str());
1466 element->LinkEndChild(text);
1471 element = document->NewElement(
"ReserveGeneralizationPerformanceHistory");
1472 root_element->LinkEndChild(element);
1477 text = document->NewText(buffer.str().c_str());
1478 element->LinkEndChild(text);
1483 element = document->NewElement(
"ReserveTrainingDirectionHistory");
1484 root_element->LinkEndChild(element);
1489 text = document->NewText(buffer.str().c_str());
1490 element->LinkEndChild(text);
1495 element = document->NewElement(
"ReserveTrainingRateHistory");
1496 root_element->LinkEndChild(element);
1501 text = document->NewText(buffer.str().c_str());
1502 element->LinkEndChild(text);
1507 element = document->NewElement(
"ReserveElapsedTimeHistory");
1508 root_element->LinkEndChild(element);
1513 text = document->NewText(buffer.str().c_str());
1514 element->LinkEndChild(text);
1519 element = document->NewElement(
"ReserveGeneralizationperformanceHistory");
1520 root_element->LinkEndChild(element);
1525 text = document->NewText(buffer.str().c_str());
1526 element->LinkEndChild(text);
1531 element = document->NewElement(
"DisplayPeriod");
1532 root_element->LinkEndChild(element);
1537 text = document->NewText(buffer.str().c_str());
1538 element->LinkEndChild(text);
1543 element = document->NewElement(
"Display");
1544 root_element->LinkEndChild(element);
1549 text = document->NewText(buffer.str().c_str());
1550 element->LinkEndChild(text);
1561 const tinyxml2::XMLElement* root_element = document.FirstChildElement(
"RandomSearch");
1565 std::ostringstream buffer;
1567 buffer <<
"OpenNN Exception: RandomSearch class.\n"
1568 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1569 <<
"Random search element is NULL.\n";
1571 throw std::logic_error(buffer.str());
1576 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"FirstTrainingRate");
1580 const double new_first_training_rate = atof(element->GetText());
1586 catch(
const std::logic_error& e)
1588 std::cout << e.what() << std::endl;
1595 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"TrainingRateReductionFactor");
1599 const double new_training_rate_reduction_factor = atof(element->GetText());
1605 catch(
const std::logic_error& e)
1607 std::cout << e.what() << std::endl;
1614 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"TrainingRateReductionPeriod");
1618 const size_t new_training_rate_reduction_period = atoi(element->GetText());
1624 catch(
const std::logic_error& e)
1626 std::cout << e.what() << std::endl;
1633 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"WarningParametersNorm");
1637 const double new_warning_parameters_norm = atof(element->GetText());
1643 catch(
const std::logic_error& e)
1645 std::cout << e.what() << std::endl;
1652 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"WarningTrainingRate");
1656 const double new_warning_training_rate = atof(element->GetText());
1662 catch(
const std::logic_error& e)
1664 std::cout << e.what() << std::endl;
1671 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ErrorParametersNorm");
1675 const double new_error_parameters_norm = atof(element->GetText());
1681 catch(
const std::logic_error& e)
1683 std::cout << e.what() << std::endl;
1690 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ErrorTrainingRate");
1694 const double new_error_training_rate = atof(element->GetText());
1700 catch(
const std::logic_error& e)
1702 std::cout << e.what() << std::endl;
1709 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"PerformanceGoal");
1713 const double new_performance_goal = atof(element->GetText());
1719 catch(
const std::logic_error& e)
1721 std::cout << e.what() << std::endl;
1728 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"MaximumGeneralizationPerformanceDecreases");
1732 const size_t new_maximum_generalization_performance_decreases = atoi(element->GetText());
1738 catch(
const std::logic_error& e)
1740 std::cout << e.what() << std::endl;
1747 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"MaximumIterationsNumber");
1751 const size_t new_maximum_iterations_number = atoi(element->GetText());
1757 catch(
const std::logic_error& e)
1759 std::cout << e.what() << std::endl;
1766 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"MaximumTime");
1770 const double new_maximum_time = atof(element->GetText());
1776 catch(
const std::logic_error& e)
1778 std::cout << e.what() << std::endl;
1785 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ReserveParametersHistory");
1789 const std::string new_reserve_parameters_history = element->GetText();
1795 catch(
const std::logic_error& e)
1797 std::cout << e.what() << std::endl;
1804 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ReserveParametersNormHistory");
1808 const std::string new_reserve_parameters_norm_history = element->GetText();
1814 catch(
const std::logic_error& e)
1816 std::cout << e.what() << std::endl;
1823 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ReservePerformanceHistory");
1827 const std::string new_reserve_performance_history = element->GetText();
1833 catch(
const std::logic_error& e)
1835 std::cout << e.what() << std::endl;
1842 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ReserveGeneralizationPerformanceHistory");
1846 const std::string new_reserve_generalization_performance_history = element->GetText();
1852 catch(
const std::logic_error& e)
1854 std::cout << e.what() << std::endl;
1861 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ReserveTrainingDirectionHistory");
1865 const std::string new_reserve_training_direction_history = element->GetText();
1871 catch(
const std::logic_error& e)
1873 std::cout << e.what() << std::endl;
1880 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ReserveTrainingDirectionNormHistory");
1884 const std::string new_reserve_training_direction_norm_history = element->GetText();
1890 catch(
const std::logic_error& e)
1892 std::cout << e.what() << std::endl;
1899 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ReserveTrainingRateHistory");
1903 const std::string new_reserve_training_rate_history = element->GetText();
1909 catch(
const std::logic_error& e)
1911 std::cout << e.what() << std::endl;
1918 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"ReserveElapsedTimeHistory");
1922 const std::string new_reserve_elapsed_time_history = element->GetText();
1928 catch(
const std::logic_error& e)
1930 std::cout << e.what() << std::endl;
1937 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"DisplayPeriod");
1941 const size_t new_display_period = atoi(element->GetText());
1947 catch(
const std::logic_error& e)
1949 std::cout << e.what() << std::endl;
1956 const tinyxml2::XMLElement* element = root_element->FirstChildElement(
"Display");
1960 const std::string new_display = element->GetText();
1966 catch(
const std::logic_error& e)
1968 std::cout << e.what() << std::endl;
void set_error_parameters_norm(const double &)
size_t count_parameters_number(void) const
void randomize_uniform(const double &=-1.0, const double &=1.0)
void set_error_training_rate(const double &)
double maximum_time
Maximum training time. It is used as a stopping criterion.
void resize_training_history(const size_t &)
void set_training_rate_reduction_factor(const double &)
void set_first_training_rate(const double &)
bool reserve_generalization_performance_history
True if the generalization performance history vector is to be reserved, false otherwise.
size_t maximum_generalization_performance_decreases
void set_display_period(const size_t &)
const bool & get_reserve_parameters_norm_history(void) const
Returns true if the parameters norm history vector is to be reserved, and false otherwise.
Vector< double > arrange_parameters(void) const
RandomSearch * random_search_pointer
Pointer to the random search object for which the training results are to be stored.
bool reserve_training_rate_history
True if the training rate history vector is to be reserved, false otherwise.
const double & get_error_parameters_norm(void) const
bool display
Display messages to screen.
Vector< double > parameters_norm_history
History of the parameters norm over the training iterations.
Vector< Vector< double > > parameters_history
History of the neural network parameters over the training iterations.
std::string write_training_algorithm_type(void) const
This method writes a string with the type of training algorithm.
size_t maximum_iterations_number
Maximum number of iterations to perform training. It is used as a stopping criterion.
Vector< double > generalization_performance_history
History of the generalization performance over the training iterations.
virtual void set_reserve_all_training_history(const bool &)
Makes the training history of all variables to be reserved or not in memory.
double final_parameters_norm
Final neural network parameters norm.
void set_training_rate_reduction_period(const size_t &)
bool reserve_training_direction_history
True if the training direction history matrix is to be reserved, false otherwise.
void set_reserve_parameters_norm_history(const bool &)
const double & get_maximum_time(void) const
Returns the maximum training time.
const double & get_warning_parameters_norm(void) const
Returns the minimum value for the norm of the parameters vector at which a warning message is written to the screen.
void set_reserve_elapsed_time_history(const bool &)
bool reserve_performance_history
True if the performance history vector is to be reserved, false otherwise.
Vector< Vector< double > > training_direction_history
History of the random search training direction over the training iterations.
void set_maximum_iterations_number(const size_t &)
void set_maximum_generalization_performance_decreases(const size_t &)
double final_training_rate
Final random search training rate.
Vector< double > performance_history
History of the performance function performance over the training iterations.
bool reserve_parameters_history
True if the parameters history vector of vectors is to be reserved, false otherwise.
const size_t & get_training_rate_reduction_period(void) const
Returns the reducing period for the training rate.
size_t save_period
Number of iterations between the training saving progress.
void set_reserve_training_rate_history(const bool &)
void from_XML(const tinyxml2::XMLDocument &)
const size_t & get_maximum_iterations_number(void) const
Returns the maximum number of iterations for training.
const double & get_performance_goal(void) const
const bool & get_reserve_performance_history(void) const
Returns true if the performance history vector is to be reserved, and false otherwise.
void set_warning_parameters_norm(const double &)
double warning_parameters_norm
Value for the parameters norm at which a warning message is written to the screen.
double calculate_norm(void) const
Returns the vector norm.
Vector< double > elapsed_time_history
History of the elapsed time over the training iterations.
void set_display(const bool &)
size_t iterations_number
Maximum number of training iterations.
std::string neural_network_file_name
Path where the neural network is saved.
const double & get_training_rate_reduction_factor(void) const
Returns the reducing factor for the training rate.
const bool & get_reserve_training_rate_history(void) const
Returns true if the training rate history vector is to be reserved, and false otherwise.
std::string to_string(void) const
Returns a string representation of the current random search results structure.
double warning_training_rate
Training rate value at which a warning message is written to the screen.
void set_column(const size_t &, const Vector< T > &)
Vector< double > final_parameters
Final neural network parameters vector.
double training_rate_reduction_factor
bool reserve_elapsed_time_history
True if the elapsed time history vector is to be reserved, false otherwise.
const bool & get_reserve_parameters_history(void) const
Returns true if the parameters history matrix is to be reserved, and false otherwise.
double final_generalization_performance
Final generalization performance.
RandomSearchResults * perform_training(void)
void set_reserve_parameters_history(const bool &)
const double & get_warning_training_rate(void) const
Returns the training rate value at which a warning message is written to the screen during line minimization.
void set_warning_training_rate(const double &)
double final_performance
Final performance function evaluation.
const bool & get_reserve_training_direction_history(void) const
Returns true if the training direction history matrix is to be reserved, and false otherwise.
virtual ~RandomSearch(void)
const double & get_error_training_rate(void) const
void save(const std::string &) const
const bool & get_reserve_generalization_performance_history(void) const
Returns true if the generalization performance history vector is to be reserved, and false otherwise.
void set_reserve_performance_history(const bool &)
virtual void check(void) const
const size_t & get_maximum_generalization_performance_decreases(void) const
Returns the maximum number of generalization failures during the training process.
Matrix< std::string > to_string_matrix(void) const
size_t training_rate_reduction_period
Iterations interval at which the training rate is reduced.
double first_training_rate
Initial training rate following a random training direction.
Vector< double > training_rate_history
History of the random search training rate over the training iterations.
void set_reserve_training_direction_history(const bool &)
const bool & get_reserve_elapsed_time_history(void) const
Returns true if the elapsed time history vector is to be reserved, and false otherwise.
double error_parameters_norm
Value for the parameters norm at which the training process is assumed to fail.
void set_reserve_generalization_performance_history(const bool &)
Vector< double > final_training_direction
Final random search training direction.
bool reserve_parameters_norm_history
True if the parameters norm history vector is to be reserved, false otherwise.
bool reserve_training_direction_norm_history
True if the training direction norm history vector is to be reserved, false otherwise.
double elapsed_time
Elapsed time of the training process.
PerformanceFunctional * performance_functional_pointer
Pointer to a performance functional for a multilayer perceptron object.
double performance_goal
Goal value for the performance. It is used as a stopping criterion.
Vector< double > calculate_training_direction(void) const
Calculates a random vector to be used as training direction.
void set_reserve_training_direction_norm_history(const bool &)
size_t display_period
Number of iterations between the training showing progress.
void set_maximum_time(const double &)
Matrix< std::string > write_final_results(const size_t &precision=3) const
Returns a default (empty) string matrix with the final results from training.
void set_performance_goal(const double &)
double error_training_rate
Training rate at which the line minimization algorithm is assumed to be unable to bracket a minimum.
void set_parameters(const Vector< double > &)
tinyxml2::XMLDocument * to_XML(void) const