random_search.cpp
/****************************************************************************************************************/
/*                                                                                                              */
/*   OpenNN: Open Neural Networks Library                                                                       */
/*   www.artelnics.com/opennn                                                                                   */
/*                                                                                                              */
/*   R A N D O M   S E A R C H   C L A S S                                                                      */
/*                                                                                                              */
/*   Roberto Lopez                                                                                              */
/*   Artelnics - Making intelligent use of data                                                                 */
/*                                                                                                              */
/****************************************************************************************************************/

// OpenNN includes

#include "random_search.h"

namespace OpenNN
{
// DEFAULT CONSTRUCTOR

/// Default constructor.
/// It creates a random search training algorithm not associated to any performance functional object.
/// It also initializes the class members to their default values.

RandomSearch::RandomSearch(void)
 : TrainingAlgorithm()
{
   set_default();
}


// PERFORMANCE FUNCTIONAL CONSTRUCTOR

/// Performance functional constructor.
/// It creates a random search training algorithm associated to a performance functional object.
/// It also initializes the class members to their default values.
/// @param new_performance_functional_pointer Pointer to a performance functional object.

RandomSearch::RandomSearch(PerformanceFunctional* new_performance_functional_pointer)
: TrainingAlgorithm(new_performance_functional_pointer)
{
   set_default();
}


// XML CONSTRUCTOR

/// XML constructor.
/// It creates a random search training algorithm not associated to any performance functional object.
/// It also loads the class members from a XML document.
/// @param document TinyXML document containing the members of a random search object.

RandomSearch::RandomSearch(const tinyxml2::XMLDocument& document) : TrainingAlgorithm(document)
{
   from_XML(document);
}


// DESTRUCTOR

/// Destructor.

RandomSearch::~RandomSearch(void)
{

}

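// Construction sketch (illustrative comment, not part of the original file).
// The network architecture and the variable names below are assumptions for
// the example; any NeuralNetwork/PerformanceFunctional pair set up as in the
// OpenNN examples would do.
//
//   NeuralNetwork neural_network(1, 3, 1);                           // multilayer perceptron: 1 input, 3 hidden, 1 output
//   PerformanceFunctional performance_functional(&neural_network);   // performance functional bound to the network
//
//   RandomSearch random_search(&performance_functional);             // members take their default values
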
// const double& get_warning_parameters_norm(void) const method

/// Returns the minimum value for the norm of the parameters vector at which a warning message is written to the screen.

const double& RandomSearch::get_warning_parameters_norm(void) const
{
   return(warning_parameters_norm);
}


// const double& get_warning_training_rate(void) const method

/// Returns the training rate value at which a warning message is written to the screen.

const double& RandomSearch::get_warning_training_rate(void) const
{
   return(warning_training_rate);
}


// const double& get_error_parameters_norm(void) const method

/// Returns the value for the norm of the parameters vector at which the training process is assumed to fail.

const double& RandomSearch::get_error_parameters_norm(void) const
{
   return(error_parameters_norm);
}


// const double& get_error_training_rate(void) const method

/// Returns the training rate value at which the line minimization algorithm is assumed to be unable to bracket a minimum.

const double& RandomSearch::get_error_training_rate(void) const
{
   return(error_training_rate);
}


// const double& get_performance_goal(void) const method

/// Returns the goal value for the performance. It is used as a stopping criterion when training a neural network.

const double& RandomSearch::get_performance_goal(void) const
{
   return(performance_goal);
}


// const size_t& get_maximum_generalization_performance_decreases(void) const method

/// Returns the maximum number of generalization failures during the training process.

const size_t& RandomSearch::get_maximum_generalization_performance_decreases(void) const
{
   return(maximum_generalization_performance_decreases);
}


// const size_t& get_maximum_iterations_number(void) const method

/// Returns the maximum number of iterations for training.

const size_t& RandomSearch::get_maximum_iterations_number(void) const
{
   return(maximum_iterations_number);
}


// const double& get_maximum_time(void) const method

/// Returns the maximum training time.

const double& RandomSearch::get_maximum_time(void) const
{
   return(maximum_time);
}


// const bool& get_reserve_parameters_history(void) const method

/// Returns true if the parameters history matrix is to be reserved, and false otherwise.

const bool& RandomSearch::get_reserve_parameters_history(void) const
{
   return(reserve_parameters_history);
}


// const bool& get_reserve_parameters_norm_history(void) const method

/// Returns true if the parameters norm history vector is to be reserved, and false otherwise.

const bool& RandomSearch::get_reserve_parameters_norm_history(void) const
{
   return(reserve_parameters_norm_history);
}


// const bool& get_reserve_performance_history(void) const method

/// Returns true if the performance history vector is to be reserved, and false otherwise.

const bool& RandomSearch::get_reserve_performance_history(void) const
{
   return(reserve_performance_history);
}


// const bool& get_reserve_training_direction_history(void) const method

/// Returns true if the training direction history matrix is to be reserved, and false otherwise.

const bool& RandomSearch::get_reserve_training_direction_history(void) const
{
   return(reserve_training_direction_history);
}


// const bool& get_reserve_training_rate_history(void) const method

/// Returns true if the training rate history vector is to be reserved, and false otherwise.

const bool& RandomSearch::get_reserve_training_rate_history(void) const
{
   return(reserve_training_rate_history);
}


// const bool& get_reserve_elapsed_time_history(void) const method

/// Returns true if the elapsed time history vector is to be reserved, and false otherwise.

const bool& RandomSearch::get_reserve_elapsed_time_history(void) const
{
   return(reserve_elapsed_time_history);
}


// const bool& get_reserve_generalization_performance_history(void) const method

/// Returns true if the generalization performance history vector is to be reserved, and false otherwise.

const bool& RandomSearch::get_reserve_generalization_performance_history(void) const
{
   return(reserve_generalization_performance_history);
}


// const double& get_training_rate_reduction_factor(void) const method

/// Returns the reducing factor for the training rate.

const double& RandomSearch::get_training_rate_reduction_factor(void) const
{
   return(training_rate_reduction_factor);
}


// const size_t& get_training_rate_reduction_period(void) const method

/// Returns the reducing period for the training rate.

const size_t& RandomSearch::get_training_rate_reduction_period(void) const
{
   return(training_rate_reduction_period);
}

// void set_default(void) method

/// Sets the members of the random search object to their default values.

void RandomSearch::set_default(void)
{
   // TRAINING PARAMETERS

   first_training_rate = 0.01;

   training_rate_reduction_factor = 0.9;
   training_rate_reduction_period = 10;

   // STOPPING CRITERIA

   performance_goal = -1.0e99;

   maximum_iterations_number = 100;
   maximum_time = 1000.0;

   // TRAINING HISTORY

   reserve_parameters_history = false;
   reserve_parameters_norm_history = false;

   reserve_performance_history = true;

   reserve_training_direction_history = false;
   reserve_training_rate_history = false;
   reserve_elapsed_time_history = false;

   // UTILITIES

   warning_parameters_norm = 1.0e6;
   warning_training_rate = 1.0e6;

   error_parameters_norm = 1.0e9;
   error_training_rate = 1.0e9;

   display = true;
   display_period = 10;
}

// void set_first_training_rate(const double&) method

/// Sets a new value for the first training rate, which is used following a random training direction.
/// @param new_first_training_rate First training rate value.

void RandomSearch::set_first_training_rate(const double& new_first_training_rate)
{
   first_training_rate = new_first_training_rate;
}


// void set_training_rate_reduction_factor(const double&) method

/// Sets a new reducing factor for the training rate.
/// @param new_training_rate_reduction_factor Reducing factor for the training rate.

void RandomSearch::set_training_rate_reduction_factor(const double& new_training_rate_reduction_factor)
{
   training_rate_reduction_factor = new_training_rate_reduction_factor;
}


// void set_training_rate_reduction_period(const size_t&) method

/// Sets a new reducing period for the training rate.
/// @param new_training_rate_reduction_period Reducing period for the training rate.

void RandomSearch::set_training_rate_reduction_period(const size_t& new_training_rate_reduction_period)
{
   training_rate_reduction_period = new_training_rate_reduction_period;
}


// void set_reserve_parameters_history(const bool&) method

/// Makes the parameters history vector of vectors to be reserved or not in memory.
/// @param new_reserve_parameters_history True if the parameters history is to be reserved, false otherwise.

void RandomSearch::set_reserve_parameters_history(const bool& new_reserve_parameters_history)
{
   reserve_parameters_history = new_reserve_parameters_history;
}


// void set_reserve_parameters_norm_history(const bool&) method

/// Makes the parameters norm history vector to be reserved or not in memory.
/// @param new_reserve_parameters_norm_history True if the parameters norm history is to be reserved, false otherwise.

void RandomSearch::set_reserve_parameters_norm_history(const bool& new_reserve_parameters_norm_history)
{
   reserve_parameters_norm_history = new_reserve_parameters_norm_history;
}


// void set_reserve_performance_history(const bool&) method

/// Makes the performance history vector to be reserved or not in memory.
/// @param new_reserve_performance_history True if the performance history is to be reserved, false otherwise.

void RandomSearch::set_reserve_performance_history(const bool& new_reserve_performance_history)
{
   reserve_performance_history = new_reserve_performance_history;
}


// void set_reserve_all_training_history(const bool&) method

/// Makes the training history of all variables to be reserved or not in memory.
/// @param new_reserve_all_training_history True if all the training history variables are to be reserved, false otherwise.

void RandomSearch::set_reserve_all_training_history(const bool& new_reserve_all_training_history)
{
   // Neural network

   reserve_parameters_history = new_reserve_all_training_history;
   reserve_parameters_norm_history = new_reserve_all_training_history;

   // Performance functional

   reserve_performance_history = new_reserve_all_training_history;

   // Training algorithm

   reserve_elapsed_time_history = new_reserve_all_training_history;
}

// void set_warning_parameters_norm(const double&) method

/// Sets a new value for the parameters vector norm at which a warning message is written to the screen.
/// @param new_warning_parameters_norm Warning norm of parameters vector value.

void RandomSearch::set_warning_parameters_norm(const double& new_warning_parameters_norm)
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   if(new_warning_parameters_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: RandomSearch class.\n"
             << "void set_warning_parameters_norm(const double&) method.\n"
             << "Warning parameters norm must be equal or greater than 0.\n";

      throw std::logic_error(buffer.str());
   }

   #endif

   // Set warning parameters norm

   warning_parameters_norm = new_warning_parameters_norm;
}


// void set_warning_training_rate(const double&) method

/// Sets a new training rate value at which a warning message is written to the screen.
/// @param new_warning_training_rate Warning training rate value.

void RandomSearch::set_warning_training_rate(const double& new_warning_training_rate)
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   if(new_warning_training_rate < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: RandomSearch class.\n"
             << "void set_warning_training_rate(const double&) method.\n"
             << "Warning training rate must be equal or greater than 0.\n";

      throw std::logic_error(buffer.str());
   }

   #endif

   warning_training_rate = new_warning_training_rate;
}


// void set_error_parameters_norm(const double&) method

/// Sets a new value for the parameters vector norm at which the training process is assumed to fail.
/// @param new_error_parameters_norm Error norm of parameters vector value.

void RandomSearch::set_error_parameters_norm(const double& new_error_parameters_norm)
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   if(new_error_parameters_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: RandomSearch class.\n"
             << "void set_error_parameters_norm(const double&) method.\n"
             << "Error parameters norm must be equal or greater than 0.\n";

      throw std::logic_error(buffer.str());
   }

   #endif

   // Set error parameters norm

   error_parameters_norm = new_error_parameters_norm;
}


// void set_error_training_rate(const double&) method

/// Sets a new training rate value at which the line minimization algorithm is assumed to be unable to bracket a minimum.
/// @param new_error_training_rate Error training rate value.

void RandomSearch::set_error_training_rate(const double& new_error_training_rate)
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   if(new_error_training_rate < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: RandomSearch class.\n"
             << "void set_error_training_rate(const double&) method.\n"
             << "Error training rate must be equal or greater than 0.\n";

      throw std::logic_error(buffer.str());
   }

   #endif

   // Set error training rate

   error_training_rate = new_error_training_rate;
}

// void set_performance_goal(const double&) method

/// Sets a new goal value for the performance. It is used as a stopping criterion when training a neural network.
/// @param new_performance_goal Goal value for the performance.

void RandomSearch::set_performance_goal(const double& new_performance_goal)
{
   performance_goal = new_performance_goal;
}


// void set_maximum_generalization_performance_decreases(const size_t&) method

/// Sets a new maximum number of generalization failures permitted during the training process.
/// @param new_maximum_generalization_performance_decreases Maximum number of iterations in which the generalization performance decreases.

void RandomSearch::set_maximum_generalization_performance_decreases(const size_t& new_maximum_generalization_performance_decreases)
{
   maximum_generalization_performance_decreases = new_maximum_generalization_performance_decreases;
}


// void set_maximum_iterations_number(const size_t&) method

/// Sets a new maximum number of iterations for training.
/// @param new_maximum_iterations_number Maximum number of training iterations.

void RandomSearch::set_maximum_iterations_number(const size_t& new_maximum_iterations_number)
{
   maximum_iterations_number = new_maximum_iterations_number;
}

// void set_maximum_time(const double&) method

/// Sets a new maximum training time.
/// @param new_maximum_time Maximum training time.

void RandomSearch::set_maximum_time(const double& new_maximum_time)
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   if(new_maximum_time < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: RandomSearch class.\n"
             << "void set_maximum_time(const double&) method.\n"
             << "Maximum time must be equal or greater than 0.\n";

      throw std::logic_error(buffer.str());
   }

   #endif

   // Set maximum time

   maximum_time = new_maximum_time;
}

// void set_reserve_training_direction_history(const bool&) method

/// Makes the training direction history vector of vectors to be reserved or not in memory.
/// @param new_reserve_training_direction_history True if the training direction history is to be reserved, false otherwise.

void RandomSearch::set_reserve_training_direction_history(const bool& new_reserve_training_direction_history)
{
   reserve_training_direction_history = new_reserve_training_direction_history;
}


// void set_reserve_training_direction_norm_history(const bool&) method

/// Makes the training direction norm history vector to be reserved or not in memory.
/// @param new_reserve_training_direction_norm_history True if the training direction norm history is to be reserved, false otherwise.

void RandomSearch::set_reserve_training_direction_norm_history(const bool& new_reserve_training_direction_norm_history)
{
   reserve_training_direction_norm_history = new_reserve_training_direction_norm_history;
}


// void set_reserve_training_rate_history(const bool&) method

/// Makes the training rate history vector to be reserved or not in memory.
/// @param new_reserve_training_rate_history True if the training rate history is to be reserved, false otherwise.

void RandomSearch::set_reserve_training_rate_history(const bool& new_reserve_training_rate_history)
{
   reserve_training_rate_history = new_reserve_training_rate_history;
}


// void set_reserve_elapsed_time_history(const bool&) method

/// Makes the elapsed time history vector to be reserved or not in memory.
/// @param new_reserve_elapsed_time_history True if the elapsed time history is to be reserved, false otherwise.

void RandomSearch::set_reserve_elapsed_time_history(const bool& new_reserve_elapsed_time_history)
{
   reserve_elapsed_time_history = new_reserve_elapsed_time_history;
}


// void set_reserve_generalization_performance_history(const bool&) method

/// Makes the generalization performance history vector to be reserved or not in memory.
/// @param new_reserve_generalization_performance_history True if the generalization performance history is to be reserved, false otherwise.

void RandomSearch::set_reserve_generalization_performance_history(const bool& new_reserve_generalization_performance_history)
{
   reserve_generalization_performance_history = new_reserve_generalization_performance_history;
}


// void set_display_period(const size_t&) method

/// Sets a new number of iterations between the training showing progress.
/// @param new_display_period Number of iterations between the training showing progress.

void RandomSearch::set_display_period(const size_t& new_display_period)
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   if(new_display_period == 0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: RandomSearch class.\n"
             << "void set_display_period(const size_t&) method.\n"
             << "Display period must be greater than 0.\n";

      throw std::logic_error(buffer.str());
   }

   #endif

   display_period = new_display_period;
}

// Vector<double> calculate_training_direction(void) const method

/// Calculates a random vector to be used as training direction.

Vector<double> RandomSearch::calculate_training_direction(void) const
{
   const NeuralNetwork* neural_network_pointer = performance_functional_pointer->get_neural_network_pointer();

   const size_t parameters_number = neural_network_pointer->count_parameters_number();

   Vector<double> random(parameters_number);
   double random_norm;

   do
   {
      random.randomize_uniform();
      random_norm = random.calculate_norm();
   }while(random_norm == 0.0);

   return(random/random_norm);
}

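// Note on the direction above: each component of the random vector is drawn
// uniformly (randomize_uniform defaults to [-1,1]) and the vector is then
// divided by its norm, so the training direction is a random unit vector; the
// do-while guards against the unlikely all-zero draw. A minimal standalone
// sketch of the same idea, using only the standard library (illustrative, not
// part of the original file):
//
//   #include <cstddef>
//   #include <cmath>
//   #include <random>
//   #include <vector>
//
//   std::vector<double> random_unit_vector(const std::size_t n)
//   {
//      static std::mt19937 generator(std::random_device{}());
//      std::uniform_real_distribution<double> distribution(-1.0, 1.0);
//
//      std::vector<double> direction(n);
//      double norm;
//
//      do
//      {
//         norm = 0.0;
//
//         for(std::size_t i = 0; i < n; i++)
//         {
//            direction[i] = distribution(generator);
//            norm += direction[i]*direction[i];
//         }
//
//         norm = std::sqrt(norm);
//      }while(norm == 0.0);                       // reject a zero vector, as above
//
//      for(std::size_t i = 0; i < n; i++)
//      {
//         direction[i] /= norm;                   // normalize to unit length
//      }
//
//      return(direction);
//   }
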
// void resize_training_history(const size_t&) method

/// Resizes the training history variables which are to be reserved by the random search.
/// @param new_size Size of training history variables.

void RandomSearch::RandomSearchResults::resize_training_history(const size_t& new_size)
{
   if(random_search_pointer->get_reserve_parameters_history())
   {
      parameters_history.resize(new_size);
   }

   if(random_search_pointer->get_reserve_parameters_norm_history())
   {
      parameters_norm_history.resize(new_size);
   }

   if(random_search_pointer->get_reserve_performance_history())
   {
      performance_history.resize(new_size);
   }

   if(random_search_pointer->get_reserve_generalization_performance_history())
   {
      generalization_performance_history.resize(new_size);
   }

   if(random_search_pointer->get_reserve_training_direction_history())
   {
      training_direction_history.resize(new_size);
   }

   if(random_search_pointer->get_reserve_training_rate_history())
   {
      training_rate_history.resize(new_size);
   }

   if(random_search_pointer->get_reserve_elapsed_time_history())
   {
      elapsed_time_history.resize(new_size);
   }
}

// std::string to_string(void) const method

/// Returns a string representation of the current random search results structure.

std::string RandomSearch::RandomSearchResults::to_string(void) const
{
   std::ostringstream buffer;

   // Parameters history

   if(!parameters_history.empty())
   {
      buffer << "% Parameters history:\n"
             << parameters_history << "\n";
   }

   // Parameters norm history

   if(!parameters_norm_history.empty())
   {
      buffer << "% Parameters norm history:\n"
             << parameters_norm_history << "\n";
   }

   // Performance history

   if(!performance_history.empty())
   {
      buffer << "% Performance history:\n"
             << performance_history << "\n";
   }

   // Generalization performance history

   if(!generalization_performance_history.empty())
   {
      buffer << "% Generalization performance history:\n"
             << generalization_performance_history << "\n";
   }

   // Training direction history

   if(!training_direction_history.empty())
   {
      if(!training_direction_history[0].empty())
      {
         buffer << "% Training direction history:\n"
                << training_direction_history << "\n";
      }
   }

   // Training rate history

   if(!training_rate_history.empty())
   {
      buffer << "% Training rate history:\n"
             << training_rate_history << "\n";
   }

   // Elapsed time history

   if(!elapsed_time_history.empty())
   {
      buffer << "% Elapsed time history:\n"
             << elapsed_time_history << "\n";
   }

   return(buffer.str());
}

// Matrix<std::string> write_final_results(const size_t& precision) const method

/// Returns a string matrix with the final results from training.
/// @param precision Number of significant digits for the numerical values.

Matrix<std::string> RandomSearch::RandomSearchResults::write_final_results(const size_t& precision) const
{
   std::ostringstream buffer;

   Vector<std::string> names;
   Vector<std::string> values;

   // Final parameters norm

   names.push_back("Final parameters norm");

   buffer.str("");
   buffer << std::setprecision(precision) << final_parameters_norm;

   values.push_back(buffer.str());

   // Final performance

   names.push_back("Final performance");

   buffer.str("");
   buffer << std::setprecision(precision) << final_performance;

   values.push_back(buffer.str());

   // Final generalization performance

   const PerformanceFunctional* performance_functional_pointer = random_search_pointer->get_performance_functional_pointer();

   if(performance_functional_pointer->has_generalization())
   {
      names.push_back("Final generalization performance");

      buffer.str("");
      buffer << std::setprecision(precision) << final_generalization_performance;

      values.push_back(buffer.str());
   }

   // Final training rate

//   names.push_back("Final training rate");

//   buffer.str("");
//   buffer << std::setprecision(precision) << final_training_rate;

//   values.push_back(buffer.str());

   // Iterations number

   names.push_back("Iterations number");

   buffer.str("");
   buffer << iterations_number;

   values.push_back(buffer.str());

   // Elapsed time

   names.push_back("Elapsed time");

   buffer.str("");
   buffer << elapsed_time;

   values.push_back(buffer.str());

   const size_t rows_number = names.size();
   const size_t columns_number = 2;

   Matrix<std::string> final_results(rows_number, columns_number);

   final_results.set_column(0, names);
   final_results.set_column(1, values);

   return(final_results);
}

// RandomSearchResults* perform_training(void) method

/// Trains a neural network with an associated performance functional according to the random search training algorithm.
/// Training occurs according to the training parameters.

RandomSearch::RandomSearchResults* RandomSearch::perform_training(void)
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   check();

   #endif

   RandomSearchResults* results_pointer = new RandomSearchResults(this);

   results_pointer->resize_training_history(1 + maximum_iterations_number);

   // Start training

   if(display)
   {
      std::cout << "Training with random search...\n";
   }

   // Elapsed time

   time_t beginning_time, current_time;
   time(&beginning_time);
   double elapsed_time;

   // Neural network stuff

   NeuralNetwork* neural_network_pointer = performance_functional_pointer->get_neural_network_pointer();

   const size_t parameters_number = neural_network_pointer->count_parameters_number();

   Vector<double> parameters = neural_network_pointer->arrange_parameters();
   double parameters_norm;

   // Performance functional stuff

   double performance = 0.0;
   double potential_performance = 1.0e99;

   double generalization_performance = 0.0;
   double old_generalization_performance = 0.0;

   size_t generalization_failures = 0;

   // Training algorithm stuff

   Vector<double> training_direction(parameters_number);
   double training_rate = 1.0;

   Vector<double> potential_parameters(parameters);
   double potential_parameters_norm;

   Vector<double> parameters_increment(parameters_number);
//   double parameters_increment_norm;

   bool stop_training = false;

   // Main loop

   for(size_t iteration = 0; iteration <= maximum_iterations_number; iteration++)
   {
      // Neural network stuff

      parameters_norm = parameters.calculate_norm();

      if(display && parameters_norm >= warning_parameters_norm)
      {
         std::cout << "OpenNN Warning: Parameters norm is " << parameters_norm << ".\n";
      }

      // Performance functional stuff

      if(iteration == 0)
      {
         performance = performance_functional_pointer->calculate_performance();
         generalization_performance = performance_functional_pointer->calculate_generalization_performance();
      }

      if(iteration != 0 && generalization_performance > old_generalization_performance)
      {
         generalization_failures++;
      }

      potential_performance = performance_functional_pointer->calculate_performance(potential_parameters);

      // Training algorithm stuff

      training_direction = calculate_training_direction();

      if(iteration != 0 && iteration%training_rate_reduction_period == 0)
      {
         training_rate *= training_rate_reduction_factor;
      }

      parameters_increment = training_direction*training_rate;
//      parameters_increment_norm = parameters_increment.calculate_norm();

      potential_parameters = parameters + parameters_increment;
      potential_parameters_norm = potential_parameters.calculate_norm();

      time(&current_time);
      elapsed_time = difftime(current_time, beginning_time);

      // Training history neural network

      if(reserve_parameters_history)
      {
         results_pointer->parameters_history[iteration] = parameters;
      }

      if(reserve_parameters_norm_history)
      {
         results_pointer->parameters_norm_history[iteration] = parameters_norm;
      }

      // Training history performance functional

      if(reserve_performance_history)
      {
         results_pointer->performance_history[iteration] = performance;
      }

      if(reserve_generalization_performance_history)
      {
         results_pointer->generalization_performance_history[iteration] = generalization_performance;
      }

      // Training history training algorithm

      if(reserve_training_direction_history)
      {
         results_pointer->training_direction_history[iteration] = training_direction;
      }

      if(reserve_training_rate_history)
      {
         results_pointer->training_rate_history[iteration] = training_rate;
      }

//      if(reserve_potential_parameters_history)
//      {
//         results_pointer->potential_parameters_history[iteration] = potential_parameters;
//      }

//      if(reserve_potential_parameters_norm_history)
//      {
//         results_pointer->potential_parameters_norm_history[iteration] = potential_parameters_norm;
//      }

      if(reserve_elapsed_time_history)
      {
         results_pointer->elapsed_time_history[iteration] = elapsed_time;
      }

      // Stopping criteria

      if(performance <= performance_goal)
      {
         if(display)
         {
            std::cout << "Iteration " << iteration << ": Performance goal reached.\n";
         }

         stop_training = true;
      }

      else if(iteration == maximum_iterations_number)
      {
         if(display)
         {
            std::cout << "Iteration " << iteration << ": Maximum number of iterations reached.\n";
         }

         stop_training = true;
      }

      else if(elapsed_time >= maximum_time)
      {
         if(display)
         {
            std::cout << "Iteration " << iteration << ": Maximum training time reached.\n";
         }

         stop_training = true;
      }

      if(iteration != 0 && iteration % save_period == 0)
      {
         neural_network_pointer->save(neural_network_file_name);
      }

      if(stop_training)
      {
         if(display)
         {
            std::cout << "Parameters norm: " << parameters_norm << "\n"
                      << "Potential parameters norm: " << potential_parameters_norm << "\n"
                      << "Performance: " << performance << "\n"
                      << "Potential performance: " << potential_performance << "\n"
                      << "Training rate: " << training_rate << "\n"
                      << "Elapsed time: " << elapsed_time << std::endl;

            if(generalization_performance != 0)
            {
               std::cout << "Generalization performance: " << generalization_performance << std::endl;
            }
         }

         results_pointer->final_parameters = parameters;
         results_pointer->final_parameters_norm = parameters_norm;

         results_pointer->final_performance = performance;
         results_pointer->final_generalization_performance = generalization_performance;

         results_pointer->final_training_direction = training_direction;
         results_pointer->final_training_rate = training_rate;
         results_pointer->elapsed_time = elapsed_time;

         results_pointer->iterations_number = iteration;

         break;
      }

      else if(display && iteration % display_period == 0)
      {
         std::cout << "Iteration " << iteration << ";\n"
                   << "Parameters norm: " << parameters_norm << "\n"
                   << "Potential parameters norm: " << potential_parameters_norm << "\n"
                   << "Performance: " << performance << "\n"
                   << "Potential performance: " << potential_performance << "\n"
                   << "Training rate: " << training_rate << "\n"
                   << "Elapsed time: " << elapsed_time << std::endl;

         if(generalization_performance != 0)
         {
            std::cout << "Generalization performance: " << generalization_performance << std::endl;
         }
      }

      // Set new parameters

      if(potential_performance < performance)
      {
         parameters = potential_parameters;

         neural_network_pointer->set_parameters(parameters);

         performance = potential_performance;

         generalization_performance = performance_functional_pointer->calculate_generalization_performance();
         old_generalization_performance = generalization_performance;
      }
   }

   return(results_pointer);
}

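// Usage sketch (illustrative comment, not part of the original file). The
// setter values below are arbitrary; perform_training() returns a results
// structure that the caller owns and must delete.
//
//   RandomSearch random_search(&performance_functional);
//
//   random_search.set_performance_goal(1.0e-3);
//   random_search.set_maximum_iterations_number(1000);
//   random_search.set_maximum_time(60.0);                      // seconds
//   random_search.set_reserve_performance_history(true);
//
//   RandomSearch::RandomSearchResults* results_pointer = random_search.perform_training();
//
//   std::cout << results_pointer->to_string() << std::endl;    // print the reserved histories
//
//   delete results_pointer;
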
// std::string write_training_algorithm_type(void) const method

/// This method writes a string with the type of training algorithm.

std::string RandomSearch::write_training_algorithm_type(void) const
{
   return("RANDOM_SEARCH");
}

// Matrix<std::string> to_string_matrix(void) const method

/// Writes as a matrix of strings the most representative attributes of the random search object.

Matrix<std::string> RandomSearch::to_string_matrix(void) const
{
   std::ostringstream buffer;

   Vector<std::string> labels;
   Vector<std::string> values;

   // Performance goal

   labels.push_back("Performance goal");

   buffer.str("");
   buffer << performance_goal;

   values.push_back(buffer.str());

   // Maximum generalization failures

   labels.push_back("Maximum generalization performance decreases");

   buffer.str("");
   buffer << maximum_generalization_performance_decreases;

   values.push_back(buffer.str());

   // Maximum iterations number

   labels.push_back("Maximum iterations number");

   buffer.str("");
   buffer << maximum_iterations_number;

   values.push_back(buffer.str());

   // Maximum time

   labels.push_back("Maximum time");

   buffer.str("");
   buffer << maximum_time;

   values.push_back(buffer.str());

   // Reserve parameters norm history

   labels.push_back("Reserve parameters norm history");

   buffer.str("");
   buffer << reserve_parameters_norm_history;

   values.push_back(buffer.str());

   // Reserve performance history

   labels.push_back("Reserve performance history");

   buffer.str("");
   buffer << reserve_performance_history;

   values.push_back(buffer.str());

   // Reserve generalization performance history

   labels.push_back("Reserve generalization performance history");

   buffer.str("");
   buffer << reserve_generalization_performance_history;

   values.push_back(buffer.str());

   // Reserve training direction norm history

//   labels.push_back("");

//   buffer.str("");
//   buffer << reserve_training_direction_norm_history;

   // Reserve training rate history

//   labels.push_back("");

//   buffer.str("");
//   buffer << reserve_training_rate_history;

//   values.push_back(buffer.str());

   // Reserve elapsed time history

   labels.push_back("Reserve elapsed time history");

   buffer.str("");
   buffer << reserve_elapsed_time_history;

   values.push_back(buffer.str());

   const size_t rows_number = labels.size();
   const size_t columns_number = 2;

   Matrix<std::string> string_matrix(rows_number, columns_number);

   string_matrix.set_column(0, labels);
   string_matrix.set_column(1, values);

   return(string_matrix);
}

// tinyxml2::XMLDocument* to_XML(void) const method

/// Serializes the random search object into a XML document of the TinyXML library.
/// The document contains the training parameters, the stopping criteria and other members.

tinyxml2::XMLDocument* RandomSearch::to_XML(void) const
{
   std::ostringstream buffer;

   tinyxml2::XMLDocument* document = new tinyxml2::XMLDocument;

   // Training algorithm

   tinyxml2::XMLElement* root_element = document->NewElement("RandomSearch");

   document->InsertFirstChild(root_element);

   tinyxml2::XMLElement* element = NULL;
   tinyxml2::XMLText* text = NULL;

   // Training rate reduction factor
   {
      element = document->NewElement("TrainingRateReductionFactor");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << training_rate_reduction_factor;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Training rate reduction period
   {
      element = document->NewElement("TrainingRateReductionPeriod");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << training_rate_reduction_period;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // First training rate
   {
      element = document->NewElement("FirstTrainingRate");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << first_training_rate;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Warning parameters norm
   {
      element = document->NewElement("WarningParametersNorm");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << warning_parameters_norm;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Warning training rate
   {
      element = document->NewElement("WarningTrainingRate");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << warning_training_rate;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Error parameters norm
   {
      element = document->NewElement("ErrorParametersNorm");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << error_parameters_norm;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Error training rate
   {
      element = document->NewElement("ErrorTrainingRate");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << error_training_rate;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Performance goal
   {
      element = document->NewElement("PerformanceGoal");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << performance_goal;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Maximum generalization performance decreases
   {
      element = document->NewElement("MaximumGeneralizationPerformanceDecreases");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << maximum_generalization_performance_decreases;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Maximum iterations number
   {
      element = document->NewElement("MaximumIterationsNumber");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << maximum_iterations_number;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Maximum time
   {
      element = document->NewElement("MaximumTime");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << maximum_time;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Reserve parameters history
   {
      element = document->NewElement("ReserveParametersHistory");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << reserve_parameters_history;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Reserve parameters norm history
   {
      element = document->NewElement("ReserveParametersNormHistory");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << reserve_parameters_norm_history;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Reserve performance history
   {
      element = document->NewElement("ReservePerformanceHistory");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << reserve_performance_history;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Reserve generalization performance history
   {
      element = document->NewElement("ReserveGeneralizationPerformanceHistory");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << reserve_generalization_performance_history;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Reserve training direction history
   {
      element = document->NewElement("ReserveTrainingDirectionHistory");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << reserve_training_direction_history;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Reserve training rate history
   {
      element = document->NewElement("ReserveTrainingRateHistory");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << reserve_training_rate_history;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Reserve elapsed time history
   {
      element = document->NewElement("ReserveElapsedTimeHistory");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << reserve_elapsed_time_history;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Display period
   {
      element = document->NewElement("DisplayPeriod");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << display_period;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   // Display
   {
      element = document->NewElement("Display");
      root_element->LinkEndChild(element);

      buffer.str("");
      buffer << display;

      text = document->NewText(buffer.str().c_str());
      element->LinkEndChild(text);
   }

   return(document);
}

// void from_XML(const tinyxml2::XMLDocument&) method

void RandomSearch::from_XML(const tinyxml2::XMLDocument& document)
{
   const tinyxml2::XMLElement* root_element = document.FirstChildElement("RandomSearch");

   if(!root_element)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: RandomSearch class.\n"
             << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
             << "Random search element is NULL.\n";

      throw std::logic_error(buffer.str());
   }

   // First training rate
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("FirstTrainingRate");

      if(element)
      {
         const double new_first_training_rate = atof(element->GetText());

         try
         {
            set_first_training_rate(new_first_training_rate);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Training rate reduction factor
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("TrainingRateReductionFactor");

      if(element)
      {
         const double new_training_rate_reduction_factor = atof(element->GetText());

         try
         {
            set_training_rate_reduction_factor(new_training_rate_reduction_factor);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Training rate reduction period
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("TrainingRateReductionPeriod");

      if(element)
      {
         const size_t new_training_rate_reduction_period = atoi(element->GetText());

         try
         {
            set_training_rate_reduction_period(new_training_rate_reduction_period);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Warning parameters norm
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("WarningParametersNorm");

      if(element)
      {
         const double new_warning_parameters_norm = atof(element->GetText());

         try
         {
            set_warning_parameters_norm(new_warning_parameters_norm);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Warning training rate
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("WarningTrainingRate");

      if(element)
      {
         const double new_warning_training_rate = atof(element->GetText());

         try
         {
            set_warning_training_rate(new_warning_training_rate);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Error parameters norm
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ErrorParametersNorm");

      if(element)
      {
         const double new_error_parameters_norm = atof(element->GetText());

         try
         {
            set_error_parameters_norm(new_error_parameters_norm);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Error training rate
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ErrorTrainingRate");

      if(element)
      {
         const double new_error_training_rate = atof(element->GetText());

         try
         {
            set_error_training_rate(new_error_training_rate);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Performance goal
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("PerformanceGoal");

      if(element)
      {
         const double new_performance_goal = atof(element->GetText());

         try
         {
            set_performance_goal(new_performance_goal);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Maximum generalization performance decreases
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumGeneralizationPerformanceDecreases");

      if(element)
      {
         const size_t new_maximum_generalization_performance_decreases = atoi(element->GetText());

         try
         {
            set_maximum_generalization_performance_decreases(new_maximum_generalization_performance_decreases);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Maximum iterations number
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumIterationsNumber");

      if(element)
      {
         const size_t new_maximum_iterations_number = atoi(element->GetText());

         try
         {
            set_maximum_iterations_number(new_maximum_iterations_number);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Maximum time
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumTime");

      if(element)
      {
         const double new_maximum_time = atof(element->GetText());

         try
         {
            set_maximum_time(new_maximum_time);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Reserve parameters history
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveParametersHistory");

      if(element)
      {
         const std::string new_reserve_parameters_history = element->GetText();

         try
         {
            set_reserve_parameters_history(new_reserve_parameters_history != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Reserve parameters norm history
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveParametersNormHistory");

      if(element)
      {
         const std::string new_reserve_parameters_norm_history = element->GetText();

         try
         {
            set_reserve_parameters_norm_history(new_reserve_parameters_norm_history != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Reserve performance history
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReservePerformanceHistory");

      if(element)
      {
         const std::string new_reserve_performance_history = element->GetText();

         try
         {
            set_reserve_performance_history(new_reserve_performance_history != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Reserve generalization performance history
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveGeneralizationPerformanceHistory");

      if(element)
      {
         const std::string new_reserve_generalization_performance_history = element->GetText();

         try
         {
            set_reserve_generalization_performance_history(new_reserve_generalization_performance_history != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Reserve training direction history
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveTrainingDirectionHistory");

      if(element)
      {
         const std::string new_reserve_training_direction_history = element->GetText();

         try
         {
            set_reserve_training_direction_history(new_reserve_training_direction_history != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Reserve training direction norm history
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveTrainingDirectionNormHistory");

      if(element)
      {
         const std::string new_reserve_training_direction_norm_history = element->GetText();

         try
         {
            set_reserve_training_direction_norm_history(new_reserve_training_direction_norm_history != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Reserve training rate history
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveTrainingRateHistory");

      if(element)
      {
         const std::string new_reserve_training_rate_history = element->GetText();

         try
         {
            set_reserve_training_rate_history(new_reserve_training_rate_history != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Reserve elapsed time history
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveElapsedTimeHistory");

      if(element)
      {
         const std::string new_reserve_elapsed_time_history = element->GetText();

         try
         {
            set_reserve_elapsed_time_history(new_reserve_elapsed_time_history != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Display period
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("DisplayPeriod");

      if(element)
      {
         const size_t new_display_period = atoi(element->GetText());

         try
         {
            set_display_period(new_display_period);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Display
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("Display");

      if(element)
      {
         const std::string new_display = element->GetText();

         try
         {
            set_display(new_display != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }
}

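// Serialization sketch (illustrative comment, not part of the original file).
// to_XML() allocates a tinyxml2 document that the caller owns; from_XML()
// restores the members from a document whose root element is "RandomSearch".
// The file name is an assumption for the example.
//
//   tinyxml2::XMLDocument* document = random_search.to_XML();
//   document->SaveFile("random_search.xml");
//   delete document;
//
//   tinyxml2::XMLDocument loaded_document;
//   loaded_document.LoadFile("random_search.xml");
//   random_search.from_XML(loaded_document);
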
}

// OpenNN: Open Neural Networks Library.
// Copyright (c) 2005-2015 Roberto Lopez.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA