OpenNN  2.2
Open Neural Networks Library
performance_functional.cpp
1 /****************************************************************************************************************/
2 /* */
3 /* OpenNN: Open Neural Networks Library */
4 /* www.artelnics.com/opennn */
5 /* */
6 /*   P E R F O R M A N C E   F U N C T I O N A L   C L A S S                                                   */
7 /* */
8 /* Roberto Lopez */
9 /* Artelnics - Making intelligent use of data */
11 /* */
12 /****************************************************************************************************************/
13 
14 // OpenNN includes
15 
16 #include "performance_functional.h"
17 
18 namespace OpenNN
19 {
20 
21 // DEFAULT CONSTRUCTOR
22 
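23 /// Default constructor.
24 /// It creates a performance functional object with all its pointers initialized to NULL.
25 /// It also initializes all the rest of class members to their default values.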
26 
27 PerformanceFunctional::PerformanceFunctional(void)
28  : neural_network_pointer(NULL)
29  , data_set_pointer(NULL)
30  , mathematical_model_pointer(NULL)
31  , sum_squared_error_objective_pointer(NULL)
32  , mean_squared_error_objective_pointer(NULL)
33  , root_mean_squared_error_objective_pointer(NULL)
34  , normalized_squared_error_objective_pointer(NULL)
35  , Minkowski_error_objective_pointer(NULL)
36  , cross_entropy_error_objective_pointer(NULL)
37  , outputs_integrals_objective_pointer(NULL)
38  , solutions_error_objective_pointer(NULL)
39  , final_solutions_error_objective_pointer(NULL)
40  , independent_parameters_error_objective_pointer(NULL)
41  , inverse_sum_squared_error_objective_pointer(NULL)
42  , user_objective_pointer(NULL)
43  , neural_parameters_norm_regularization_pointer(NULL)
44  , outputs_integrals_regularization_pointer(NULL)
45  , user_regularization_pointer(NULL)
46  , outputs_integrals_constraints_pointer(NULL)
47  , solutions_error_constraints_pointer(NULL)
48  , final_solutions_error_constraints_pointer(NULL)
49  , independent_parameters_error_constraints_pointer(NULL)
50  , user_constraints_pointer(NULL)
51 {
52  set_objective_type(NORMALIZED_SQUARED_ERROR_OBJECTIVE);
53  set_regularization_type(NO_REGULARIZATION);
54  set_constraints_type(NO_CONSTRAINTS);
55 
56  set_default();
57 }
58 
59 
60 // NEURAL NETWORK CONSTRUCTOR
61 
67 
68 PerformanceFunctional::PerformanceFunctional(NeuralNetwork* new_neural_network_pointer)
69  : neural_network_pointer(new_neural_network_pointer)
70  , data_set_pointer(NULL)
71  , mathematical_model_pointer(NULL)
72  , sum_squared_error_objective_pointer(NULL)
73  , mean_squared_error_objective_pointer(NULL)
74  , root_mean_squared_error_objective_pointer(NULL)
75  , normalized_squared_error_objective_pointer(NULL)
76  , Minkowski_error_objective_pointer(NULL)
77  , cross_entropy_error_objective_pointer(NULL)
78  , outputs_integrals_objective_pointer(NULL)
79  , solutions_error_objective_pointer(NULL)
80  , final_solutions_error_objective_pointer(NULL)
81  , independent_parameters_error_objective_pointer(NULL)
82  , inverse_sum_squared_error_objective_pointer(NULL)
83  , user_objective_pointer(NULL)
84  , neural_parameters_norm_regularization_pointer(NULL)
85  , outputs_integrals_regularization_pointer(NULL)
86  , user_regularization_pointer(NULL)
87  , outputs_integrals_constraints_pointer(NULL)
88  , solutions_error_constraints_pointer(NULL)
89  , final_solutions_error_constraints_pointer(NULL)
90  , independent_parameters_error_constraints_pointer(NULL)
91  , user_constraints_pointer(NULL)
92 {
93  set_objective_type(NORMALIZED_SQUARED_ERROR_OBJECTIVE);
94  set_regularization_type(NO_REGULARIZATION);
95  set_constraints_type(NO_CONSTRAINTS);
96 
97  set_default();
98 }
99 
100 
101 // NEURAL NETWORK AND DATA SET CONSTRUCTOR
102 
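103 /// Neural network and data set constructor.
104 /// It creates a performance functional object associated to a neural network and a data set object.
105 /// The rest of pointers are initialized to NULL.
106 /// It also initializes all the rest of class members to their default values.
107 /// @param new_neural_network_pointer Pointer to a neural network object.
108 /// @param new_data_set_pointer Pointer to a data set object.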
109 
110 PerformanceFunctional::PerformanceFunctional(NeuralNetwork* new_neural_network_pointer, DataSet* new_data_set_pointer)
111  : neural_network_pointer(new_neural_network_pointer)
112  , data_set_pointer(new_data_set_pointer)
113  , mathematical_model_pointer(NULL)
114  , sum_squared_error_objective_pointer(NULL)
115  , mean_squared_error_objective_pointer(NULL)
116  , root_mean_squared_error_objective_pointer(NULL)
117  , normalized_squared_error_objective_pointer(NULL)
118  , Minkowski_error_objective_pointer(NULL)
119  , cross_entropy_error_objective_pointer(NULL)
120  , outputs_integrals_objective_pointer(NULL)
121  , solutions_error_objective_pointer(NULL)
122  , final_solutions_error_objective_pointer(NULL)
123  , independent_parameters_error_objective_pointer(NULL)
124  , inverse_sum_squared_error_objective_pointer(NULL)
125  , user_objective_pointer(NULL)
126  , neural_parameters_norm_regularization_pointer(NULL)
127  , outputs_integrals_regularization_pointer(NULL)
128  , user_regularization_pointer(NULL)
129  , outputs_integrals_constraints_pointer(NULL)
130  , solutions_error_constraints_pointer(NULL)
131  , final_solutions_error_constraints_pointer(NULL)
132  , independent_parameters_error_constraints_pointer(NULL)
133  , user_constraints_pointer(NULL)
134 {
135  set_objective_type(NORMALIZED_SQUARED_ERROR_OBJECTIVE);
136  set_regularization_type(NO_REGULARIZATION);
137  set_constraints_type(NO_CONSTRAINTS);
138 
139  set_default();
140 
141 }
142 
143 
144 // NEURAL NETWORK AND MATHEMATICAL MODEL CONSTRUCTOR
145 
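146 /// Neural network and mathematical model constructor.
147 /// It creates a performance functional object associated to a neural network and a mathematical model object.
148 /// The rest of pointers are initialized to NULL.
149 /// It also initializes all the rest of class members to their default values.
150 /// @param new_neural_network_pointer Pointer to a neural network object.
151 /// @param new_mathematical_model_pointer Pointer to a mathematical model object.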
152 
153 PerformanceFunctional::PerformanceFunctional(NeuralNetwork* new_neural_network_pointer, MathematicalModel* new_mathematical_model_pointer)
154  : neural_network_pointer(new_neural_network_pointer)
155  , data_set_pointer(NULL)
156  , mathematical_model_pointer(new_mathematical_model_pointer)
157  , sum_squared_error_objective_pointer(NULL)
158  , mean_squared_error_objective_pointer(NULL)
159  , root_mean_squared_error_objective_pointer(NULL)
160  , normalized_squared_error_objective_pointer(NULL)
161  , Minkowski_error_objective_pointer(NULL)
162  , cross_entropy_error_objective_pointer(NULL)
163  , outputs_integrals_objective_pointer(NULL)
164  , solutions_error_objective_pointer(NULL)
165  , final_solutions_error_objective_pointer(NULL)
166  , independent_parameters_error_objective_pointer(NULL)
167  , inverse_sum_squared_error_objective_pointer(NULL)
168  , user_objective_pointer(NULL)
169  , neural_parameters_norm_regularization_pointer(NULL)
170  , outputs_integrals_regularization_pointer(NULL)
171  , user_regularization_pointer(NULL)
172  , outputs_integrals_constraints_pointer(NULL)
173  , solutions_error_constraints_pointer(NULL)
174  , final_solutions_error_constraints_pointer(NULL)
175  , independent_parameters_error_constraints_pointer(NULL)
176  , user_constraints_pointer(NULL)
177 {
178  set_objective_type(NORMALIZED_SQUARED_ERROR_OBJECTIVE);
179  set_regularization_type(NO_REGULARIZATION);
180  set_constraints_type(NO_CONSTRAINTS);
181 
182  set_default();
183 }
184 
185 
186 // NEURAL NETWORK, MATHEMATICAL MODEL AND DATA SET CONSTRUCTOR
187 
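188 /// Neural network, mathematical model and data set constructor.
189 /// It creates a performance functional object associated to a neural network, a mathematical model and a data set object.
190 /// The rest of pointers are initialized to NULL.
191 /// It also initializes all the rest of class members to their default values.
192 /// @param new_neural_network_pointer Pointer to a neural network object.
193 /// @param new_mathematical_model_pointer Pointer to a mathematical model object.
194 /// @param new_data_set_pointer Pointer to a data set object.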
195 
196 PerformanceFunctional::PerformanceFunctional(NeuralNetwork* new_neural_network_pointer, MathematicalModel* new_mathematical_model_pointer, DataSet* new_data_set_pointer)
197  : neural_network_pointer(new_neural_network_pointer)
198  , data_set_pointer(new_data_set_pointer)
199  , mathematical_model_pointer(new_mathematical_model_pointer)
200  , sum_squared_error_objective_pointer(NULL)
201  , mean_squared_error_objective_pointer(NULL)
202  , root_mean_squared_error_objective_pointer(NULL)
203  , normalized_squared_error_objective_pointer(NULL)
204  , Minkowski_error_objective_pointer(NULL)
205  , cross_entropy_error_objective_pointer(NULL)
206  , outputs_integrals_objective_pointer(NULL)
207  , solutions_error_objective_pointer(NULL)
208  , final_solutions_error_objective_pointer(NULL)
209  , independent_parameters_error_objective_pointer(NULL)
210  , inverse_sum_squared_error_objective_pointer(NULL)
211  , user_objective_pointer(NULL)
212  , neural_parameters_norm_regularization_pointer(NULL)
213  , outputs_integrals_regularization_pointer(NULL)
214  , user_regularization_pointer(NULL)
215  , outputs_integrals_constraints_pointer(NULL)
216  , solutions_error_constraints_pointer(NULL)
217  , final_solutions_error_constraints_pointer(NULL)
218  , independent_parameters_error_constraints_pointer(NULL)
219  , user_constraints_pointer(NULL)
220 {
221  set_objective_type(NORMALIZED_SQUARED_ERROR_OBJECTIVE);
222  set_regularization_type(NO_REGULARIZATION);
223  set_constraints_type(NO_CONSTRAINTS);
224 
225  set_default();
226 }
227 
228 
229 // USER OBJECTIVE TERM CONSTRUCTOR
230 
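231 /// User objective term constructor.
232 /// It creates a performance functional object with a given objective term supplied by the user.
233 /// The rest of pointers are initialized to NULL.
234 /// @param new_user_objective_pointer Pointer to a performance term object to be used as objective.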
235 
236 PerformanceFunctional::PerformanceFunctional(PerformanceTerm* new_user_objective_pointer)
237  : neural_network_pointer(NULL)
238  , data_set_pointer(NULL)
239  , mathematical_model_pointer(NULL)
240  , sum_squared_error_objective_pointer(NULL)
241  , mean_squared_error_objective_pointer(NULL)
242  , root_mean_squared_error_objective_pointer(NULL)
243  , normalized_squared_error_objective_pointer(NULL)
244  , Minkowski_error_objective_pointer(NULL)
245  , cross_entropy_error_objective_pointer(NULL)
246  , outputs_integrals_objective_pointer(NULL)
247  , solutions_error_objective_pointer(NULL)
248  , final_solutions_error_objective_pointer(NULL)
249  , independent_parameters_error_objective_pointer(NULL)
250  , inverse_sum_squared_error_objective_pointer(NULL)
251  , user_objective_pointer(new_user_objective_pointer)
252  , neural_parameters_norm_regularization_pointer(NULL)
253  , outputs_integrals_regularization_pointer(NULL)
254  , user_regularization_pointer(NULL)
255  , outputs_integrals_constraints_pointer(NULL)
256  , solutions_error_constraints_pointer(NULL)
257  , final_solutions_error_constraints_pointer(NULL)
258  , independent_parameters_error_constraints_pointer(NULL)
259  , user_constraints_pointer(NULL)
260 {
261  objective_type = USER_OBJECTIVE;
262  set_regularization_type(NO_REGULARIZATION);
263  set_constraints_type(NO_CONSTRAINTS);
264 
265  set_default();
266 }
267 
268 
269 // FILE CONSTRUCTOR
270 
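271 /// File constructor.
272 /// It creates a performance functional object by loading its members from an XML file.
273 /// The pointers to the neural network, data set and mathematical model are initialized to NULL.
274 /// @param file_name Name of the performance functional XML file.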
275 
276 PerformanceFunctional::PerformanceFunctional(const std::string& file_name)
277  : neural_network_pointer(NULL)
278  , data_set_pointer(NULL)
279  , mathematical_model_pointer(NULL)
280  , sum_squared_error_objective_pointer(NULL)
281  , mean_squared_error_objective_pointer(NULL)
282  , root_mean_squared_error_objective_pointer(NULL)
283  , normalized_squared_error_objective_pointer(NULL)
284  , Minkowski_error_objective_pointer(NULL)
285  , cross_entropy_error_objective_pointer(NULL)
286  , outputs_integrals_objective_pointer(NULL)
287  , solutions_error_objective_pointer(NULL)
288  , final_solutions_error_objective_pointer(NULL)
289  , independent_parameters_error_objective_pointer(NULL)
290  , inverse_sum_squared_error_objective_pointer(NULL)
291  , user_objective_pointer(NULL)
292  , neural_parameters_norm_regularization_pointer(NULL)
293  , outputs_integrals_regularization_pointer(NULL)
294  , user_regularization_pointer(NULL)
295  , outputs_integrals_constraints_pointer(NULL)
296  , solutions_error_constraints_pointer(NULL)
297  , final_solutions_error_constraints_pointer(NULL)
298  , independent_parameters_error_constraints_pointer(NULL)
299  , user_constraints_pointer(NULL)
300 {
301  set_objective_type(NORMALIZED_SQUARED_ERROR_OBJECTIVE);
302  set_regularization_type(NO_REGULARIZATION);
303  set_constraints_type(NO_CONSTRAINTS);
304 
305  set_default();
306 
307  load(file_name);
308 }
309 
310 
311 // XML CONSTRUCTOR
312 
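313 /// XML constructor.
314 /// It creates a performance functional object by loading its members from a TinyXML document.
315 /// @param performance_functional_document XML document containing the performance functional members.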
316 
317 PerformanceFunctional::PerformanceFunctional(const tinyxml2::XMLDocument& performance_functional_document)
318  : neural_network_pointer(NULL)
319  , data_set_pointer(NULL)
320  , mathematical_model_pointer(NULL)
321  , sum_squared_error_objective_pointer(NULL)
322  , mean_squared_error_objective_pointer(NULL)
323  , root_mean_squared_error_objective_pointer(NULL)
324  , normalized_squared_error_objective_pointer(NULL)
325  , Minkowski_error_objective_pointer(NULL)
326  , cross_entropy_error_objective_pointer(NULL)
327  , outputs_integrals_objective_pointer(NULL)
328  , solutions_error_objective_pointer(NULL)
329  , final_solutions_error_objective_pointer(NULL)
330  , independent_parameters_error_objective_pointer(NULL)
331  , inverse_sum_squared_error_objective_pointer(NULL)
332  , user_objective_pointer(NULL)
333  , neural_parameters_norm_regularization_pointer(NULL)
334  , outputs_integrals_regularization_pointer(NULL)
335  , user_regularization_pointer(NULL)
336  , outputs_integrals_constraints_pointer(NULL)
337  , solutions_error_constraints_pointer(NULL)
338  , final_solutions_error_constraints_pointer(NULL)
339  , independent_parameters_error_constraints_pointer(NULL)
340  , user_constraints_pointer(NULL)
341 {
342  set_objective_type(NORMALIZED_SQUARED_ERROR_OBJECTIVE);
343  set_regularization_type(NO_REGULARIZATION);
344  set_constraints_type(NO_CONSTRAINTS);
345 
346  set_default();
347 
348  from_XML(performance_functional_document);
349 }
350 
351 
352 // COPY CONSTRUCTOR
353 
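354 /// Copy constructor.
355 /// It creates a performance functional object by copying the composition and the members
356 /// of another performance functional object.
357 /// @param other_performance_functional Performance functional object to be copied.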
358 
359 PerformanceFunctional::PerformanceFunctional(const PerformanceFunctional& other_performance_functional)
360  : neural_network_pointer(NULL)
361  , data_set_pointer(NULL)
362  , mathematical_model_pointer(NULL)
363  , sum_squared_error_objective_pointer(NULL)
364  , mean_squared_error_objective_pointer(NULL)
365  , root_mean_squared_error_objective_pointer(NULL)
366  , normalized_squared_error_objective_pointer(NULL)
367  , Minkowski_error_objective_pointer(NULL)
368  , cross_entropy_error_objective_pointer(NULL)
369  , outputs_integrals_objective_pointer(NULL)
370  , solutions_error_objective_pointer(NULL)
371  , final_solutions_error_objective_pointer(NULL)
372  , independent_parameters_error_objective_pointer(NULL)
373  , inverse_sum_squared_error_objective_pointer(NULL)
374  , user_objective_pointer(NULL)
375  , neural_parameters_norm_regularization_pointer(NULL)
376  , outputs_integrals_regularization_pointer(NULL)
377  , user_regularization_pointer(NULL)
378  , outputs_integrals_constraints_pointer(NULL)
379  , solutions_error_constraints_pointer(NULL)
380  , final_solutions_error_constraints_pointer(NULL)
381  , independent_parameters_error_constraints_pointer(NULL)
382  , user_constraints_pointer(NULL)
383 {
384  neural_network_pointer = other_performance_functional.neural_network_pointer;
385  data_set_pointer = other_performance_functional.data_set_pointer;
386  mathematical_model_pointer = other_performance_functional.mathematical_model_pointer;
387 
388  objective_type = other_performance_functional.objective_type;
389  regularization_type = other_performance_functional.regularization_type;
390  constraints_type = other_performance_functional.constraints_type;
391 
392  // Objective
393 
394  switch(objective_type)
395  {
396  case NO_OBJECTIVE:
397  {
398  // Do nothing
399  }
400  break;
401 
402  case SUM_SQUARED_ERROR_OBJECTIVE:
403  {
405  }
406  break;
407 
408  case MEAN_SQUARED_ERROR_OBJECTIVE:
409  {
411  }
412  break;
413 
414  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
415  {
417  }
418  break;
419 
420  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
421  {
423  }
424  break;
425 
426  case MINKOWSKI_ERROR_OBJECTIVE:
427  {
429  }
430  break;
431 
432  case CROSS_ENTROPY_ERROR_OBJECTIVE:
433  {
435  }
436  break;
437 
438  case OUTPUTS_INTEGRALS_OBJECTIVE:
439  {
441  }
442  break;
443 
444  case SOLUTIONS_ERROR_OBJECTIVE:
445  {
447  }
448  break;
449 
450  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
451  {
453  }
454  break;
455 
456  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
457  {
459  }
460  break;
461 
462  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
463  {
465  }
466  break;
467 
468  case USER_OBJECTIVE:
469  {
470  //user_objective_pointer = new PerformanceTerm(*other_performance_functional.user_objective_pointer);
471  }
472  break;
473 
474  default:
475  {
476  std::ostringstream buffer;
477 
478  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
479  << "Copy constructor.\n"
480  << "Unknown objective type.\n";
481 
482  throw std::logic_error(buffer.str());
483  }
484  break;
485  }
486 
487  // Regularization
488 
489  switch(regularization_type)
490  {
491  case NO_REGULARIZATION:
492  {
493  // Do nothing
494  }
495  break;
496 
497  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
498  {
500  }
501  break;
502 
503  case OUTPUTS_INTEGRALS_REGULARIZATION:
504  {
506  }
507  break;
508 
509  case USER_REGULARIZATION:
510  {
511  //user_regularization_pointer = new PerformanceTerm(*other_performance_functional.user_regularization_pointer);
512  }
513  break;
514 
515  default:
516  {
517  std::ostringstream buffer;
518 
519  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
520  << "Copy constructor.\n"
521  << "Unknown regularization type.\n";
522 
523  throw std::logic_error(buffer.str());
524  }
525  break;
526  }
527 
528  // Constraints
529 
530  switch(constraints_type)
531  {
532  case NO_CONSTRAINTS:
533  {
534  // Do nothing
535  }
536  break;
537 
538  case OUTPUTS_INTEGRALS_CONSTRAINTS:
539  {
541  }
542  break;
543 
544  case SOLUTIONS_ERROR_CONSTRAINTS:
545  {
547  }
548  break;
549 
550  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
551  {
553  }
554  break;
555 
556  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
557  {
559  }
560  break;
561 
562  case USER_CONSTRAINTS:
563  {
564  //user_constraints_pointer = new PerformanceTerm(*other_performance_functional.user_constraints_pointer);
565  }
566  break;
567 
568  default:
569  {
570  std::ostringstream buffer;
571 
572  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
573  << "Copy constructor.\n"
574  << "Unknown constraints type.\n";
575 
576  throw std::logic_error(buffer.str());
577  }
578  break;
579  }
580 
581  display = other_performance_functional.display;
582 }
583 
584 
585 // DESTRUCTOR
586 
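587 /// Destructor.
588 /// It deletes the objective, regularization and constraints terms owned by the performance functional.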
589 
590 PerformanceFunctional::~PerformanceFunctional(void)
591 {
592  // Delete objective terms
593 
594  delete sum_squared_error_objective_pointer;
595  delete mean_squared_error_objective_pointer;
596  delete root_mean_squared_error_objective_pointer;
597  delete normalized_squared_error_objective_pointer;
598  delete Minkowski_error_objective_pointer;
599  delete cross_entropy_error_objective_pointer;
600  delete outputs_integrals_objective_pointer;
601  delete solutions_error_objective_pointer;
602  delete final_solutions_error_objective_pointer;
603  delete independent_parameters_error_objective_pointer;
604  delete inverse_sum_squared_error_objective_pointer;
605  delete user_objective_pointer;
606 
607  // Delete regularization terms
608 
609  delete neural_parameters_norm_regularization_pointer;
610  delete outputs_integrals_regularization_pointer;
611  delete user_regularization_pointer;
612 
613  // Delete constraints terms
614 
615  delete outputs_integrals_constraints_pointer;
616  delete solutions_error_constraints_pointer;
617  delete final_solutions_error_constraints_pointer;
618  delete independent_parameters_error_constraints_pointer;
619  delete user_constraints_pointer;
620 }
621 
622 
623 // METHODS
624 
625 
626 // bool has_neural_network(void) const method
627 
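628 /// Returns true if this performance functional has a neural network associated,
629 /// and false otherwise.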
630 
631 bool PerformanceFunctional::has_neural_network(void) const
632 {
633  if(neural_network_pointer)
634  {
635  return(true);
636  }
637  else
638  {
639  return(false);
640  }
641 }
642 
643 
644 // bool has_mathematical_model(void) const method
645 
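646 /// Returns true if this performance functional has a mathematical model associated,
647 /// and false otherwise.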
648 
649 bool PerformanceFunctional::has_mathematical_model(void) const
650 {
651  if(mathematical_model_pointer)
652  {
653  return(true);
654  }
655  else
656  {
657  return(false);
658  }
659 }
660 
661 
662 // bool has_data_set(void) const method
663 
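664 /// Returns true if this performance functional has a data set associated,
665 /// and false otherwise.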
666 
667 bool PerformanceFunctional::has_data_set(void) const
668 {
669  if(data_set_pointer)
670  {
671  return(true);
672  }
673  else
674  {
675  return(false);
676  }
677 }
678 
679 
680 // bool has_generalization(void) const method
681 
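682 /// Returns true if this performance functional has a data set with generalization instances associated,
683 /// and false otherwise.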
684 
685 bool PerformanceFunctional::has_generalization(void) const
686 {
687  if(!data_set_pointer)
688  {
689  return(false);
690  }
691  else
692  {
693  const size_t generalization_instances_number = data_set_pointer->get_instances().count_generalization_instances_number();
694 
695  if(generalization_instances_number == 0)
696  {
697  return(false);
698  }
699  }
700 
701  return(true);
702 }
703 
704 
705 // bool is_sum_squared_terms(void) const method
706 
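707 /// Returns true if the performance functional is built only from terms that can be expressed
708 /// as a sum of squared errors, and false otherwise.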
709 
710 bool PerformanceFunctional::is_sum_squared_terms(void) const
711 {
712  if(objective_type == ROOT_MEAN_SQUARED_ERROR_OBJECTIVE
713  || objective_type == MINKOWSKI_ERROR_OBJECTIVE
714  || objective_type == CROSS_ENTROPY_ERROR_OBJECTIVE
715  || objective_type == OUTPUTS_INTEGRALS_OBJECTIVE
716  || objective_type == SOLUTIONS_ERROR_OBJECTIVE
717  || objective_type == FINAL_SOLUTIONS_ERROR_OBJECTIVE
718  || objective_type == INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE
719  || objective_type == INVERSE_SUM_SQUARED_ERROR_OBJECTIVE)
720  {
721  return(false);
722  }
723 
724  if(regularization_type == NEURAL_PARAMETERS_NORM_REGULARIZATION
725  || regularization_type == OUTPUTS_INTEGRALS_REGULARIZATION)
726  {
727  return(false);
728  }
729 
730  if(constraints_type == OUTPUTS_INTEGRALS_CONSTRAINTS
731  || constraints_type == SOLUTIONS_ERROR_CONSTRAINTS
732  || constraints_type == FINAL_SOLUTIONS_ERROR_CONSTRAINTS
733  || constraints_type == INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS)
734  {
735  return(false);
736  }
737 
738  return(true);
739 }
740 
741 
742 // void check_neural_network(void) const method
743 
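744 /// Throws an exception if no neural network is associated to the performance functional.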
745 
746 void PerformanceFunctional::check_neural_network(void) const
747 {
748  if(!neural_network_pointer)
749  {
750  std::ostringstream buffer;
751 
752  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
753  << "void check_neural_network(void) const.\n"
754  << "Pointer to neural network is NULL.\n";
755 
756  throw std::logic_error(buffer.str());
757  }
758 }
759 
760 
761 // void check_performance_terms(void) const method
762 
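763 /// Throws an exception if the performance functional has no objective, regularization
764 /// or constraints terms set.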
765 
766 void PerformanceFunctional::check_performance_terms(void) const
767 {
768  if(objective_type == NO_OBJECTIVE
769  && regularization_type == NO_REGULARIZATION
770  && constraints_type == NO_CONSTRAINTS)
771  {
772  std::ostringstream buffer;
773 
774  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
775  << "void check_performance_terms(void) const method.\n"
776  << "No objective, regularization or constraints terms are used.\n";
777 
778  throw std::logic_error(buffer.str());
779 
780  }
781 }
782 
783 
784 // SumSquaredError* get_sum_squared_error_objective_pointer(void) const method
785 
788 
790 {
791  // Control sentence (if debug)
792 
793  #ifndef NDEBUG
794 
796  {
797  std::ostringstream buffer;
798 
799  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
800  << "SumSquaredError* get_sum_squared_error_objective_pointer(void) const method.\n"
801  << "Pointer to sum squared error objective is NULL.\n";
802 
803  throw std::logic_error(buffer.str());
804  }
805 
806  #endif
807 
809 }
810 
811 
812 // MeanSquaredError* get_mean_squared_error_objective_pointer(void) const method
813 
816 
818 {
819  // Control sentence (if debug)
820 
821  #ifndef NDEBUG
822 
824  {
825  std::ostringstream buffer;
826 
827  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
828  << "MeanSquaredError* get_mean_squared_error_objective_pointer(void) const method.\n"
829  << "Pointer to mean squared error objective is NULL.\n";
830 
831  throw std::logic_error(buffer.str());
832  }
833 
834  #endif
835 
837 }
838 
839 
840 // RootMeanSquaredError* get_root_mean_squared_error_objective_pointer(void) const method
841 
844 
846 {
847  // Control sentence (if debug)
848 
849  #ifndef NDEBUG
850 
852  {
853  std::ostringstream buffer;
854 
855  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
856  << "RootMeanSquaredError* get_root_mean_squared_error_objective_pointer(void) const method.\n"
857  << "Pointer to root mean squared error objective is NULL.\n";
858 
859  throw std::logic_error(buffer.str());
860  }
861 
862  #endif
863 
865 }
866 
867 
868 // NormalizedSquaredError* get_normalized_squared_error_objective_pointer(void) const method
869 
872 
874 {
875  // Control sentence (if debug)
876 
877  #ifndef NDEBUG
878 
880  {
881  std::ostringstream buffer;
882 
883  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
884  << "NormalizedSquaredError* get_normalized_squared_error_objective_pointer(void) const method.\n"
885  << "Pointer to normalized squared error objective is NULL.\n";
886 
887  throw std::logic_error(buffer.str());
888  }
889 
890  #endif
891 
893 }
894 
895 
896 // MinkowskiError* get_Minkowski_error_objective_pointer(void) const method
897 
900 
902 {
903  // Control sentence (if debug)
904 
905  #ifndef NDEBUG
906 
908  {
909  std::ostringstream buffer;
910 
911  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
912  << "MinkowskiError* get_Minkowski_error_objective_pointer(void) const method.\n"
913  << "Pointer to Minkowski error objective is NULL.\n";
914 
915  throw std::logic_error(buffer.str());
916  }
917 
918  #endif
919 
921 }
922 
923 
924 // CrossEntropyError* get_cross_entropy_error_objective_pointer(void) const method
925 
928 
930 {
931  // Control sentence (if debug)
932 
933  #ifndef NDEBUG
934 
936  {
937  std::ostringstream buffer;
938 
939  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
940  << "CrossEntropyError* get_cross_entropy_error_objective_pointer(void) const method.\n"
941  << "Pointer to cross entropy error objective is NULL.\n";
942 
943  throw std::logic_error(buffer.str());
944  }
945 
946  #endif
947 
949 }
950 
951 
952 // OutputsIntegrals* get_outputs_integrals_objective_pointer(void) const method
953 
956 
957 
959 {
960  // Control sentence (if debug)
961 
962  #ifndef NDEBUG
963 
965  {
966  std::ostringstream buffer;
967 
968  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
969  << "OutputsIntegrals* get_outputs_integrals_objective_pointer(void) const method.\n"
970  << "Pointer to outputs integrals objective is NULL.\n";
971 
972  throw std::logic_error(buffer.str());
973  }
974 
975  #endif
976 
978 }
979 
980 
981 // SolutionsError* get_solutions_error_objective_pointer(void) const method
982 
985 
987 {
988  // Control sentence (if debug)
989 
990  #ifndef NDEBUG
991 
993  {
994  std::ostringstream buffer;
995 
996  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
997  << "SolutionsError* get_solutions_error_objective_pointer(void) const method.\n"
998  << "Pointer to solutions error objective is NULL.\n";
999 
1000  throw std::logic_error(buffer.str());
1001  }
1002 
1003  #endif
1004 
1006 }
1007 
1008 
1009 // FinalSolutionsError* get_final_solutions_error_objective_pointer(void) const method
1010 
1013 
1015 {
1016  // Control sentence (if debug)
1017 
1018  #ifndef NDEBUG
1019 
1021  {
1022  std::ostringstream buffer;
1023 
1024  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1025  << "FinalSolutionsError* get_final_solutions_error_objective_pointer(void) const method.\n"
1026  << "Pointer to final solutions error objective is NULL.\n";
1027 
1028  throw std::logic_error(buffer.str());
1029  }
1030 
1031  #endif
1032 
1034 }
1035 
1036 
1037 // IndependentParametersError* get_independent_parameters_error_objective_pointer(void) const method
1038 
1041 
1043 {
1044  // Control sentence (if debug)
1045 
1046  #ifndef NDEBUG
1047 
1049  {
1050  std::ostringstream buffer;
1051 
1052  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1053  << "IndependentParametersError* get_independent_parameters_error_objective_pointer(void) const method.\n"
1054  << "Pointer to independent parameters error objective is NULL.\n";
1055 
1056  throw std::logic_error(buffer.str());
1057  }
1058 
1059  #endif
1060 
1062 }
1063 
1064 
1065 // InverseSumSquaredError* get_inverse_sum_squared_error_objective_pointer(void) const method
1066 
1069 
1071 {
1072  // Control sentence (if debug)
1073 
1074  #ifndef NDEBUG
1075 
1077  {
1078  std::ostringstream buffer;
1079 
1080  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1081  << "InverseSumSquaredError* get_inverse_sum_squared_error_objective_pointer(void) const method.\n"
1082  << "Pointer to inverse sum squared error objective is NULL.\n";
1083 
1084  throw std::logic_error(buffer.str());
1085  }
1086 
1087  #endif
1088 
1090 }
1091 
1092 
1093 // PerformanceTerm* get_user_objective_pointer(void) const method
1094 
1097 
1099 {
1100  // Control sentence (if debug)
1101 
1102  #ifndef NDEBUG
1103 
1105  {
1106  std::ostringstream buffer;
1107 
1108  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1109  << "PerformanceTerm* get_user_objective_pointer(void) const method.\n"
1110  << "Pointer to user objective is NULL.\n";
1111 
1112  throw std::logic_error(buffer.str());
1113  }
1114 
1115  #endif
1116 
1117  return(user_objective_pointer);
1118 }
1119 
1120 
1121 // NeuralParametersNorm* get_neural_parameters_norm_regularization_pointer(void) const method
1122 
1125 
1127 {
1128  // Control sentence (if debug)
1129 
1130  #ifndef NDEBUG
1131 
1133  {
1134  std::ostringstream buffer;
1135 
1136  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1137  << "NeuralParametersNorm* get_neural_parameters_norm_regularization_pointer(void) const method.\n"
1138  << "Pointer to neural parameters norm regularization is NULL.\n";
1139 
1140  throw std::logic_error(buffer.str());
1141  }
1142 
1143  #endif
1144 
1146 }
1147 
1148 
1149 // OutputsIntegrals* get_outputs_integrals_regularization_pointer(void) const method
1150 
1153 
1155 {
1156  // Control sentence (if debug)
1157 
1158  #ifndef NDEBUG
1159 
1161  {
1162  std::ostringstream buffer;
1163 
1164  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1165  << "OutputsIntegrals* get_outputs_integrals_regularization_pointer(void) const method.\n"
1166  << "Pointer to outputs integrals regularization is NULL.\n";
1167 
1168  throw std::logic_error(buffer.str());
1169  }
1170 
1171  #endif
1172 
1174 }
1175 
1176 
1177 // PerformanceTerm* get_user_regularization_pointer(void) const method
1178 
1181 
1183 {
1184  // Control sentence (if debug)
1185 
1186  #ifndef NDEBUG
1187 
1189  {
1190  std::ostringstream buffer;
1191 
1192  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1193  << "PerformanceTerm* get_user_regularization_pointer(void) const method.\n"
1194  << "Pointer to user regularization is NULL.\n";
1195 
1196  throw std::logic_error(buffer.str());
1197  }
1198 
1199  #endif
1200 
1202 }
1203 
1204 
1205 // OutputsIntegrals* get_outputs_integrals_constraints_pointer(void) const method
1206 
1209 
1211 {
1212  // Control sentence (if debug)
1213 
1214  #ifndef NDEBUG
1215 
1217  {
1218  std::ostringstream buffer;
1219 
1220  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1221  << "OutputsIntegrals* get_outputs_integrals_constraints_pointer(void) const method.\n"
1222  << "Pointer to outputs integrals constraints is NULL.\n";
1223 
1224  throw std::logic_error(buffer.str());
1225  }
1226 
1227  #endif
1228 
1230 }
1231 
1232 
1233 // SolutionsError* get_solutions_error_constraints_pointer(void) const method
1234 
1237 
1239 {
1240  // Control sentence (if debug)
1241 
1242  #ifndef NDEBUG
1243 
1245  {
1246  std::ostringstream buffer;
1247 
1248  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1249  << "SolutionsError* get_solutions_error_constraints_pointer(void) const method.\n"
1250  << "Pointer to solutions error constraints is NULL.\n";
1251 
1252  throw std::logic_error(buffer.str());
1253  }
1254 
1255  #endif
1256 
1258 }
1259 
1260 
1261 // FinalSolutionsError* get_final_solutions_error_constraints_pointer(void) const method
1262 
1265 
1267 {
1268  // Control sentence (if debug)
1269 
1270  #ifndef NDEBUG
1271 
1273  {
1274  std::ostringstream buffer;
1275 
1276  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1277  << "FinalSolutionsError* get_final_solutions_error_constraints_pointer(void) const method.\n"
1278  << "Pointer to final solutions error constraints is NULL.\n";
1279 
1280  throw std::logic_error(buffer.str());
1281  }
1282 
1283  #endif
1284 
1286 }
1287 
1288 
1289 // IndependentParametersError* get_independent_parameters_error_constraints_pointer(void) const method
1290 
1293 
1295 {
1296  // Control sentence (if debug)
1297 
1298  #ifndef NDEBUG
1299 
1301  {
1302  std::ostringstream buffer;
1303 
1304  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1305  << "IndependentParametersError* get_independent_parameters_error_constraints_pointer(void) const method.\n"
1306  << "Pointer to independent parameters error constraints is NULL.\n";
1307 
1308  throw std::logic_error(buffer.str());
1309  }
1310 
1311  #endif
1312 
1314 }
1315 
1316 
1317 // PerformanceTerm* get_user_constraints_pointer(void) const method
1318 
1321 
1323 {
1324  // Control sentence (if debug)
1325 
1326  #ifndef NDEBUG
1327 
1329  {
1330  std::ostringstream buffer;
1331 
1332  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1333  << "PerformanceTerm* get_user_constraints_pointer(void) const method.\n"
1334  << "Pointer to user constraints is NULL.\n";
1335 
1336  throw std::logic_error(buffer.str());
1337  }
1338 
1339  #endif
1340 
1341  return(user_constraints_pointer);
1342 }
1343 
1344 
1345 // const ObjectiveType& get_objective_type(void) const method
1346 
1348 
1350 {
1351  return(objective_type);
1352 }
1353 
1354 
1355 // const RegularizationType& get_regularization_type(void) const method
1356 
1358 
1359 const PerformanceFunctional::RegularizationType& PerformanceFunctional::get_regularization_type(void) const
1360 {
1361  return(regularization_type);
1362 }
1363 
1364 
1365 // const ConstraintsType& get_constraints_type(void) const method
1366 
1368 
1369 const PerformanceFunctional::ConstraintsType& PerformanceFunctional::get_constraints_type(void) const
1370 {
1371  return(constraints_type);
1372 }
1373 
1374 
1375 // std::string write_objective_type(void) const
1376 
1378 
1379 std::string PerformanceFunctional::write_objective_type(void) const
1380 {
1381  if(objective_type == NO_OBJECTIVE)
1382  {
1383  return("NO_OBJECTIVE");
1384  }
1385  else if(objective_type == SUM_SQUARED_ERROR_OBJECTIVE)
1386  {
1387  return("SUM_SQUARED_ERROR_OBJECTIVE");
1388  }
1389  else if(objective_type == MEAN_SQUARED_ERROR_OBJECTIVE)
1390  {
1391  return("MEAN_SQUARED_ERROR_OBJECTIVE");
1392  }
1393  else if(objective_type == ROOT_MEAN_SQUARED_ERROR_OBJECTIVE)
1394  {
1395  return("ROOT_MEAN_SQUARED_ERROR_OBJECTIVE");
1396  }
1397  else if(objective_type == NORMALIZED_SQUARED_ERROR_OBJECTIVE)
1398  {
1399  return("NORMALIZED_SQUARED_ERROR_OBJECTIVE");
1400  }
1401  else if(objective_type == MINKOWSKI_ERROR_OBJECTIVE)
1402  {
1403  return("MINKOWSKI_ERROR_OBJECTIVE");
1404  }
1405  else if(objective_type == CROSS_ENTROPY_ERROR_OBJECTIVE)
1406  {
1407  return("CROSS_ENTROPY_ERROR_OBJECTIVE");
1408  }
1409  else if(objective_type == OUTPUTS_INTEGRALS_OBJECTIVE)
1410  {
1411  return("OUTPUTS_INTEGRALS_OBJECTIVE");
1412  }
1413  else if(objective_type == SOLUTIONS_ERROR_OBJECTIVE)
1414  {
1415  return("SOLUTIONS_ERROR_OBJECTIVE");
1416  }
1417  else if(objective_type == FINAL_SOLUTIONS_ERROR_OBJECTIVE)
1418  {
1419  return("FINAL_SOLUTIONS_ERROR_OBJECTIVE");
1420  }
1421  else if(objective_type == INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE)
1422  {
1423  return("INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE");
1424  }
1425  else if(objective_type == INVERSE_SUM_SQUARED_ERROR_OBJECTIVE)
1426  {
1427  return("INVERSE_SUM_SQUARED_ERROR_OBJECTIVE");
1428  }
1429  else if(objective_type == USER_OBJECTIVE)
1430  {
1431  return("USER_OBJECTIVE");
1432  }
1433  else
1434  {
1435  std::ostringstream buffer;
1436 
1437  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1438  << "std::string write_objective_type(void) const method.\n"
1439  << "Unknown objective type.\n";
1440 
1441  throw std::logic_error(buffer.str());
1442  }
1443 }
1444 
1445 
1446 // std::string write_regularization_type(void) const method
1447 
1449 
1450 std::string PerformanceFunctional::write_regularization_type(void) const
1451 {
1452  if(regularization_type == NO_REGULARIZATION)
1453  {
1454  return("NO_REGULARIZATION");
1455  }
1456  else if(regularization_type == NEURAL_PARAMETERS_NORM_REGULARIZATION)
1457  {
1458  return("NEURAL_PARAMETERS_NORM_REGULARIZATION");
1459  }
1460  else if(regularization_type == OUTPUTS_INTEGRALS_REGULARIZATION)
1461  {
1462  return("OUTPUTS_INTEGRALS_REGULARIZATION");
1463  }
1464  else if(regularization_type == USER_REGULARIZATION)
1465  {
1466  return("USER_REGULARIZATION");
1467  }
1468  else
1469  {
1470  std::ostringstream buffer;
1471 
1472  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1473  << "std::string write_regularization_type(void) const method.\n"
1474  << "Unknown regularization type.\n";
1475 
1476  throw std::logic_error(buffer.str());
1477  }
1478 }
1479 
1480 
1481 // std::string write_constraints_type(void) const method
1482 
1484 
1485 std::string PerformanceFunctional::write_constraints_type(void) const
1486 {
1487  if(constraints_type == NO_CONSTRAINTS)
1488  {
1489  return("NO_CONSTRAINTS");
1490  }
1491  else if(constraints_type == OUTPUTS_INTEGRALS_CONSTRAINTS)
1492  {
1493  return("OUTPUTS_INTEGRALS_CONSTRAINTS");
1494  }
1495  else if(constraints_type == SOLUTIONS_ERROR_CONSTRAINTS)
1496  {
1497  return("SOLUTIONS_ERROR_CONSTRAINTS");
1498  }
1499  else if(constraints_type == FINAL_SOLUTIONS_ERROR_CONSTRAINTS)
1500  {
1501  return("FINAL_SOLUTIONS_ERROR_CONSTRAINTS");
1502  }
1503  else if(constraints_type == INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS)
1504  {
1505  return("INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS");
1506  }
1507  else if(constraints_type == USER_CONSTRAINTS)
1508  {
1509  return("USER_CONSTRAINTS");
1510  }
1511  else
1512  {
1513  std::ostringstream buffer;
1514 
1515  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1516  << "std::string write_constraints_type(void) const method.\n"
1517  << "Unknown constraints type.\n";
1518 
1519  throw std::logic_error(buffer.str());
1520  }
1521 }
1522 
1523 
1524 // std::string write_objective_type_text(void) const
1525 
1527 
1528 std::string PerformanceFunctional::write_objective_type_text(void) const
1529 {
1530  if(objective_type == NO_OBJECTIVE)
1531  {
1532  return("no objective");
1533  }
1534  else if(objective_type == SUM_SQUARED_ERROR_OBJECTIVE)
1535  {
1536  return("sum squared error");
1537  }
1538  else if(objective_type == MEAN_SQUARED_ERROR_OBJECTIVE)
1539  {
1540  return("mean squared error");
1541  }
1542  else if(objective_type == ROOT_MEAN_SQUARED_ERROR_OBJECTIVE)
1543  {
1544  return("root mean squared error");
1545  }
1546  else if(objective_type == NORMALIZED_SQUARED_ERROR_OBJECTIVE)
1547  {
1548  return("normalized squared error");
1549  }
1550  else if(objective_type == MINKOWSKI_ERROR_OBJECTIVE)
1551  {
1552  return("Minkowski error");
1553  }
1554  else if(objective_type == CROSS_ENTROPY_ERROR_OBJECTIVE)
1555  {
1556  return("cross entropy error");
1557  }
1558  else if(objective_type == OUTPUTS_INTEGRALS_OBJECTIVE)
1559  {
1560  return("outputs integrals");
1561  }
1562  else if(objective_type == SOLUTIONS_ERROR_OBJECTIVE)
1563  {
1564  return("solutions error");
1565  }
1566  else if(objective_type == FINAL_SOLUTIONS_ERROR_OBJECTIVE)
1567  {
1568  return("final solutions error");
1569  }
1570  else if(objective_type == INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE)
1571  {
1572  return("independent parameters error");
1573  }
1574  else if(objective_type == INVERSE_SUM_SQUARED_ERROR_OBJECTIVE)
1575  {
1576  return("inverse sum squared error");
1577  }
1578  else if(objective_type == USER_OBJECTIVE)
1579  {
1580  return("user objective");
1581  }
1582  else
1583  {
1584  std::ostringstream buffer;
1585 
1586  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1587  << "std::string write_objective_type_text(void) const method.\n"
1588  << "Unknown objective type.\n";
1589 
1590  throw std::logic_error(buffer.str());
1591  }
1592 }
1593 
1594 
1595 // std::string write_regularization_type_text(void) const method
1596 
1598 
1599 std::string PerformanceFunctional::write_regularization_type_text(void) const
1600 {
1601  if(regularization_type == NO_REGULARIZATION)
1602  {
1603  return("no regularization");
1604  }
1605  else if(regularization_type == NEURAL_PARAMETERS_NORM_REGULARIZATION)
1606  {
1607  return("neural parameters norm");
1608  }
1609  else if(regularization_type == OUTPUTS_INTEGRALS_REGULARIZATION)
1610  {
1611  return("outputs integrals");
1612  }
1613  else if(regularization_type == USER_REGULARIZATION)
1614  {
1615  return("user regularization");
1616  }
1617  else
1618  {
1619  std::ostringstream buffer;
1620 
1621  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1622  << "std::string write_regularization_type_text(void) const method.\n"
1623  << "Unknown regularization type.\n";
1624 
1625  throw std::logic_error(buffer.str());
1626  }
1627 }
1628 
1629 
1630 // std::string write_constraints_type_text(void) const method
1631 
1633 
1634 std::string PerformanceFunctional::write_constraints_type_text(void) const
1635 {
1636  if(constraints_type == NO_CONSTRAINTS)
1637  {
1638  return("no constraints");
1639  }
1640  else if(constraints_type == OUTPUTS_INTEGRALS_CONSTRAINTS)
1641  {
1642  return("outputs integrals");
1643  }
1644  else if(constraints_type == SOLUTIONS_ERROR_CONSTRAINTS)
1645  {
1646  return("solutions error");
1647  }
1648  else if(constraints_type == FINAL_SOLUTIONS_ERROR_CONSTRAINTS)
1649  {
1650  return("final solutions error");
1651  }
1652  else if(constraints_type == INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS)
1653  {
1654  return("independent parameters error");
1655  }
1656  else if(constraints_type == USER_CONSTRAINTS)
1657  {
1658  return("user constraints");
1659  }
1660  else
1661  {
1662  std::ostringstream buffer;
1663 
1664  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1665  << "std::string write_constraints_type_text(void) const method.\n"
1666  << "Unknown constraints type.\n";
1667 
1668  throw std::logic_error(buffer.str());
1669  }
1670 }
1671 
1672 
1673 // const bool& get_display(void) const method
1674 
1677 
1678 const bool& PerformanceFunctional::get_display(void) const
1679 {
1680  return(display);
1681 }
1682 
1683 
1684 // void set_neural_network_pointer(NeuralNetwork*) method
1685 
1688 
1689 void PerformanceFunctional::set_neural_network_pointer(NeuralNetwork* new_neural_network_pointer)
1690 {
1691  neural_network_pointer = new_neural_network_pointer;
1692 
1693  // Objective
1694 
1695  switch(objective_type)
1696  {
1697  case NO_OBJECTIVE:
1698  {
1699  // Do nothing
1700  }
1701  break;
1702 
1703  case SUM_SQUARED_ERROR_OBJECTIVE:
1704  {
1706  }
1707  break;
1708 
1709  case MEAN_SQUARED_ERROR_OBJECTIVE:
1710  {
1712  }
1713  break;
1714 
1715  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
1716  {
1718  }
1719  break;
1720 
1721  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
1722  {
1724  }
1725  break;
1726 
1727  case MINKOWSKI_ERROR_OBJECTIVE:
1728  {
1730  }
1731  break;
1732 
1733  case CROSS_ENTROPY_ERROR_OBJECTIVE:
1734  {
1736  }
1737  break;
1738 
1739  case OUTPUTS_INTEGRALS_OBJECTIVE:
1740  {
1742  }
1743  break;
1744 
1745  case SOLUTIONS_ERROR_OBJECTIVE:
1746  {
1748  }
1749  break;
1750 
1751  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
1752  {
1754  }
1755  break;
1756 
1757  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
1758  {
1760  }
1761  break;
1762 
1763  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
1764  {
1766  }
1767  break;
1768 
1769  case USER_OBJECTIVE:
1770  {
1771  user_objective_pointer->set_neural_network_pointer(new_neural_network_pointer);
1772  }
1773  break;
1774 
1775  default:
1776  {
1777  std::ostringstream buffer;
1778 
1779  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1780  << "void set_neural_network_pointer(NeuralNetwork*) method.\n"
1781  << "Unknown objective type.\n";
1782 
1783  throw std::logic_error(buffer.str());
1784  }
1785  break;
1786  }
1787 
1788  // Regularization
1789 
1790  switch(regularization_type)
1791  {
1792  case NO_REGULARIZATION:
1793  {
1794  // Do nothing
1795  }
1796  break;
1797 
1798  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
1799  {
1801  }
1802  break;
1803 
1804  case OUTPUTS_INTEGRALS_REGULARIZATION:
1805  {
1807  }
1808  break;
1809 
1810  case USER_REGULARIZATION:
1811  {
1812  user_regularization_pointer->set_neural_network_pointer(new_neural_network_pointer);
1813  }
1814  break;
1815 
1816  default:
1817  {
1818  std::ostringstream buffer;
1819 
1820  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1821  << "void set_neural_network_pointer(NeuralNetwork*) method.\n"
1822  << "Unknown regularization type.\n";
1823 
1824  throw std::logic_error(buffer.str());
1825  }
1826  break;
1827  }
1828 
1829  // Constraints
1830 
1831  switch(constraints_type)
1832  {
1833  case NO_CONSTRAINTS:
1834  {
1835  // Do nothing
1836  }
1837  break;
1838 
1839  case OUTPUTS_INTEGRALS_CONSTRAINTS:
1840  {
1842  }
1843  break;
1844 
1845  case SOLUTIONS_ERROR_CONSTRAINTS:
1846  {
1848  }
1849  break;
1850 
1851  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
1852  {
1854  }
1855  break;
1856 
1857  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
1858  {
1860  }
1861  break;
1862 
1863  case USER_CONSTRAINTS:
1864  {
1865  user_constraints_pointer->set_neural_network_pointer(new_neural_network_pointer);
1866  }
1867  break;
1868 
1869  default:
1870  {
1871  std::ostringstream buffer;
1872 
1873  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1874  << "void set_neural_network_pointer(NeuralNetwork*) method.\n"
1875  << "Unknown constraints type.\n";
1876 
1877  throw std::logic_error(buffer.str());
1878  }
1879  break;
1880  }
1881 }
1882 
1883 
1884 // void set_mathematical_model_pointer(MathematicalModel*) method
1885 
1888 
1889 void PerformanceFunctional::set_mathematical_model_pointer(MathematicalModel* new_mathematical_model_pointer)
1890 {
1891  mathematical_model_pointer = new_mathematical_model_pointer;
1892 
1893  // Objective
1894 
1895  switch(objective_type)
1896  {
1897  case NO_OBJECTIVE:
1898  {
1899  // Do nothing
1900  }
1901  break;
1902 
1903  case SUM_SQUARED_ERROR_OBJECTIVE:
1904  {
1906  }
1907  break;
1908 
1909  case MEAN_SQUARED_ERROR_OBJECTIVE:
1910  {
1912  }
1913  break;
1914 
1915  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
1916  {
1918  }
1919  break;
1920 
1921  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
1922  {
1924  }
1925  break;
1926 
1927  case MINKOWSKI_ERROR_OBJECTIVE:
1928  {
1929  Minkowski_error_objective_pointer->set_mathematical_model_pointer(new_mathematical_model_pointer);
1930  }
1931  break;
1932 
1933  case CROSS_ENTROPY_ERROR_OBJECTIVE:
1934  {
1936  }
1937  break;
1938 
1939  case OUTPUTS_INTEGRALS_OBJECTIVE:
1940  {
1942  }
1943  break;
1944 
1945  case SOLUTIONS_ERROR_OBJECTIVE:
1946  {
1947  solutions_error_objective_pointer->set_mathematical_model_pointer(new_mathematical_model_pointer);
1948  }
1949  break;
1950 
1951  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
1952  {
1954  }
1955  break;
1956 
1957  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
1958  {
1960  }
1961  break;
1962 
1963  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
1964  {
1966  }
1967  break;
1968 
1969  case USER_OBJECTIVE:
1970  {
1971  user_objective_pointer->set_mathematical_model_pointer(new_mathematical_model_pointer);
1972  }
1973  break;
1974 
1975  default:
1976  {
1977  std::ostringstream buffer;
1978 
1979  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
1980  << "void set_mathematical_model_pointer(MathematicalModel*) method.\n"
1981  << "Unknown objective type.\n";
1982 
1983  throw std::logic_error(buffer.str());
1984  }
1985  break;
1986  }
1987 
1988  // Regularization
1989 
1990  switch(regularization_type)
1991  {
1992  case NO_REGULARIZATION:
1993  {
1994  // Do nothing
1995  }
1996  break;
1997 
1998  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
1999  {
2001  }
2002  break;
2003 
2004  case OUTPUTS_INTEGRALS_REGULARIZATION:
2005  {
2007  }
2008  break;
2009 
2010  case USER_REGULARIZATION:
2011  {
2012  user_regularization_pointer->set_mathematical_model_pointer(new_mathematical_model_pointer);
2013  }
2014  break;
2015 
2016  default:
2017  {
2018  std::ostringstream buffer;
2019 
2020  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2021  << "void set_mathematical_model_pointer(MathematicalModel*) method.\n"
2022  << "Unknown regularization type.\n";
2023 
2024  throw std::logic_error(buffer.str());
2025  }
2026  break;
2027  }
2028 
2029  // Constraints
2030 
2031  switch(constraints_type)
2032  {
2033  case NO_CONSTRAINTS:
2034  {
2035  // Do nothing
2036  }
2037  break;
2038 
2039  case OUTPUTS_INTEGRALS_CONSTRAINTS:
2040  {
2042  }
2043  break;
2044 
2045  case SOLUTIONS_ERROR_CONSTRAINTS:
2046  {
2048  }
2049  break;
2050 
2051  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
2052  {
2054  }
2055  break;
2056 
2057  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
2058  {
2060  }
2061  break;
2062 
2063  case USER_CONSTRAINTS:
2064  {
2065  user_constraints_pointer->set_mathematical_model_pointer(new_mathematical_model_pointer);
2066  }
2067  break;
2068 
2069  default:
2070  {
2071  std::ostringstream buffer;
2072 
2073  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2074  << "void set_mathematical_model_pointer(MathematicalModel*) method.\n"
2075  << "Unknown constraints type.\n";
2076 
2077  throw std::logic_error(buffer.str());
2078  }
2079  break;
2080  }
2081 }
2082 
2083 
2084 // void set_data_set_pointer(DataSet*) method
2085 
2088 
2089 void PerformanceFunctional::set_data_set_pointer(DataSet* new_data_set_pointer)
2090 {
2091  data_set_pointer = new_data_set_pointer;
2092 
2093  // Objective
2094 
2095  switch(objective_type)
2096  {
2097  case NO_OBJECTIVE:
2098  {
2099  // Do nothing
2100  }
2101  break;
2102 
2103  case SUM_SQUARED_ERROR_OBJECTIVE:
2104  {
2106  }
2107  break;
2108 
2109  case MEAN_SQUARED_ERROR_OBJECTIVE:
2110  {
2112  }
2113  break;
2114 
2115  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
2116  {
2118  }
2119  break;
2120 
2121  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
2122  {
2124  }
2125  break;
2126 
2127  case MINKOWSKI_ERROR_OBJECTIVE:
2128  {
2130  }
2131  break;
2132 
2133  case CROSS_ENTROPY_ERROR_OBJECTIVE:
2134  {
2136  }
2137  break;
2138 
2139  case OUTPUTS_INTEGRALS_OBJECTIVE:
2140  {
2142  }
2143  break;
2144 
2145  case SOLUTIONS_ERROR_OBJECTIVE:
2146  {
2148  }
2149  break;
2150 
2151  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
2152  {
2154  }
2155  break;
2156 
2157  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
2158  {
2160  }
2161  break;
2162 
2163  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
2164  {
2166  }
2167  break;
2168 
2169  case USER_OBJECTIVE:
2170  {
2171  user_objective_pointer->set_data_set_pointer(new_data_set_pointer);
2172  }
2173  break;
2174 
2175  default:
2176  {
2177  std::ostringstream buffer;
2178 
2179  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2180  << "void set_data_set_pointer(DataSet*) method.\n"
2181  << "Unknown objective type.\n";
2182 
2183  throw std::logic_error(buffer.str());
2184  }
2185  break;
2186  }
2187 
2188  // Regularization
2189 
2190  switch(regularization_type)
2191  {
2192  case NO_REGULARIZATION:
2193  {
2194  // Do nothing
2195  }
2196  break;
2197 
2198  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
2199  {
2201  }
2202  break;
2203 
2204  case OUTPUTS_INTEGRALS_REGULARIZATION:
2205  {
2207  }
2208  break;
2209 
2210  case USER_REGULARIZATION:
2211  {
2212  user_regularization_pointer->set_data_set_pointer(new_data_set_pointer);
2213  }
2214  break;
2215 
2216  default:
2217  {
2218  std::ostringstream buffer;
2219 
2220  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2221  << "void set_data_set_pointer(DataSet*) method.\n"
2222  << "Unknown regularization type.\n";
2223 
2224  throw std::logic_error(buffer.str());
2225  }
2226  break;
2227  }
2228 
2229  // Constraints
2230 
2231  switch(constraints_type)
2232  {
2233  case NO_CONSTRAINTS:
2234  {
2235  // Do nothing
2236  }
2237  break;
2238 
2239  case OUTPUTS_INTEGRALS_CONSTRAINTS:
2240  {
2242  }
2243  break;
2244 
2245  case SOLUTIONS_ERROR_CONSTRAINTS:
2246  {
2248  }
2249  break;
2250 
2251  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
2252  {
2254  }
2255  break;
2256 
2257  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
2258  {
2260  }
2261  break;
2262 
2263  case USER_CONSTRAINTS:
2264  {
2265  user_constraints_pointer->set_data_set_pointer(new_data_set_pointer);
2266  }
2267  break;
2268 
2269  default:
2270  {
2271  std::ostringstream buffer;
2272 
2273  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2274  << "void set_data_set_pointer(DataSet*) method.\n"
2275  << "Unknown constraints type.\n";
2276 
2277  throw std::logic_error(buffer.str());
2278  }
2279  break;
2280  }
2281 }
2282 
2283 
2284 // void set_user_objective_pointer(PerformanceTerm*) method
2285 
2288 
2289 void PerformanceFunctional::set_user_objective_pointer(PerformanceTerm* new_user_objective_pointer)
2290 {
2291  destruct_objective();
2292 
2293  objective_type = USER_OBJECTIVE;
2294 
2295  user_objective_pointer = new_user_objective_pointer;
2296 }
2297 
2298 
2299 // void set_user_regularization_pointer(PerformanceTerm*) method
2300 
2303 
2304 void PerformanceFunctional::set_user_regularization_pointer(PerformanceTerm* new_user_regularization_pointer)
2305 {
2306  destruct_regularization();
2307 
2308  regularization_type = USER_REGULARIZATION;
2309 
2310  user_regularization_pointer = new_user_regularization_pointer;
2311 }
2312 
2313 
2314 // void set_user_constraints_pointer(PerformanceTerm*) method
2315 
2318 
2319 void PerformanceFunctional::set_user_constraints_pointer(PerformanceTerm* new_user_constraints_pointer)
2320 {
2321  destruct_constraints();
2322 
2323  constraints_type = USER_CONSTRAINTS;
2324 
2325  user_constraints_pointer = new_user_constraints_pointer;
2326 }
2327 
2328 
2329 // void set_default(void) method
2330 
2332 
2333 void PerformanceFunctional::set_default(void)
2334 {
2335  display = true;
2336 }
2337 
2338 
2339 // void set_objective_type(const std::string&) method
2340 
2343 
2344 void PerformanceFunctional::set_objective_type(const std::string& new_objective_type)
2345 {
2346  if(new_objective_type == "NO_OBJECTIVE")
2347  {
2348  set_objective_type(NO_OBJECTIVE);
2349  }
2350  else if(new_objective_type == "SUM_SQUARED_ERROR_OBJECTIVE")
2351  {
2352  set_objective_type(SUM_SQUARED_ERROR_OBJECTIVE);
2353  }
2354  else if(new_objective_type == "MEAN_SQUARED_ERROR_OBJECTIVE")
2355  {
2356  set_objective_type(MEAN_SQUARED_ERROR_OBJECTIVE);
2357  }
2358  else if(new_objective_type == "ROOT_MEAN_SQUARED_ERROR_OBJECTIVE")
2359  {
2360  set_objective_type(ROOT_MEAN_SQUARED_ERROR_OBJECTIVE);
2361  }
2362  else if(new_objective_type == "NORMALIZED_SQUARED_ERROR_OBJECTIVE")
2363  {
2364  set_objective_type(NORMALIZED_SQUARED_ERROR_OBJECTIVE);
2365  }
2366  else if(new_objective_type == "MINKOWSKI_ERROR_OBJECTIVE")
2367  {
2368  set_objective_type(MINKOWSKI_ERROR_OBJECTIVE);
2369  }
2370  else if(new_objective_type == "OUTPUTS_INTEGRALS_OBJECTIVE")
2371  {
2372  set_objective_type(OUTPUTS_INTEGRALS_OBJECTIVE);
2373  }
2374  else if(new_objective_type == "SOLUTIONS_ERROR_OBJECTIVE")
2375  {
2376  set_objective_type(SOLUTIONS_ERROR_OBJECTIVE);
2377  }
2378  else if(new_objective_type == "FINAL_SOLUTIONS_ERROR_OBJECTIVE")
2379  {
2380  set_objective_type(FINAL_SOLUTIONS_ERROR_OBJECTIVE);
2381  }
2382  else if(new_objective_type == "INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE")
2383  {
2384  set_objective_type(INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE);
2385  }
2386  else if(new_objective_type == "INVERSE_SUM_SQUARED_ERROR_OBJECTIVE")
2387  {
2388  set_objective_type(INVERSE_SUM_SQUARED_ERROR_OBJECTIVE);
2389  }
2390  else if(new_objective_type == "USER_OBJECTIVE")
2391  {
2392  set_objective_type(USER_OBJECTIVE);
2393  }
2394  else
2395  {
2396  std::ostringstream buffer;
2397 
2398  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2399  << "void set_objective_type(const std::string&) method.\n"
2400  << "Unknown objective type: " << new_objective_type << ".\n";
2401 
2402  throw std::logic_error(buffer.str());
2403  }
2404 }
2405 
2406 
2407 // void set_regularization_type(const std::string&) method
2408 
2411 
2412 void PerformanceFunctional::set_regularization_type(const std::string& new_regularization_type)
2413 {
2414  if(new_regularization_type == "NO_REGULARIZATION")
2415  {
2416  set_regularization_type(NO_REGULARIZATION);
2417  }
2418  else if(new_regularization_type == "NEURAL_PARAMETERS_NORM_REGULARIZATION")
2419  {
2420  set_regularization_type(NEURAL_PARAMETERS_NORM_REGULARIZATION);
2421  }
2422  else if(new_regularization_type == "OUTPUTS_INTEGRALS_REGULARIZATION")
2423  {
2424  set_regularization_type(OUTPUTS_INTEGRALS_REGULARIZATION);
2425  }
2426  else
2427  {
2428  std::ostringstream buffer;
2429 
2430  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2431  << "void set_regularization_type(const std::string&) method.\n"
2432  << "Unknown regularization type: " << new_regularization_type << ".\n";
2433 
2434  throw std::logic_error(buffer.str());
2435  }
2436 }
2437 
2438 
2439 // void set_constraints_type(const std::string&) method
2440 
2443 
2444 void PerformanceFunctional::set_constraints_type(const std::string& new_constraints_type)
2445 {
2446  if(new_constraints_type == "NO_CONSTRAINTS")
2447  {
2448  set_constraints_type(NO_CONSTRAINTS);
2449  }
2450  else if(new_constraints_type == "OUTPUTS_INTEGRALS_CONSTRAINTS")
2451  {
2452  set_constraints_type(OUTPUTS_INTEGRALS_CONSTRAINTS);
2453  }
2454  else if(new_constraints_type == "SOLUTIONS_ERROR_CONSTRAINTS")
2455  {
2456  set_constraints_type(SOLUTIONS_ERROR_CONSTRAINTS);
2457  }
2458  else if(new_constraints_type == "FINAL_SOLUTIONS_ERROR_CONSTRAINTS")
2459  {
2460  set_constraints_type(FINAL_SOLUTIONS_ERROR_CONSTRAINTS);
2461  }
2462  else if(new_constraints_type == "INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS")
2463  {
2464  set_constraints_type(INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS);
2465  }
2466  else if(new_constraints_type == "USER_CONSTRAINTS")
2467  {
2468  set_constraints_type(USER_CONSTRAINTS);
2469  }
2470  else
2471  {
2472  std::ostringstream buffer;
2473 
2474  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2475  << "void set_constraints_type(const std::string&) method.\n"
2476  << "Unknown constraints term type: " << new_constraints_type << ".\n";
2477 
2478  throw std::logic_error(buffer.str());
2479  }
2480 }
2481 
2482 
2483 // void set_display(const bool&) method
2484 
2489 
2490 void PerformanceFunctional::set_display(const bool& new_display)
2491 {
2492  display = new_display;
2493 }
2494 
2495 
2496 // void set_objective_type(const ObjectiveType&) method
2497 
2500 
2501 void PerformanceFunctional::set_objective_type(const ObjectiveType& new_objective_type)
2502 {
2503  destruct_objective();
2504 
2505  objective_type = new_objective_type;
2506 
2507  switch(new_objective_type)
2508  {
2509  case NO_OBJECTIVE:
2510  {
2511  // Do nothing
2512  }
2513  break;
2514 
2515  case SUM_SQUARED_ERROR_OBJECTIVE:
2516  {
2518  }
2519  break;
2520 
2521  case MEAN_SQUARED_ERROR_OBJECTIVE:
2522  {
2524  }
2525  break;
2526 
2527  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
2528  {
2530  }
2531  break;
2532 
2533  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
2534  {
2536  }
2537  break;
2538 
2539  case MINKOWSKI_ERROR_OBJECTIVE:
2540  {
2542  }
2543  break;
2544 
2545  case CROSS_ENTROPY_ERROR_OBJECTIVE:
2546  {
2548  }
2549  break;
2550 
2551  case OUTPUTS_INTEGRALS_OBJECTIVE:
2552  {
2554  }
2555  break;
2556 
2557  case SOLUTIONS_ERROR_OBJECTIVE:
2558  {
2560  }
2561  break;
2562 
2563  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
2564  {
2566  }
2567  break;
2568 
2569  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
2570  {
2572  }
2573  break;
2574 
2575  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
2576  {
2578  }
2579  break;
2580 
2581  case USER_OBJECTIVE:
2582  {
2583  //user_objective_pointer = NULL;
2584  }
2585  break;
2586 
2587  default:
2588  {
2589  std::ostringstream buffer;
2590 
2591  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2592  << "void set_objective_type(const ObjectiveType&) method.\n"
2593  << "Unknown objective type.\n";
2594 
2595  throw std::logic_error(buffer.str());
2596  }
2597  break;
2598  }
2599 }
2600 
2601 
2602 // void set_regularization_type(const RegularizationType&) method
2603 
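2604 /// Sets a new type of regularization term in the performance functional.
2605 /// @param new_regularization_type Type of regularization term.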
2606 
2607 void PerformanceFunctional::set_regularization_type(const RegularizationType& new_regularization_type)
2608 {
2610 
2611  regularization_type = new_regularization_type;
2612 
2613  switch(regularization_type)
2614  {
2615  case NO_REGULARIZATION:
2616  {
2617  // Do nothing
2618  }
2619  break;
2620 
2621  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
2622  {
2624  }
2625  break;
2626 
2627  case OUTPUTS_INTEGRALS_REGULARIZATION:
2628  {
2630  }
2631  break;
2632 
2633  case USER_REGULARIZATION:
2634  {
2635  // regularization_pointer = NULL;
2636  }
2637  break;
2638 
2639  default:
2640  {
2641  std::ostringstream buffer;
2642 
2643  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2644  << "void set_regularization_type(const RegularizationType&) method.\n"
2645  << "Unknown regularization type.\n";
2646 
2647  throw std::logic_error(buffer.str());
2648  }
2649  break;
2650  }
2651 }
2652 
2653 
2654 // void set_constraints_type(const ConstraintsType&) method
2655 
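2656 /// Sets a new type of constraints term in the performance functional.
2657 /// @param new_constraints_type Type of constraints term.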
2658 
2659 void PerformanceFunctional::set_constraints_type(const ConstraintsType& new_constraints_type)
2660 {
2662 
2663  constraints_type = new_constraints_type;
2664 
2665  switch(constraints_type)
2666  {
2667  case NO_CONSTRAINTS:
2668  {
2669  // Do nothing
2670  }
2671  break;
2672 
2673  case OUTPUTS_INTEGRALS_CONSTRAINTS:
2674  {
2676  }
2677  break;
2678 
2679  case SOLUTIONS_ERROR_CONSTRAINTS:
2680  {
2682  }
2683  break;
2684 
2685  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
2686  {
2688  }
2689  break;
2690 
2691  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
2692  {
2694  }
2695  break;
2696 
2697  case USER_CONSTRAINTS:
2698  {
2699  user_constraints_pointer = NULL;
2700  }
2701  break;
2702 
2703  default:
2704  {
2705  std::ostringstream buffer;
2706 
2707  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2708  << "void set_constraints_type(const ConstraintsType&) method.\n"
2709  << "Unknown constraints type.\n";
2710 
2711  throw std::logic_error(buffer.str());
2712  }
2713  break;
2714  }
2715 }
2716 
2717 
2718 // void destruct_objective(void) method
2719 
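2720 /// Destructs the objective term object and sets the objective type to NO_OBJECTIVE.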
2722 
2723 void PerformanceFunctional::destruct_objective(void)
2724 {
2736  delete user_objective_pointer;
2737 
2749  user_objective_pointer = NULL;
2750 
2751  objective_type = NO_OBJECTIVE;
2752 }
2753 
2754 
2755 // void destruct_regularization(void) method
2756 
2759 
2760 void PerformanceFunctional::destruct_regularization(void)
2761 {
2764  delete user_regularization_pointer;
2765 
2768  user_regularization_pointer = NULL;
2769 
2770  regularization_type = NO_REGULARIZATION;
2771 }
2772 
2773 
2774 // void destruct_constraints(void) method
2775 
2778 
2779 void PerformanceFunctional::destruct_constraints(void)
2780 {
2785  delete user_constraints_pointer;
2786 
2791  user_constraints_pointer = NULL;
2792 
2793  constraints_type = NO_CONSTRAINTS;
2794 }
2795 
2796 
2797 // void destruct_all_terms(void) method
2798 
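2799 /// Destructs the objective, regularization and constraints terms of this performance functional.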
2800 
2801 void PerformanceFunctional::destruct_all_terms(void)
2802 {
2803  destruct_objective();
2804  destruct_regularization();
2805  destruct_constraints();
2806 }
2807 
2808 
2809 // double calculate_objective(void) const method
2810 
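2811 /// Returns the objective term evaluated for the current parameters of the neural network,
2812 /// according to the objective type that has been set.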
2813 
2814 double PerformanceFunctional::calculate_objective(void) const
2815 {
2816  // Control sentence (if debug)
2817 
2818  #ifndef NDEBUG
2819 
2821 
2822  #endif
2823 
2824  double objective = 0.0;
2825 
2826  // Objective
2827 
2828  switch(objective_type)
2829  {
2830  case NO_OBJECTIVE:
2831  {
2832  // Do nothing
2833  }
2834  break;
2835 
2836  case SUM_SQUARED_ERROR_OBJECTIVE:
2837  {
2839  }
2840  break;
2841 
2842  case MEAN_SQUARED_ERROR_OBJECTIVE:
2843  {
2845  }
2846  break;
2847 
2848  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
2849  {
2851  }
2852  break;
2853 
2854  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
2855  {
2857  }
2858  break;
2859 
2860  case MINKOWSKI_ERROR_OBJECTIVE:
2861  {
2863  }
2864  break;
2865 
2866  case CROSS_ENTROPY_ERROR_OBJECTIVE:
2867  {
2869  }
2870  break;
2871 
2872  case OUTPUTS_INTEGRALS_OBJECTIVE:
2873  {
2875  }
2876  break;
2877 
2878  case SOLUTIONS_ERROR_OBJECTIVE:
2879  {
2881  }
2882  break;
2883 
2884  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
2885  {
2887  }
2888  break;
2889 
2890  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
2891  {
2893  }
2894  break;
2895 
2896  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
2897  {
2899  }
2900  break;
2901 
2902  case USER_OBJECTIVE:
2903  {
2905  }
2906  break;
2907 
2908  default:
2909  {
2910  std::ostringstream buffer;
2911 
2912  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
2913  << "double calculate_objective(void) const method.\n"
2914  << "Unknown objective type.\n";
2915 
2916  throw std::logic_error(buffer.str());
2917  }
2918  break;
2919  }
2920 
2921  return(objective);
2922 }
2923 
2924 
2925 // double calculate_objective(const Vector<double>&) const method
2926 
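2927 /// Returns the objective term evaluated for a given vector of parameters.
2928 /// @param parameters Vector of parameters of the neural network.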
2929 
2930 double PerformanceFunctional::calculate_objective(const Vector<double>& parameters) const
2931 {
2932  // Control sentence (if debug)
2933 
2934  #ifndef NDEBUG
2935 
2937 
2938  #endif
2939 
2940  double objective = 0.0;
2941 
2942  // Objective
2943 
2944  switch(objective_type)
2945  {
2946  case NO_OBJECTIVE:
2947  {
2948  // Do nothing
2949  }
2950  break;
2951 
2952  case SUM_SQUARED_ERROR_OBJECTIVE:
2953  {
2955  }
2956  break;
2957 
2958  case MEAN_SQUARED_ERROR_OBJECTIVE:
2959  {
2961  }
2962  break;
2963 
2964  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
2965  {
2967  }
2968  break;
2969 
2970  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
2971  {
2973  }
2974  break;
2975 
2976  case MINKOWSKI_ERROR_OBJECTIVE:
2977  {
2979  }
2980  break;
2981 
2982  case CROSS_ENTROPY_ERROR_OBJECTIVE:
2983  {
2984  //objective = cross_entropy_error_objective_pointer->calculate_performance(parameters);
2985  }
2986  break;
2987 
2988  case OUTPUTS_INTEGRALS_OBJECTIVE:
2989  {
2991  }
2992  break;
2993 
2994  case SOLUTIONS_ERROR_OBJECTIVE:
2995  {
2997  }
2998  break;
2999 
3000  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
3001  {
3002  //objective = final_solutions_error_objective_pointer->calculate_performance(parameters);
3003  }
3004  break;
3005 
3006  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
3007  {
3009  }
3010  break;
3011 
3012  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
3013  {
3015  }
3016  break;
3017 
3018  case USER_OBJECTIVE:
3019  {
3020  objective = user_objective_pointer->calculate_performance(parameters);
3021  }
3022  break;
3023 
3024  default:
3025  {
3026  std::ostringstream buffer;
3027 
3028  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3029  << "double calculate_objective(const Vector<double>&) const method.\n"
3030  << "Unknown objective type.\n";
3031 
3032  throw std::logic_error(buffer.str());
3033  }
3034  break;
3035  }
3036 
3037  return(objective);
3038 }
3039 
3040 
3041 // double calculate_regularization(void) const method
3042 
3045 
3046 double PerformanceFunctional::calculate_regularization(void) const
3047 {
3048  // Control sentence (if debug)
3049 
3050  #ifndef NDEBUG
3051 
3053 
3054  #endif
3055 
3056  double regularization = 0.0;
3057 
3058  switch(regularization_type)
3059  {
3060  case NO_REGULARIZATION:
3061  {
3062  // Do nothing
3063  }
3064  break;
3065 
3066  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
3067  {
3069  }
3070  break;
3071 
3072  case OUTPUTS_INTEGRALS_REGULARIZATION:
3073  {
3075  }
3076  break;
3077 
3078  case USER_REGULARIZATION:
3079  {
3081  }
3082  break;
3083 
3084  default:
3085  {
3086  std::ostringstream buffer;
3087 
3088  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3089  << "double calculate_regularization(void) const method.\n"
3090  << "Unknown regularization type.\n";
3091 
3092  throw std::logic_error(buffer.str());
3093  }
3094  break;
3095  }
3096 
3097  return(regularization);
3098 }
3099 
3100 
3101 // double calculate_regularization(const Vector<double>&) const method
3102 
3105 
3106 double PerformanceFunctional::calculate_regularization(const Vector<double>& parameters) const
3107 {
3108  // Control sentence (if debug)
3109 
3110  #ifndef NDEBUG
3111 
3113 
3114  #endif
3115 
3116  double regularization = 0.0;
3117 
3118  switch(regularization_type)
3119  {
3120  case NO_REGULARIZATION:
3121  {
3122  // Do nothing
3123  }
3124  break;
3125 
3126  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
3127  {
3129  }
3130  break;
3131 
3132  case OUTPUTS_INTEGRALS_REGULARIZATION:
3133  {
3135  }
3136  break;
3137 
3138  case USER_REGULARIZATION:
3139  {
3140  regularization = user_regularization_pointer->calculate_performance(parameters);
3141  }
3142  break;
3143 
3144  default:
3145  {
3146  std::ostringstream buffer;
3147 
3148  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3149  << "double calculate_regularization(const Vector<double>&) const method.\n"
3150  << "Unknown regularization type.\n";
3151 
3152  throw std::logic_error(buffer.str());
3153  }
3154  break;
3155  }
3156 
3157  return(regularization);
3158 }
3159 
3160 
3161 // double calculate_constraints(void) const method
3162 
3165 
3166 double PerformanceFunctional::calculate_constraints(void) const
3167 {
3168  // Control sentence (if debug)
3169 
3170  #ifndef NDEBUG
3171 
3173 
3174  #endif
3175 
3176  double constraints = 0.0;
3177 
3178  // Constraints
3179 
3180  switch(constraints_type)
3181  {
3182  case NO_CONSTRAINTS:
3183  {
3184  // Do nothing
3185  }
3186  break;
3187 
3188  case OUTPUTS_INTEGRALS_CONSTRAINTS:
3189  {
3191  }
3192  break;
3193 
3194  case SOLUTIONS_ERROR_CONSTRAINTS:
3195  {
3197  }
3198  break;
3199 
3200  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
3201  {
3203  }
3204  break;
3205 
3206  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
3207  {
3209  }
3210  break;
3211 
3212  case USER_CONSTRAINTS:
3213  {
3215  }
3216  break;
3217 
3218  default:
3219  {
3220  std::ostringstream buffer;
3221 
3222  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3223  << "double calculate_constraints(void) const method.\n"
3224  << "Unknown constraints type.\n";
3225 
3226  throw std::logic_error(buffer.str());
3227  }
3228  break;
3229  }
3230 
3231  return(constraints);
3232 }
3233 
3234 
3235 // double calculate_constraints(const Vector<double>&) const method
3236 
3239 
3240 double PerformanceFunctional::calculate_constraints(const Vector<double>& parameters) const
3241 {
3242  // Control sentence (if debug)
3243 
3244  #ifndef NDEBUG
3245 
3247 
3248  #endif
3249 
3250  double constraints = 0.0;
3251 
3252  // Constraints
3253 
3254  switch(constraints_type)
3255  {
3256  case NO_CONSTRAINTS:
3257  {
3258  // Do nothing
3259  }
3260  break;
3261 
3262  case OUTPUTS_INTEGRALS_CONSTRAINTS:
3263  {
3265  }
3266  break;
3267 
3268  case SOLUTIONS_ERROR_CONSTRAINTS:
3269  {
3271  }
3272  break;
3273 
3274  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
3275  {
3276  //constraints = final_solutions_error_constraints_pointer->calculate_performance(parameters);
3277  }
3278  break;
3279 
3280  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
3281  {
3283  }
3284  break;
3285 
3286  case USER_CONSTRAINTS:
3287  {
3288  constraints = user_constraints_pointer->calculate_performance(parameters);
3289  }
3290  break;
3291 
3292  default:
3293  {
3294  std::ostringstream buffer;
3295 
3296  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3297  << "double calculate_constraints(const Vector<double>&) const method.\n"
3298  << "Unknown constraints type.\n";
3299 
3300  throw std::logic_error(buffer.str());
3301  }
3302  break;
3303  }
3304 
3305  return(constraints);
3306 }
3307 
3308 
3309 // Vector<double> calculate_objective_terms(void) const method
3310 
3314 
3315 Vector<double> PerformanceFunctional::calculate_objective_terms(void) const
3316 {
3317  // Control sentence (if debug)
3318 
3319  #ifndef NDEBUG
3320 
3322 
3323  #endif
3324 
3325  std::ostringstream buffer;
3326 
3327  const Instances& instances = data_set_pointer->get_instances();
3328 
3329  const size_t training_instances_number = instances.count_training_instances_number();
3330 
3331  Vector<double> objective_terms(training_instances_number, 0.0);
3332 
3333  // Objective
3334 
3335  switch(objective_type)
3336  {
3337  case NO_OBJECTIVE:
3338  {
3339  // Do nothing
3340  }
3341  break;
3342 
3343  case SUM_SQUARED_ERROR_OBJECTIVE:
3344  {
3346  }
3347  break;
3348 
3349  case MEAN_SQUARED_ERROR_OBJECTIVE:
3350  {
3352  }
3353  break;
3354 
3355  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
3356  {
3357  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3358  << "Vector<double> calculate_objective_terms(void) const method.\n"
3359  << "Cannot calculate performance terms for root mean squared error objective.\n";
3360 
3361  throw std::logic_error(buffer.str());
3362  }
3363  break;
3364 
3365  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
3366  {
3368  }
3369  break;
3370 
3371  case MINKOWSKI_ERROR_OBJECTIVE:
3372  {
3373  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3374  << "Vector<double> calculate_objective_terms(void) const method.\n"
3375  << "Cannot calculate performance terms for Minkowski error objective.\n";
3376 
3377  throw std::logic_error(buffer.str());
3378  }
3379  break;
3380 
3381  case OUTPUTS_INTEGRALS_OBJECTIVE:
3382  {
3383  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3384  << "Vector<double> calculate_objective_terms(void) const method.\n"
3385  << "Cannot calculate performance terms for outputs integrals objective.\n";
3386 
3387  throw std::logic_error(buffer.str());
3388  }
3389  break;
3390 
3391  case SOLUTIONS_ERROR_OBJECTIVE:
3392  {
3393  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3394  << "Vector<double> calculate_objective_terms(void) const method.\n"
3395  << "Cannot calculate performance terms for solutions error objective.\n";
3396 
3397  throw std::logic_error(buffer.str());
3398  }
3399  break;
3400 
3401  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
3402  {
3403  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3404  << "Vector<double> calculate_objective_terms(void) const method.\n"
3405  << "Cannot calculate performance terms for final solutions error objective.\n";
3406 
3407  throw std::logic_error(buffer.str());
3408  }
3409  break;
3410 
3411  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
3412  {
3413  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3414  << "Vector<double> calculate_objective_terms(void) const method.\n"
3415  << "Cannot calculate performance terms for independent parameters error objective.\n";
3416 
3417  throw std::logic_error(buffer.str());
3418  }
3419  break;
3420 
3421  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
3422  {
3423  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3424  << "Vector<double> calculate_objective_terms(void) const method.\n"
3425  << "Cannot calculate performance terms for inverse sum squared error objective.\n";
3426 
3427  throw std::logic_error(buffer.str());
3428  }
3429  break;
3430 
3431  case USER_OBJECTIVE:
3432  {
3433  objective_terms = user_objective_pointer->calculate_terms();
3434  }
3435  break;
3436 
3437  default:
3438  {
3439  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3440  << "Vector<double> calculate_objective_terms(void) const method.\n"
3441  << "Unknown objective type.\n";
3442 
3443  throw std::logic_error(buffer.str());
3444  }
3445  break;
3446  }
3447 
3448  return(objective_terms);
3449 }
3450 
3451 
3452 // Vector<double> calculate_regularization_terms(void) const method
3453 
3455 
3456 Vector<double> PerformanceFunctional::calculate_regularization_terms(void) const
3457 {
3458  std::ostringstream buffer;
3459 
3460  Vector<double> regularization_terms;
3461 
3462  switch(regularization_type)
3463  {
3464  case NO_REGULARIZATION:
3465  {
3466  // Do nothing
3467  }
3468  break;
3469 
3470  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
3471  {
3472  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3473  << "Vector<double> calculate_regularization_terms(void) const method.\n"
3474  << "Cannot calculate performance terms for neural parameters norm.\n";
3475 
3476  throw std::logic_error(buffer.str());
3477  }
3478  break;
3479 
3480  case OUTPUTS_INTEGRALS_REGULARIZATION:
3481  {
3482  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3483  << "Vector<double> calculate_regularization_terms(void) const method.\n"
3484  << "Cannot calculate performance terms for outputs integrals.\n";
3485 
3486  throw std::logic_error(buffer.str());
3487  }
3488  break;
3489 
3490  case USER_REGULARIZATION:
3491  {
3492  regularization_terms = user_regularization_pointer->calculate_terms();
3493  }
3494  break;
3495 
3496  default:
3497  {
3498  std::ostringstream buffer;
3499 
3500  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3501  << "Vector<double> calculate_regularization_terms(void) const method.\n"
3502  << "Unknown regularization type.\n";
3503 
3504  throw std::logic_error(buffer.str());
3505  }
3506  break;
3507  }
3508 
3509  return(regularization_terms);
3510 }
3511 
3512 
3513 // Vector<double> calculate_constraints_terms(void) const method
3514 
3516 
3517 Vector<double> PerformanceFunctional::calculate_constraints_terms(void) const
3518 {
3519  Vector<double> constraints_terms;
3520 
3521  std::ostringstream buffer;
3522 
3523  // Constraints
3524 
3525  switch(constraints_type)
3526  {
3527  case NO_CONSTRAINTS:
3528  {
3529  // Do nothing
3530  }
3531  break;
3532 
3533  case OUTPUTS_INTEGRALS_CONSTRAINTS:
3534  {
3535  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3536  << "Vector<double> calculate_constraints_terms(void) const method.\n"
3537  << "Cannot calculate performance terms for outputs integrals.\n";
3538 
3539  throw std::logic_error(buffer.str());
3540  }
3541  break;
3542 
3543  case SOLUTIONS_ERROR_CONSTRAINTS:
3544  {
3545  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3546  << "Vector<double> calculate_constraints_terms(void) const method.\n"
3547  << "Cannot calculate performance terms for solutions error.\n";
3548 
3549  throw std::logic_error(buffer.str());
3550  }
3551  break;
3552 
3553  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
3554  {
3555  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3556  << "Vector<double> calculate_constraints_terms(void) const method.\n"
3557  << "Cannot calculate performance terms for final solutions error.\n";
3558 
3559  throw std::logic_error(buffer.str());
3560  }
3561  break;
3562 
3563  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
3564  {
3565  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3566  << "Vector<double> calculate_constraints_terms(void) const method.\n"
3567  << "Cannot calculate performance terms for independent parameters error.\n";
3568 
3569  throw std::logic_error(buffer.str());
3570  }
3571  break;
3572 
3573  case USER_CONSTRAINTS:
3574  {
3575  constraints_terms = user_constraints_pointer->calculate_terms();
3576  }
3577  break;
3578 
3579  default:
3580  {
3581  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3582  << "Vector<double> calculate_constraints_terms(void) const method.\n"
3583  << "Unknown constraints type.\n";
3584 
3585  throw std::logic_error(buffer.str());
3586  }
3587  break;
3588  }
3589 
3590  return(constraints_terms);
3591 }
3592 
3593 
3594 // Matrix<double> calculate_objective_terms_Jacobian(void) const method
3595 
3602 
3603 Matrix<double> PerformanceFunctional::calculate_objective_terms_Jacobian(void) const
3604 {
3605  // Control sentence (if debug)
3606 
3607  #ifndef NDEBUG
3608 
3610 
3611  #endif
3612 
3613  std::ostringstream buffer;
3614 
3615  Matrix<double> objective_terms_Jacobian;
3616 
3617  // Objective
3618 
3619  switch(objective_type)
3620  {
3621  case NO_OBJECTIVE:
3622  {
3623  // Do nothing
3624  }
3625  break;
3626 
3627  case SUM_SQUARED_ERROR_OBJECTIVE:
3628  {
3629  objective_terms_Jacobian = sum_squared_error_objective_pointer->calculate_terms_Jacobian();
3630  }
3631  break;
3632 
3633  case MEAN_SQUARED_ERROR_OBJECTIVE:
3634  {
3635  objective_terms_Jacobian = mean_squared_error_objective_pointer->calculate_terms_Jacobian();
3636  }
3637  break;
3638 
3639  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
3640  {
3641  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3642  << "Matrix<double> calculate_objective_terms_Jacobian(void) const method.\n"
3643  << "Cannot calculate performance terms for root mean squared error objective.\n";
3644 
3645  throw std::logic_error(buffer.str());
3646  }
3647  break;
3648 
3649  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
3650  {
3652  }
3653  break;
3654 
3655  case MINKOWSKI_ERROR_OBJECTIVE:
3656  {
3657  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3658  << "Matrix<double> calculate_objective_terms_Jacobian(void) const method.\n"
3659  << "Cannot calculate performance terms for Minkowski error objective.\n";
3660 
3661  throw std::logic_error(buffer.str());
3662  }
3663  break;
3664 
3665  case OUTPUTS_INTEGRALS_OBJECTIVE:
3666  {
3667  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3668  << "Matrix<double> calculate_objective_terms_Jacobian(void) const method.\n"
3669  << "Cannot calculate performance terms for outputs integrals objective.\n";
3670 
3671  throw std::logic_error(buffer.str());
3672  }
3673  break;
3674 
3675  case SOLUTIONS_ERROR_OBJECTIVE:
3676  {
3677  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3678  << "Matrix<double> calculate_objective_terms_Jacobian(void) const method.\n"
3679  << "Cannot calculate performance terms for solutions error objective.\n";
3680 
3681  throw std::logic_error(buffer.str());
3682  }
3683  break;
3684 
3685  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
3686  {
3687  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3688  << "Matrix<double> calculate_objective_terms_Jacobian(void) const method.\n"
3689  << "Cannot calculate performance terms for final solutions error objective.\n";
3690 
3691  throw std::logic_error(buffer.str());
3692  }
3693  break;
3694 
3695  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
3696  {
3697  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3698  << "Matrix<double> calculate_objective_terms_Jacobian(void) const method.\n"
3699  << "Cannot calculate performance terms for independent parameters error objective.\n";
3700 
3701  throw std::logic_error(buffer.str());
3702  }
3703  break;
3704 
3705  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
3706  {
3707  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3708  << "Matrix<double> calculate_objective_terms_Jacobian(void) const method.\n"
3709  << "Cannot calculate performance terms for inverse sum squared error objective.\n";
3710 
3711  throw std::logic_error(buffer.str());
3712  }
3713  break;
3714 
3715  case USER_OBJECTIVE:
3716  {
3717  objective_terms_Jacobian = user_objective_pointer->calculate_terms_Jacobian();
3718  }
3719  break;
3720 
3721  default:
3722  {
3723  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3724  << "Matrix<double> calculate_objective_terms_Jacobian(void) const method.\n"
3725  << "Unknown objective type.\n";
3726 
3727  throw std::logic_error(buffer.str());
3728  }
3729  break;
3730  }
3731 
3732  return(objective_terms_Jacobian);
3733 }
3734 
3735 
3736 // Matrix<double> calculate_regularization_terms_Jacobian(void) const method
3737 
3744 
3745 Matrix<double> PerformanceFunctional::calculate_regularization_terms_Jacobian(void) const
3746 {
3747  Matrix<double> regularization_terms_Jacobian;
3748 
3749  std::ostringstream buffer;
3750 
3751  switch(regularization_type)
3752  {
3753  case NO_REGULARIZATION:
3754  {
3755  // Do nothing
3756  }
3757  break;
3758 
3759  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
3760  {
3761  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3762  << "Matrix<double> calculate_regularization_terms_Jacobian(void) const method.\n"
3763  << "Cannot calculate performance terms for neural parameters norm.\n";
3764 
3765  throw std::logic_error(buffer.str());
3766  }
3767  break;
3768 
3769  case OUTPUTS_INTEGRALS_REGULARIZATION:
3770  {
3771  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3772  << "Matrix<double> calculate_regularization_terms_Jacobian(void) const method.\n"
3773  << "Cannot calculate performance terms for outputs integrals.\n";
3774 
3775  throw std::logic_error(buffer.str());
3776  }
3777  break;
3778 
3779  case USER_REGULARIZATION:
3780  {
3781  regularization_terms_Jacobian = user_regularization_pointer->calculate_terms_Jacobian();
3782  }
3783  break;
3784 
3785  default:
3786  {
3787  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3788  << "Matrix<double> calculate_regularization_terms_Jacobian(void) const method.\n"
3789  << "Unknown regularization type.\n";
3790 
3791  throw std::logic_error(buffer.str());
3792  }
3793  break;
3794  }
3795 
3796  return(regularization_terms_Jacobian);
3797 }
3798 
3799 
3800 // Matrix<double> calculate_constraints_terms_Jacobian(void) const method
3801 
3808 
3809 Matrix<double> PerformanceFunctional::calculate_constraints_terms_Jacobian(void) const
3810 {
3811  Matrix<double> constraints_terms_Jacobian;
3812 
3813  std::ostringstream buffer;
3814 
3815  // Constraints
3816 
3817  switch(constraints_type)
3818  {
3819  case NO_CONSTRAINTS:
3820  {
3821  // Do nothing
3822  }
3823  break;
3824 
3825  case OUTPUTS_INTEGRALS_CONSTRAINTS:
3826  {
3827  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3828  << "Matrix<double> calculate_constraints_terms_Jacobian(void) const method.\n"
3829  << "Cannot calculate performance terms for outputs integrals.\n";
3830 
3831  throw std::logic_error(buffer.str());
3832  }
3833  break;
3834 
3835  case SOLUTIONS_ERROR_CONSTRAINTS:
3836  {
3837  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3838  << "Matrix<double> calculate_constraints_terms_Jacobian(void) const method.\n"
3839  << "Cannot calculate performance terms for solutions error.\n";
3840 
3841  throw std::logic_error(buffer.str());
3842  }
3843  break;
3844 
3845  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
3846  {
3847  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3848  << "Matrix<double> calculate_constraints_terms_Jacobian(void) const method.\n"
3849  << "Cannot calculate performance terms for final solutions error.\n";
3850 
3851  throw std::logic_error(buffer.str());
3852  }
3853  break;
3854 
3855  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
3856  {
3857  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3858  << "Matrix<double> calculate_constraints_terms_Jacobian(void) const method.\n"
3859  << "Cannot calculate performance terms for independent parameters error.\n";
3860 
3861  throw std::logic_error(buffer.str());
3862  }
3863  break;
3864 
3865  case USER_CONSTRAINTS:
3866  {
3867  constraints_terms_Jacobian = user_constraints_pointer->calculate_terms_Jacobian();
3868  }
3869  break;
3870 
3871  default:
3872  {
3873  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3874  << "Matrix<double> calculate_constraints_terms_Jacobian(void) const method.\n"
3875  << "Unknown constraints type.\n";
3876 
3877  throw std::logic_error(buffer.str());
3878  }
3879  break;
3880  }
3881 
3882  return(constraints_terms_Jacobian);
3883 }
3884 
3885 
3886 // Vector<double> calculate_objective_gradient(void) const method
3887 
3891 
3892 Vector<double> PerformanceFunctional::calculate_objective_gradient(void) const
3893 {
3894  // Control sentence (if debug)
3895 
3896  #ifndef NDEBUG
3897 
3899 
3900  #endif
3901 
3902  const size_t parameters_number = neural_network_pointer->count_parameters_number();
3903 
3904  Vector<double> gradient(parameters_number, 0.0);
3905 
3906  // Objective
3907 
3908  switch(objective_type)
3909  {
3910  case NO_OBJECTIVE:
3911  {
3912  // Do nothing
3913  }
3914  break;
3915 
3916  case SUM_SQUARED_ERROR_OBJECTIVE:
3917  {
3919  }
3920  break;
3921 
3922  case MEAN_SQUARED_ERROR_OBJECTIVE:
3923  {
3925  }
3926  break;
3927 
3928  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
3929  {
3931  }
3932  break;
3933 
3934  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
3935  {
3937  }
3938  break;
3939 
3940  case MINKOWSKI_ERROR_OBJECTIVE:
3941  {
3943  }
3944  break;
3945 
3946  case CROSS_ENTROPY_ERROR_OBJECTIVE:
3947  {
3949  }
3950  break;
3951 
3952  case OUTPUTS_INTEGRALS_OBJECTIVE:
3953  {
3955  }
3956  break;
3957 
3958  case SOLUTIONS_ERROR_OBJECTIVE:
3959  {
3961  }
3962  break;
3963 
3964  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
3965  {
3967  }
3968  break;
3969 
3970  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
3971  {
3973  }
3974  break;
3975 
3976  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
3977  {
3979  }
3980  break;
3981 
3982  case USER_OBJECTIVE:
3983  {
3985  }
3986  break;
3987 
3988  default:
3989  {
3990  std::ostringstream buffer;
3991 
3992  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
3993  << "Vector<double> calculate_objective_gradient(void) const method.\n"
3994  << "Unknown objective type.\n";
3995 
3996  throw std::logic_error(buffer.str());
3997  }
3998  break;
3999  }
4000 
4001  return(gradient);
4002 }
4003 
4004 
4005 // Vector<double> calculate_objective_gradient(const Vector<double>&) const method
4006 
4010 
4011 Vector<double> PerformanceFunctional::calculate_objective_gradient(const Vector<double>& parameters) const
4012 {
4013  // Control sentence (if debug)
4014 
4015  #ifndef NDEBUG
4016 
4018 
4019  #endif
4020 
4021  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4022 
4023  Vector<double> gradient(parameters_number, 0.0);
4024 
4025  // Objective
4026 
4027  switch(objective_type)
4028  {
4029  case NO_OBJECTIVE:
4030  {
4031  // Do nothing
4032  }
4033  break;
4034 
4035  case SUM_SQUARED_ERROR_OBJECTIVE:
4036  {
4037  //gradient = sum_squared_error_objective_pointer->calculate_gradient(parameters);
4038  }
4039  break;
4040 
4041  case MEAN_SQUARED_ERROR_OBJECTIVE:
4042  {
4043  //gradient = mean_squared_error_objective_pointer->calculate_gradient(parameters);
4044  }
4045  break;
4046 
4047  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
4048  {
4049  //gradient = root_mean_squared_error_objective_pointer->calculate_gradient(parameters);
4050  }
4051  break;
4052 
4053  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
4054  {
4055  //gradient = normalized_squared_error_objective_pointer->calculate_gradient(parameters);
4056  }
4057  break;
4058 
4059  case MINKOWSKI_ERROR_OBJECTIVE:
4060  {
4061  //gradient = Minkowski_error_objective_pointer->calculate_gradient(parameters);
4062  }
4063  break;
4064 
4065  case CROSS_ENTROPY_ERROR_OBJECTIVE:
4066  {
4067  //gradient = cross_entropy_error_objective_pointer->calculate_gradient(parameters);
4068  }
4069  break;
4070 
4071  case OUTPUTS_INTEGRALS_OBJECTIVE:
4072  {
4073  //gradient = outputs_integrals_objective_pointer->calculate_gradient(parameters);
4074  }
4075  break;
4076 
4077  case SOLUTIONS_ERROR_OBJECTIVE:
4078  {
4079  gradient = solutions_error_objective_pointer->calculate_gradient(parameters);
4080  }
4081  break;
4082 
4083  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
4084  {
4086  }
4087  break;
4088 
4089  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
4090  {
4091  //gradient = independent_parameters_error_objective_pointer->calculate_gradient(parameters);
4092  }
4093  break;
4094 
4095  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
4096  {
4098  }
4099  break;
4100 
4101  case USER_OBJECTIVE:
4102  {
4103  gradient = user_objective_pointer->calculate_gradient(parameters);
4104  }
4105  break;
4106 
4107  default:
4108  {
4109  std::ostringstream buffer;
4110 
4111  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4112  << "Vector<double> calculate_objective_gradient(const Vector<double>&) const method.\n"
4113  << "Unknown objective type.\n";
4114 
4115  throw std::logic_error(buffer.str());
4116  }
4117  break;
4118  }
4119 
4120  return(gradient);
4121 }
4122 
4123 
4124 // Vector<double> calculate_regularization_gradient(void) const method
4125 
4129 
4130 Vector<double> PerformanceFunctional::calculate_regularization_gradient(void) const
4131 {
4132  // Control sentence (if debug)
4133 
4134  #ifndef NDEBUG
4135 
4137 
4138  #endif
4139 
4140  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4141 
4142  Vector<double> gradient(parameters_number, 0.0);
4143 
4144  // Regularization
4145 
4146  switch(regularization_type)
4147  {
4148  case NO_REGULARIZATION:
4149  {
4150  // Do nothing
4151  }
4152  break;
4153 
4154  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
4155  {
4157  }
4158  break;
4159 
4160  case OUTPUTS_INTEGRALS_REGULARIZATION:
4161  {
4163  }
4164  break;
4165 
4166  case USER_REGULARIZATION:
4167  {
4169  }
4170  break;
4171 
4172  default:
4173  {
4174  std::ostringstream buffer;
4175 
4176  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4177  << "Vector<double> calculate_regularization_gradient(void) const method.\n"
4178  << "Unknown regularization type.\n";
4179 
4180  throw std::logic_error(buffer.str());
4181  }
4182  break;
4183  }
4184 
4185  return(gradient);
4186 }
4187 
4188 
4189 // Vector<double> calculate_regularization_gradient(const Vector<double>&) const method
4190 
4194 
4195 Vector<double> PerformanceFunctional::calculate_regularization_gradient(const Vector<double>& parameters) const
4196 {
4197  // Control sentence (if debug)
4198 
4199  #ifndef NDEBUG
4200 
4202 
4203  #endif
4204 
4205  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4206 
4207  Vector<double> gradient(parameters_number, 0.0);
4208 
4209  // Regularization
4210 
4211  switch(regularization_type)
4212  {
4213  case NO_REGULARIZATION:
4214  {
4215  // Do nothing
4216  }
4217  break;
4218 
4219  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
4220  {
4221  //gradient = neural_parameters_norm_regularization_pointer->calculate_gradient(parameters);
4222  }
4223  break;
4224 
4225  case OUTPUTS_INTEGRALS_REGULARIZATION:
4226  {
4227  //gradient = outputs_integrals_regularization_pointer->calculate_gradient(parameters);
4228  }
4229  break;
4230 
4231  case USER_REGULARIZATION:
4232  {
4233  gradient = user_regularization_pointer->calculate_gradient(parameters);
4234  }
4235  break;
4236 
4237  default:
4238  {
4239  std::ostringstream buffer;
4240 
4241  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4242  << "Vector<double> calculate_regularization_gradient(const Vector<double>&) const method.\n"
4243  << "Unknown regularization type.\n";
4244 
4245  throw std::logic_error(buffer.str());
4246  }
4247  break;
4248  }
4249 
4250  return(gradient);
4251 }
4252 
4253 
4254 // Vector<double> calculate_constraints_gradient(void) const method
4255 
4259 
4260 Vector<double> PerformanceFunctional::calculate_constraints_gradient(void) const
4261 {
4262  // Control sentence (if debug)
4263 
4264  #ifndef NDEBUG
4265 
4267 
4268  #endif
4269 
4270  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4271 
4272  Vector<double> gradient(parameters_number, 0.0);
4273 
4274  // Constraints
4275 
4276  switch(constraints_type)
4277  {
4278  case NO_CONSTRAINTS:
4279  {
4280  // Do nothing
4281  }
4282  break;
4283 
4284  case OUTPUTS_INTEGRALS_CONSTRAINTS:
4285  {
4287  }
4288  break;
4289 
4290  case SOLUTIONS_ERROR_CONSTRAINTS:
4291  {
4293  }
4294  break;
4295 
4296  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
4297  {
4299  }
4300  break;
4301 
4302  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
4303  {
4305  }
4306  break;
4307 
4308  case USER_CONSTRAINTS:
4309  {
4311  }
4312  break;
4313 
4314  default:
4315  {
4316  std::ostringstream buffer;
4317 
4318  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4319  << "Vector<double> calculate_constraints_gradient(void) const method.\n"
4320  << "Unknown constraints type.\n";
4321 
4322  throw std::logic_error(buffer.str());
4323  }
4324  break;
4325  }
4326 
4327  return(gradient);
4328 }
4329 
4330 
4331 // Vector<double> calculate_constraints_gradient(const Vector<double>&) const method
4332 
4336 
4337 Vector<double> PerformanceFunctional::calculate_constraints_gradient(const Vector<double>& parameters) const
4338 {
4339  // Control sentence (if debug)
4340 
4341  #ifndef NDEBUG
4342 
4344 
4345  #endif
4346 
4347  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4348 
4349  Vector<double> gradient(parameters_number, 0.0);
4350 
4351  // Constraints
4352 
4353  switch(constraints_type)
4354  {
4355  case NO_CONSTRAINTS:
4356  {
4357  // Do nothing
4358  }
4359  break;
4360 
4361  case OUTPUTS_INTEGRALS_CONSTRAINTS:
4362  {
4363  //gradient = outputs_integrals_constraints_pointer->calculate_gradient(parameters);
4364  }
4365  break;
4366 
4367  case SOLUTIONS_ERROR_CONSTRAINTS:
4368  {
4370  }
4371  break;
4372 
4373  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
4374  {
4376  }
4377  break;
4378 
4379  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
4380  {
4381  //gradient = independent_parameters_error_constraints_pointer->calculate_gradient(parameters);
4382  }
4383  break;
4384 
4385  case USER_CONSTRAINTS:
4386  {
4387  gradient = user_constraints_pointer->calculate_gradient(parameters);
4388  }
4389  break;
4390 
4391  default:
4392  {
4393  std::ostringstream buffer;
4394 
4395  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4396  << "Vector<double> calculate_constraints_gradient(const Vector<double>&) const method.\n"
4397  << "Unknown constraints type.\n";
4398 
4399  throw std::logic_error(buffer.str());
4400  }
4401  break;
4402  }
4403 
4404  return(gradient);
4405 }
4406 
4407 
4408 // Matrix<double> calculate_objective_Hessian(void) const method
4409 
4413 
4414 Matrix<double> PerformanceFunctional::calculate_objective_Hessian(void) const
4415 {
4416  // Control sentence (if debug)
4417 
4418  #ifndef NDEBUG
4419 
4421 
4422  #endif
4423 
4424  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4425 
4426  Matrix<double> Hessian(parameters_number, parameters_number, 0.0);
4427 
4428  // Objective
4429 
4430  switch(objective_type)
4431  {
4432  case NO_OBJECTIVE:
4433  {
4434  // Do nothing
4435  }
4436  break;
4437 
4438  case SUM_SQUARED_ERROR_OBJECTIVE:
4439  {
4441  }
4442  break;
4443 
4444  case MEAN_SQUARED_ERROR_OBJECTIVE:
4445  {
4447  }
4448  break;
4449 
4450  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
4451  {
4453  }
4454  break;
4455 
4456  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
4457  {
4459  }
4460  break;
4461 
4462  case MINKOWSKI_ERROR_OBJECTIVE:
4463  {
4465  }
4466  break;
4467 
4468  case CROSS_ENTROPY_ERROR_OBJECTIVE:
4469  {
4471  }
4472  break;
4473 
4474  case OUTPUTS_INTEGRALS_OBJECTIVE:
4475  {
4477  }
4478  break;
4479 
4480  case SOLUTIONS_ERROR_OBJECTIVE:
4481  {
4483  }
4484  break;
4485 
4486  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
4487  {
4489  }
4490  break;
4491 
4492  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
4493  {
4495  }
4496  break;
4497 
4498  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
4499  {
4501  }
4502  break;
4503 
4504  case USER_OBJECTIVE:
4505  {
4507  }
4508  break;
4509 
4510  default:
4511  {
4512  std::ostringstream buffer;
4513 
4514  buffer << "Matrix<double> Exception: PerformanceFunctional class.\n"
4515  << "Matrix<double> calculate_objective_Hessian(void) const method.\n"
4516  << "Unknown objective type.\n";
4517 
4518  throw std::logic_error(buffer.str());
4519  }
4520  break;
4521  }
4522 
4523  return(Hessian);
4524 }
4525 
4526 
4527 // Matrix<double> calculate_objective_Hessian(const Vector<double>&) const method
4528 
4533 
4534 Matrix<double> PerformanceFunctional::calculate_objective_Hessian(const Vector<double>& parameters) const
4535 {
4536  // Control sentence (if debug)
4537 
4538  #ifndef NDEBUG
4539 
4541 
4542  #endif
4543 
4544  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4545 
4546  Matrix<double> Hessian(parameters_number, parameters_number, 0.0);
4547 
4548  // Objective
4549 
4550  switch(objective_type)
4551  {
4552  case NO_OBJECTIVE:
4553  {
4554  // Do nothing
4555  }
4556  break;
4557 
4558  case SUM_SQUARED_ERROR_OBJECTIVE:
4559  {
4561  }
4562  break;
4563 
4564  case MEAN_SQUARED_ERROR_OBJECTIVE:
4565  {
4566  //Hessian = mean_squared_error_objective_pointer->calculate_Hessian(parameters);
4567  }
4568  break;
4569 
4570  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
4571  {
4572  //Hessian = root_mean_squared_error_objective_pointer->calculate_Hessian(parameters);
4573  }
4574  break;
4575 
4576  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
4577  {
4578  //Hessian = normalized_squared_error_objective_pointer->calculate_Hessian(parameters);
4579  }
4580  break;
4581 
4582  case MINKOWSKI_ERROR_OBJECTIVE:
4583  {
4584  //Hessian = Minkowski_error_objective_pointer->calculate_Hessian(parameters);
4585  }
4586  break;
4587 
4588  case CROSS_ENTROPY_ERROR_OBJECTIVE:
4589  {
4590  //Hessian = cross_entropy_error_objective_pointer->calculate_Hessian(parameters);
4591  }
4592  break;
4593 
4594  case OUTPUTS_INTEGRALS_OBJECTIVE:
4595  {
4596  //Hessian = outputs_integrals_objective_pointer->calculate_Hessian(parameters);
4597  }
4598  break;
4599 
4600  case SOLUTIONS_ERROR_OBJECTIVE:
4601  {
4602  //Hessian = solutions_error_objective_pointer->calculate_Hessian(parameters);
4603  }
4604  break;
4605 
4606  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
4607  {
4608  //Hessian = final_solutions_error_objective_pointer->calculate_Hessian(parameters);
4609  }
4610  break;
4611 
4612  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
4613  {
4614  //Hessian = independent_parameters_error_objective_pointer->calculate_Hessian(parameters);
4615  }
4616  break;
4617 
4618  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
4619  {
4620  //Hessian = inverse_sum_squared_error_objective_pointer->calculate_Hessian(parameters);
4621  }
4622  break;
4623 
4624  case USER_OBJECTIVE:
4625  {
4626  //Hessian = user_objective_pointer->calculate_Hessian(parameters);
4627  }
4628  break;
4629 
4630  default:
4631  {
4632  std::ostringstream buffer;
4633 
4634  buffer << "Matrix<double> Exception: PerformanceFunctional class.\n"
4635  << "Matrix<double> calculate_objective_Hessian(const Vector<double>&) const method.\n"
4636  << "Unknown objective type.\n";
4637 
4638  throw std::logic_error(buffer.str());
4639  }
4640  break;
4641  }
4642 
4643  return(Hessian);
4644 }
4645 
4646 
4647 // Matrix<double> calculate_regularization_Hessian(void) const method
4648 
4652 
4653 Matrix<double> PerformanceFunctional::calculate_regularization_Hessian(void) const
4654 {
4655  // Control sentence (if debug)
4656 
4657  #ifndef NDEBUG
4658 
4660 
4661  #endif
4662 
4663  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4664 
4665  Matrix<double> Hessian(parameters_number, parameters_number, 0.0);
4666 
4667  // Regularization
4668 
4669  switch(regularization_type)
4670  {
4671  case NO_REGULARIZATION:
4672  {
4673  // Do nothing
4674  }
4675  break;
4676 
4677  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
4678  {
4680  }
4681  break;
4682 
4683  case OUTPUTS_INTEGRALS_REGULARIZATION:
4684  {
4686  }
4687  break;
4688 
4689  case USER_REGULARIZATION:
4690  {
4692  }
4693  break;
4694 
4695  default:
4696  {
4697  std::ostringstream buffer;
4698 
4699  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4700  << "Matrix<double> calculate_regularization_Hessian(void) const method.\n"
4701  << "Unknown regularization type.\n";
4702 
4703  throw std::logic_error(buffer.str());
4704  }
4705  break;
4706  }
4707 
4708  return(Hessian);
4709 }
4710 
4711 
4712 // Matrix<double> calculate_regularization_Hessian(const Vector<double>&) const method
4713 
4718 
4719 Matrix<double> PerformanceFunctional::calculate_regularization_Hessian(const Vector<double>& parameters) const
4720 {
4721  // Control sentence (if debug)
4722 
4723  #ifndef NDEBUG
4724 
4726 
4727  #endif
4728 
4729  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4730 
4731  Matrix<double> Hessian(parameters_number, parameters_number, 0.0);
4732 
4733  // Regularization
4734 
4735  switch(regularization_type)
4736  {
4737  case NO_REGULARIZATION:
4738  {
4739  // Do nothing
4740  }
4741  break;
4742 
4743  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
4744  {
4745  //Hessian = neural_parameters_norm_regularization_pointer->calculate_Hessian(parameters);
4746  }
4747  break;
4748 
4749  case OUTPUTS_INTEGRALS_REGULARIZATION:
4750  {
4751  //Hessian = outputs_integrals_regularization_pointer->calculate_Hessian(parameters);
4752  }
4753  break;
4754 
4755  case USER_REGULARIZATION:
4756  {
4757  //Hessian = user_regularization_pointer->calculate_Hessian(parameters);
4758  }
4759  break;
4760 
4761  default:
4762  {
4763  std::ostringstream buffer;
4764 
4765  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4766  << "Matrix<double> calculate_regularization_Hessian(const Vector<double>&) const method.\n"
4767  << "Unknown regularization type.\n";
4768 
4769  throw std::logic_error(buffer.str());
4770  }
4771  break;
4772  }
4773 
4774  return(Hessian);
4775 }
4776 
4777 
4778 // Matrix<double> calculate_constraints_Hessian(void) const method
4779 
4783 
4784 Matrix<double> PerformanceFunctional::calculate_constraints_Hessian(void) const
4785 {
4786  // Control sentence (if debug)
4787 
4788  #ifndef NDEBUG
4789 
4791 
4792  #endif
4793 
4794  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4795 
4796  Matrix<double> Hessian(parameters_number, parameters_number, 0.0);
4797 
4798  // Constraints
4799 
4800  switch(constraints_type)
4801  {
4802  case NO_CONSTRAINTS:
4803  {
4804  // Do nothing
4805  }
4806  break;
4807 
4808  case OUTPUTS_INTEGRALS_CONSTRAINTS:
4809  {
4811  }
4812  break;
4813 
4814  case SOLUTIONS_ERROR_CONSTRAINTS:
4815  {
4817  }
4818  break;
4819 
4820  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
4821  {
4823  }
4824  break;
4825 
4826  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
4827  {
4829  }
4830  break;
4831 
4832  case USER_CONSTRAINTS:
4833  {
4835  }
4836  break;
4837 
4838  default:
4839  {
4840  std::ostringstream buffer;
4841 
4842  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4843  << "Matrix<double> calculate_constraints_Hessian(void) const method.\n"
4844  << "Unknown constraints type.\n";
4845 
4846  throw std::logic_error(buffer.str());
4847  }
4848  break;
4849  }
4850 
4851  return(Hessian);
4852 }
4853 
4854 
4855 // Matrix<double> calculate_constraints_Hessian(const Vector<double>&) const method
4856 
4861 
4862 Matrix<double> PerformanceFunctional::calculate_constraints_Hessian(const Vector<double>& parameters) const
4863 {
4864  // Control sentence (if debug)
4865 
4866  #ifndef NDEBUG
4867 
4869 
4870  #endif
4871 
4872  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4873 
4874  Matrix<double> Hessian(parameters_number, parameters_number, 0.0);
4875 
4876  // Constraints
4877 
4878  switch(constraints_type)
4879  {
4880  case NO_CONSTRAINTS:
4881  {
4882  // Do nothing
4883  }
4884  break;
4885 
4886  case OUTPUTS_INTEGRALS_CONSTRAINTS:
4887  {
4888  //Hessian = outputs_integrals_constraints_pointer->calculate_Hessian(parameters);
4889  }
4890  break;
4891 
4892  case SOLUTIONS_ERROR_CONSTRAINTS:
4893  {
4894  //Hessian = solutions_error_constraints_pointer->calculate_Hessian(parameters);
4895  }
4896  break;
4897 
4898  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
4899  {
4900  //Hessian = final_solutions_error_constraints_pointer->calculate_Hessian(parameters);
4901  }
4902  break;
4903 
4904  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
4905  {
4906  //Hessian = independent_parameters_error_constraints_pointer->calculate_Hessian(parameters);
4907  }
4908  break;
4909 
4910  case USER_CONSTRAINTS:
4911  {
4912  //Hessian = user_constraints_pointer->calculate_Hessian(parameters);
4913  }
4914  break;
4915 
4916  default:
4917  {
4918  std::ostringstream buffer;
4919 
4920  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4921  << "Matrix<double> calculate_constraints_Hessian(const Vector<double>&) const method.\n"
4922  << "Unknown constraints type.\n";
4923 
4924  throw std::logic_error(buffer.str());
4925  }
4926  break;
4927  }
4928 
4929  return(Hessian);
4930 }
4931 
4932 
4933 // double calculate_performance(void) const method
4934 
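4935 /// Returns the performance of the neural network for the current parameters,
4936 /// computed as the sum of the objective, regularization and constraints terms.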
4937 
4938 double PerformanceFunctional::calculate_performance(void) const
4939 {
4940  // Control sentence (if debug)
4941 
4942  #ifndef NDEBUG
4943 
4945 
4947 
4948  #endif
4949 
4950  return(calculate_objective() + calculate_regularization() + calculate_constraints());
4951 }
4952 
4953 
4954 // double calculate_performance(const Vector<double>&) const method
4955 
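4956 /// Returns the performance of the neural network for a given vector of parameters,
4957 /// computed as the sum of the objective, regularization and constraints terms evaluated at those parameters.
4958 /// @param parameters Vector of parameters of the neural network.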
4959 
4960 double PerformanceFunctional::calculate_performance(const Vector<double>& parameters) const
4961 {
4962  // Control sentence (if debug)
4963 
4964  #ifndef NDEBUG
4965 
4967 
4969 
4970  const size_t size = parameters.size();
4971 
4972  const size_t parameters_number = neural_network_pointer->count_parameters_number();
4973 
4974  if(size != parameters_number)
4975  {
4976  std::ostringstream buffer;
4977 
4978  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
4979  << "double calculate_performance(const Vector<double>&) method.\n"
4980  << "Size (" << size << ") must be equal to number of parameters (" << parameters_number << ").\n";
4981 
4982  throw std::logic_error(buffer.str());
4983  }
4984 
4985  #endif
4986 
4987  return(calculate_objective(parameters) + calculate_regularization(parameters) + calculate_constraints(parameters));
4988 }
4989 
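// Illustrative usage sketch: assuming a NeuralNetwork and a DataSet have been constructed
// and loaded (the architecture numbers below are hypothetical), the performance and its
// gradient can be queried for a given parameters vector as follows.
//
//   NeuralNetwork neural_network(1, 2, 1);
//   DataSet data_set;
//
//   PerformanceFunctional performance_functional(&neural_network, &data_set);
//
//   const Vector<double> parameters = neural_network.arrange_parameters();
//
//   const double performance = performance_functional.calculate_performance(parameters);
//   const Vector<double> gradient = performance_functional.calculate_gradient(parameters);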
4990 
4991 // double calculate_generalization_objective(void) const method
4992 
4994 
4995 double PerformanceFunctional::calculate_generalization_objective(void) const
4996 {
4997  double generalization_objective = 0.0;
4998 
4999  switch(objective_type)
5000  {
5001  case NO_OBJECTIVE:
5002  {
5003  // Do nothing
5004  }
5005  break;
5006 
5007  case SUM_SQUARED_ERROR_OBJECTIVE:
5008  {
5010  }
5011  break;
5012 
5013  case MEAN_SQUARED_ERROR_OBJECTIVE:
5014  {
5016  }
5017  break;
5018 
5019  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
5020  {
5022  }
5023  break;
5024 
5025  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
5026  {
5028  }
5029  break;
5030 
5031  case MINKOWSKI_ERROR_OBJECTIVE:
5032  {
5034  }
5035  break;
5036 
5037  case CROSS_ENTROPY_ERROR_OBJECTIVE:
5038  {
5040  }
5041  break;
5042 
5043  case OUTPUTS_INTEGRALS_OBJECTIVE:
5044  {
5046  }
5047  break;
5048 
5049  case SOLUTIONS_ERROR_OBJECTIVE:
5050  {
5052  }
5053  break;
5054 
5055  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
5056  {
5058  }
5059  break;
5060 
5061  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
5062  {
5064  }
5065  break;
5066 
5067  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
5068  {
5070  }
5071  break;
5072 
5073  case USER_OBJECTIVE:
5074  {
5075  generalization_objective += user_objective_pointer->calculate_generalization_performance();
5076  }
5077  break;
5078 
5079  default:
5080  {
5081  std::ostringstream buffer;
5082 
5083  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5084  << "double calculate_generalization_objective(void) const method.\n"
5085  << "Unknown objective type.\n";
5086 
5087  throw std::logic_error(buffer.str());
5088  }
5089  break;
5090  }
5091 
5092  return(generalization_objective);
5093 }
5094 
5095 
5096 // double calculate_generalization_regularization(void) const method
5097 
5099 
5100 double PerformanceFunctional::calculate_generalization_regularization(void) const
5101 {
5102  double generalization_regularization = 0.0;
5103 
5104  switch(regularization_type)
5105  {
5106  case NO_REGULARIZATION:
5107  {
5108  // Do nothing
5109  }
5110  break;
5111 
5112  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
5113  {
5115  }
5116  break;
5117 
5118  case OUTPUTS_INTEGRALS_REGULARIZATION:
5119  {
5121  }
5122  break;
5123 
5124  case USER_REGULARIZATION:
5125  {
5126  generalization_regularization = user_regularization_pointer->calculate_generalization_performance();
5127  }
5128  break;
5129 
5130  default:
5131  {
5132  std::ostringstream buffer;
5133 
5134  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5135  << "double calculate_generalization_regularization(void) const method.\n"
5136  << "Unknown regularization type.\n";
5137 
5138  throw std::logic_error(buffer.str());
5139  }
5140  break;
5141  }
5142 
5143  return(generalization_regularization);
5144 }
5145 
5146 
5147 // double calculate_generalization_constraints(void) const
5148 
5150 
5151 double PerformanceFunctional::calculate_generalization_constraints(void) const
5152 {
5153  double generalization_constraints = 0.0;
5154 
5155  switch(constraints_type)
5156  {
5157  case NO_CONSTRAINTS:
5158  {
5159  // Do nothing
5160  }
5161  break;
5162 
5163  case OUTPUTS_INTEGRALS_CONSTRAINTS:
5164  {
5166  }
5167  break;
5168 
5169  case SOLUTIONS_ERROR_CONSTRAINTS:
5170  {
5172  }
5173  break;
5174 
5175  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
5176  {
5178  }
5179  break;
5180 
5181  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
5182  {
5184  }
5185  break;
5186 
5187  case USER_CONSTRAINTS:
5188  {
5189  generalization_constraints += user_constraints_pointer->calculate_generalization_performance();
5190  }
5191  break;
5192 
5193  default:
5194  {
5195  std::ostringstream buffer;
5196 
5197  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5198  << "double calculate_generalization_constraints(void) const method.\n"
5199  << "Unknown constraints type.\n";
5200 
5201  throw std::logic_error(buffer.str());
5202  }
5203  break;
5204  }
5205 
5206  return(generalization_constraints);
5207 }
5208 
5209 
5210 // double calculate_generalization_performance(void) const method
5211 
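5212 /// Returns the performance of the neural network measured on the generalization instances of the data set,
5213 /// computed as the sum of the generalization objective, regularization and constraints values.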
5214 
5215 double PerformanceFunctional::calculate_generalization_performance(void) const
5216 {
5217  // Control sentence (if debug)
5218 
5219  #ifndef NDEBUG
5220 
5222 
5224 
5225  #endif
5226 
5227  const double generalization_performance = calculate_generalization_objective()
5228  + calculate_generalization_regularization()
5229  + calculate_generalization_constraints();
5230 
5232  return(generalization_performance);
5233 }
5234 
5235 
5236 // Vector<double> calculate_gradient(void) const method
5237 
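5238 /// Returns the gradient of the performance function for the current parameters, as the sum of the objective, regularization and constraints gradients.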
5239 
5240 Vector<double> PerformanceFunctional::calculate_gradient(void) const
5241 {
5242  // Control sentence (if debug)
5243 
5244  #ifndef NDEBUG
5245 
5247 
5249 
5250  #endif
5251 
5252  return(calculate_objective_gradient() + calculate_regularization_gradient() + calculate_constraints_gradient());
5253 }
5254 
5255 
5256 // Vector<double> calculate_gradient(const Vector<double>&) const method
5257 
5261 
5262 Vector<double> PerformanceFunctional::calculate_gradient(const Vector<double>& parameters) const
5263 {
5264  #ifndef NDEBUG
5265 
5267 
5269 
5270  #endif
5271 
5272  #ifndef NDEBUG
5273 
5274  const size_t parameters_number = neural_network_pointer->count_parameters_number();
5275 
5276  const size_t size = parameters.size();
5277 
5278  if(size != parameters_number)
5279  {
5280  std::ostringstream buffer;
5281 
5282  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5283  << "Vector<double> calculate_gradient(const Vector<double>&) const method.\n"
5284  << "Size (" << size << ") must be equal to number of parameters (" << parameters_number << ").\n";
5285 
5286  throw std::logic_error(buffer.str());
5287  }
5288 
5289  #endif
5290 
5291  return(calculate_objective_gradient(parameters) + calculate_regularization_gradient(parameters) + calculate_constraints_gradient(parameters));
5292 }
5293 
5294 
5295 
5296 // Matrix<double> calculate_Hessian(void) const method
5297 
5300 
5301 Matrix<double> PerformanceFunctional::calculate_Hessian(void) const
5302 {
5303  #ifndef NDEBUG
5304 
5306 
5308 
5309  #endif
5310 
5311  return(calculate_objective_Hessian() + calculate_regularization_Hessian() + calculate_constraints_Hessian());
5312 }
5313 
5314 
5315 // Matrix<double> calculate_Hessian(const Vector<double>&) const method
5316 
5322 
5323 Matrix<double> PerformanceFunctional::calculate_Hessian(const Vector<double>& parameters) const
5324 {
5325  // Control sentence (if debug)
5326 
5327  #ifndef NDEBUG
5328 
5330 
5332 
5333  const size_t size = parameters.size();
5334  const size_t parameters_number = neural_network_pointer->count_parameters_number();
5335 
5336  if(size != parameters_number)
5337  {
5338  std::ostringstream buffer;
5339 
5340  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5341  << "double calculate_Hessian(const Vector<double>&) method.\n"
5342  << "Size must be equal to number of parameters.\n";
5343 
5344  throw std::logic_error(buffer.str());
5345  }
5346 
5347  #endif
5348 
5349  return(calculate_objective_Hessian(parameters) + calculate_regularization_Hessian(parameters) + calculate_constraints_Hessian(parameters));
5350 }
5351 
5352 
5353 // Vector<double> calculate_terms(void) const method
5354 
5357 
5358 Vector<double> PerformanceFunctional::calculate_terms(void) const
5359 {
5360  // Control sentence (if debug)
5361 
5362  #ifndef NDEBUG
5363 
5365 
5367 
5368  #endif
5369 
5370  const Vector<double> objective_terms = calculate_objective_terms();
5371 
5372  const Vector<double> regularization_terms = calculate_regularization_terms();
5373 
5374  const Vector<double> constraints_terms = calculate_constraints_terms();
5375 
5376  return(objective_terms.assemble(regularization_terms).assemble(constraints_terms));
5377 }
5378 
5379 
5380 // Matrix<double> calculate_terms_Jacobian(void) const method
5381 
5383 
5384 Matrix<double> PerformanceFunctional::calculate_terms_Jacobian(void) const
5385 {
5386  // Control sentence (if debug)
5387 
5388  #ifndef NDEBUG
5389 
5391 
5393 
5394  #endif
5395 
5396  const Matrix<double> objective_terms_Jacobian = calculate_objective_terms_Jacobian();
5397 
5398 // const Matrix<double> regularization_terms_Jacobian = calculate_regularization_terms_Jacobian();
5399 
5400 // const Matrix<double> constraints_terms_Jacobian = calculate_constraints_terms_Jacobian();
5401 
5402 // Matrix<double> terms_Jacobian;
5403 
5404 // if(!objective_terms_Jacobian.empty())
5405 // {
5406 // terms_Jacobian = objective_terms_Jacobian;
5407 // }
5408 
5409  return(objective_terms_Jacobian);
5410 }
5411 
5412 
5413 // Matrix<double> calculate_inverse_Hessian(void) const method
5414 
5418 
5419 Matrix<double> PerformanceFunctional::calculate_inverse_Hessian(void) const
5420 {
5421  std::ostringstream buffer;
5422 
5423  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5424  << "Matrix<double> calculate_inverse_Hessian(void) const method.\n"
5425  << "This method is not yet implemented.\n";
5426 
5427  throw std::logic_error(buffer.str());
5428 
5429  // Control sentence (if debug)
5430 
5431  #ifndef NDEBUG
5432 
5434 
5436 
5437  #endif
5438 
5439 // const Matrix<double> Hessian = calculate_Hessian();
5440 
5441 // return(Hessian.calculate_inverse());
5442 }
5443 
5444 
5445 // Vector<double> calculate_vector_dot_Hessian(Vector<double>) const method
5446 
5451 
5452 Vector<double> PerformanceFunctional::calculate_vector_dot_Hessian(const Vector<double>& vector) const
5453 {
5454  // Control sentence (if debug)
5455 
5456  #ifndef NDEBUG
5457 
5459 
5461 
5462  #endif
5463 
5464 
5465  // Control sentence
5466 
5467  const size_t size = vector.size();
5468 
5469  const size_t parameters_number = neural_network_pointer->count_parameters_number();
5470 
5471  if(size != parameters_number)
5472  {
5473  std::ostringstream buffer;
5474 
5475  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5476  << "Vector<double> calculate_vector_dot_Hessian(Vector<double>) method.\n"
5477  << "Size of vector must be equal to number of parameters.\n";
5478 
5479  throw std::logic_error(buffer.str());
5480  }
5481 
5482  // Calculate vector Hessian product
5483 
5484  Vector<double> vector_Hessian_product(parameters_number);
5485 
5486  return(vector_Hessian_product);
5487 }
5488 
5489 
5490 // ZeroOrderperformance calculate_zero_order_performance(void) const method
5491 
5493 
5494 PerformanceFunctional::ZeroOrderperformance PerformanceFunctional::calculate_zero_order_performance(void) const
5495 {
5496  ZeroOrderperformance zero_order_performance;
5497 
5498  zero_order_performance.performance = calculate_performance();
5499 
5500  return(zero_order_performance);
5501 }
5502 
5503 
5504 // FirstOrderperformance calculate_first_order_performance(void) const method
5505 
5507 
5508 PerformanceFunctional::FirstOrderperformance PerformanceFunctional::calculate_first_order_performance(void) const
5509 {
5510  FirstOrderperformance first_order_performance;
5511 
5512  first_order_performance.performance = calculate_performance();
5513  first_order_performance.gradient = calculate_gradient();
5514 
5515  return(first_order_performance);
5516 }
5517 
5518 
5519 // SecondOrderperformance calculate_second_order_performance(void) const method
5520 
5522 
5523 PerformanceFunctional::SecondOrderperformance PerformanceFunctional::calculate_second_order_performance(void) const
5524 {
5525  SecondOrderperformance second_order_performance;
5526 
5527  second_order_performance.performance = calculate_performance();
5528  second_order_performance.gradient = calculate_gradient();
5529  second_order_performance.Hessian = calculate_Hessian();
5530 
5531  return(second_order_performance);
5532 }
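// A minimal usage sketch (illustrative only, not part of the library): a training
// algorithm holding a performance functional could request the structures above as
// follows. The variable name performance_functional is an assumption for the example.
//
//    const PerformanceFunctional::FirstOrderperformance first_order
//       = performance_functional.calculate_first_order_performance();
//
//    const double performance = first_order.performance;
//    const Vector<double> gradient = first_order.gradient;
//
//    const PerformanceFunctional::SecondOrderperformance second_order
//       = performance_functional.calculate_second_order_performance();
//
//    const Matrix<double> Hessian = second_order.Hessian;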
5533 
5534 
5535 // double calculate_zero_order_Taylor_approximation(const Vector<double>&) const method
5536 
5537 /// Returns the zero order Taylor approximation of the performance function at some parameters,
5538 /// which is just the performance value at the current parameters.
5539 
5540 double PerformanceFunctional::calculate_zero_order_Taylor_approximation(const Vector<double>& parameters) const
5541 {
5542  return(calculate_performance());
5543 }
5544 
5545 
5546 // double calculate_first_order_Taylor_approximation(const Vector<double>&) const method
5547 
5549 /// Returns the first order Taylor approximation of the performance function at some parameters, about the current parameters.
5550 /// @param parameters Approximation point.
5551 
5552 double PerformanceFunctional::calculate_first_order_Taylor_approximation(const Vector<double>& parameters) const
5553 {
5554  // Control sentence (if debug)
5555 
5556  #ifndef NDEBUG
5557 
5558  const size_t parameters_size = parameters.size();
5559  const size_t parameters_number = neural_network_pointer->count_parameters_number();
5560 
5561  if(parameters_size != parameters_number)
5562  {
5563  std::ostringstream buffer;
5564 
5565  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5566  << "double calculate_first_order_Taylor_approximation(const Vector<double>&) const method.\n"
5567  << "Size of potential parameters must be equal to number of parameters.\n";
5568 
5569  throw std::logic_error(buffer.str());
5570  }
5571 
5572  #endif
5573 
5574  const Vector<double> original_parameters = neural_network_pointer->arrange_parameters();
5575 
5576  const double performance = calculate_performance();
5577  const Vector<double> gradient = calculate_gradient();
5578 
5579  const double first_order_Taylor_approximation = performance + gradient.dot(parameters - original_parameters);
5580 
5581  return(first_order_Taylor_approximation);
5582 }
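// For reference, the expansion computed above is, with p the argument vector,
// p0 the current network parameters and g the gradient at p0:
//
//    f(p) ≈ f(p0) + g(p0)·(p - p0)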
5583 
5584 
5585 // double calculate_second_order_Taylor_approximation(const Vector<double>&) const method
5586 
5588 /// Returns the second order Taylor approximation of the performance function at some parameters, about the current parameters.
5589 /// @param parameters Approximation point.
5590 
5591 double PerformanceFunctional::calculate_second_order_Taylor_approximation(const Vector<double>& parameters) const
5592 {
5593  // Control sentence (if debug)
5594 
5595  #ifndef NDEBUG
5596 
5597  const size_t parameters_size = parameters.size();
5598  const size_t parameters_number = neural_network_pointer->count_parameters_number();
5599 
5600  if(parameters_size != parameters_number)
5601  {
5602  std::ostringstream buffer;
5603 
5604  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5605  << "double calculate_second_order_Taylor_approximation(const Vector<double>&) const method.\n"
5606  << "Size of potential parameters must be equal to number of parameters.\n";
5607 
5608  throw std::logic_error(buffer.str());
5609  }
5610 
5611  #endif
5612 
5613  // Neural network stuff
5614 
5615  const Vector<double> original_parameters = neural_network_pointer->arrange_parameters();
5616  const Vector<double> parameters_difference = parameters - original_parameters;
5617 
5618  // Performance functional stuff
5619 
5620  const double performance = calculate_performance();
5621  const Vector<double> gradient = calculate_gradient();
5622  const Matrix<double> Hessian = calculate_Hessian();
5623 
5624  const double second_order_Taylor_approximation = performance
5625  + gradient.dot(parameters_difference)
5626  + parameters_difference.dot(Hessian).dot(parameters_difference)/2.0;
5627 
5628  return(second_order_Taylor_approximation);
5629 }
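// For reference, the expansion computed above adds the curvature term, with H the
// Hessian at the current parameters p0:
//
//    f(p) ≈ f(p0) + g(p0)·(p - p0) + (1/2)·(p - p0)'·H(p0)·(p - p0)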
5630 
5631 
5632 // double calculate_performance(const Vector<double>&, const double&) const method
5633 
5634 /// Returns the performance at the parameters obtained by moving the current parameters along a given direction with a given rate.
5635 /// @param direction Training direction vector.
5636 /// @param rate Training rate.
5637 
5638 double PerformanceFunctional::calculate_performance(const Vector<double>& direction, const double& rate) const
5639 {
5640  const Vector<double> parameters = neural_network_pointer->arrange_parameters();
5641  const Vector<double> increment = direction*rate;
5642 
5643  return(calculate_performance(parameters + increment));
5644 }
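// A minimal usage sketch (illustrative only): a line search inside a training rate
// algorithm could evaluate candidate rates along a fixed direction with this method.
// The names performance_functional and training_direction are assumptions for the example.
//
//    const double performance_1 = performance_functional.calculate_performance(training_direction, 0.1);
//    const double performance_2 = performance_functional.calculate_performance(training_direction, 0.2);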
5645 
5646 
5647 // double calculate_performance_derivative(const Vector<double>&, const double&) const method
5648 
5649 /// Returns the derivative of the performance with respect to the training rate along a given direction, at the parameters obtained with that direction and rate.
5650 /// @param direction Training direction vector.
5651 /// @param rate Training rate.
5652 
5653 double PerformanceFunctional::calculate_performance_derivative(const Vector<double>& direction, const double& rate) const
5654 {
5655  if(direction == 0.0)
5656  {
5657  return(0.0);
5658  }
5659 
5660  const Vector<double> parameters = neural_network_pointer->arrange_parameters();
5661  const Vector<double> potential_parameters = parameters + direction*rate;
5662 
5663  const Vector<double> gradient = calculate_gradient(potential_parameters);
5664 
5665  const Vector<double> normalized_direction = direction/direction.calculate_norm();
5666 
5667  return(gradient.dot(normalized_direction));
5668 }
5669 
5670 
5671 // double calculate_performance_second_derivative(const Vector<double>&, double) const method
5672 
5673 /// Returns the second derivative of the performance with respect to the training rate along a given direction, at the parameters obtained with that direction and rate.
5674 /// @param direction Training direction vector.
5675 /// @param rate Training rate.
5676 
5677 double PerformanceFunctional::calculate_performance_second_derivative(const Vector<double>& direction, const double& rate) const
5678 {
5679  if(direction == 0.0)
5680  {
5681  return(0.0);
5682  }
5683 
5684  const Vector<double> parameters = neural_network_pointer->arrange_parameters();
5685  const Vector<double> potential_parameters = parameters + direction*rate;
5686 
5687  const Matrix<double> Hessian = calculate_Hessian(potential_parameters);
5688 
5689  const Vector<double> normalized_direction = direction/direction.calculate_norm();
5690 
5691  return(normalized_direction.dot(Hessian).dot(normalized_direction));
5692 }
5693 
5694 
5695 // tinyxml2::XMLDocument* to_XML(void) const method
5696 
5697 /// Serializes the performance functional object into an XML document,
5698 /// including the objective, regularization and constraints terms.
5699 
5700 tinyxml2::XMLDocument* PerformanceFunctional::to_XML(void) const
5701 {
5702  std::ostringstream buffer;
5703 
5704  tinyxml2::XMLDocument* document = new tinyxml2::XMLDocument;
5705 
5706  // Performance functional
5707 
5708  tinyxml2::XMLElement* performance_functional_element = document->NewElement("PerformanceFunctional");
5709 
5710  document->InsertFirstChild(performance_functional_element);
5711 
5712  // Objective
5713 
5714  switch(objective_type)
5715  {
5716  case NO_OBJECTIVE:
5717  {
5718  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5719  performance_functional_element->LinkEndChild(objective_element);
5720 
5721  objective_element->SetAttribute("Type", "NO_OBJECTIVE");
5722  }
5723  break;
5724 
5725  case SUM_SQUARED_ERROR_OBJECTIVE:
5726  {
5727  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5728  performance_functional_element->LinkEndChild(objective_element);
5729 
5730  objective_element->SetAttribute("Type", "SUM_SQUARED_ERROR_OBJECTIVE");
5731 
5732  const tinyxml2::XMLDocument* sum_squared_error_document = sum_squared_error_objective_pointer->to_XML();
5733 
5734  const tinyxml2::XMLElement* sum_squared_error_element = sum_squared_error_document->FirstChildElement("SumSquaredError");
5735 
5736  DeepClone(objective_element, sum_squared_error_element, document, NULL);
5737 
5738  delete sum_squared_error_document;
5739  }
5740  break;
5741 
5742  case MEAN_SQUARED_ERROR_OBJECTIVE:
5743  {
5744  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5745  performance_functional_element->LinkEndChild(objective_element);
5746 
5747  objective_element->SetAttribute("Type", "MEAN_SQUARED_ERROR_OBJECTIVE");
5748 
5749  const tinyxml2::XMLDocument* mean_squared_error_document = mean_squared_error_objective_pointer->to_XML();
5750 
5751  const tinyxml2::XMLElement* mean_squared_error_element = mean_squared_error_document->FirstChildElement("MeanSquaredError");
5752 
5753  DeepClone(objective_element, mean_squared_error_element, document, NULL);
5754 
5755  delete mean_squared_error_document;
5756  }
5757  break;
5758 
5759  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
5760  {
5761  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5762  performance_functional_element->LinkEndChild(objective_element);
5763 
5764  objective_element->SetAttribute("Type", "ROOT_MEAN_SQUARED_ERROR_OBJECTIVE");
5765 
5766  const tinyxml2::XMLDocument* root_mean_squared_error_document = root_mean_squared_error_objective_pointer->to_XML();
5767 
5768  const tinyxml2::XMLElement* root_mean_squared_error_element = root_mean_squared_error_document->FirstChildElement("RootMeanSquaredError");
5769 
5770  DeepClone(objective_element, root_mean_squared_error_element, document, NULL);
5771 
5772  delete root_mean_squared_error_document;
5773  }
5774  break;
5775 
5776  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
5777  {
5778  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5779  performance_functional_element->LinkEndChild(objective_element);
5780 
5781  objective_element->SetAttribute("Type", "NORMALIZED_SQUARED_ERROR_OBJECTIVE");
5782 
5783  const tinyxml2::XMLDocument* normalized_squared_error_document = normalized_squared_error_objective_pointer->to_XML();
5784 
5785  const tinyxml2::XMLElement* normalized_squared_error_element = normalized_squared_error_document->FirstChildElement("NormalizedSquaredError");
5786 
5787  DeepClone(objective_element, normalized_squared_error_element, document, NULL);
5788 
5789  delete normalized_squared_error_document;
5790  }
5791  break;
5792 
5793  case MINKOWSKI_ERROR_OBJECTIVE:
5794  {
5795  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5796  performance_functional_element->LinkEndChild(objective_element);
5797 
5798  objective_element->SetAttribute("Type", "MINKOWSKI_ERROR_OBJECTIVE");
5799 
5800  const tinyxml2::XMLDocument* Minkowski_error_document = Minkowski_error_objective_pointer->to_XML();
5801 
5802  const tinyxml2::XMLElement* Minkowski_error_element = Minkowski_error_document->FirstChildElement("MinkowskiError");
5803 
5804  DeepClone(objective_element, Minkowski_error_element, document, NULL);
5805 
5806  delete Minkowski_error_document;
5807  }
5808  break;
5809 
5810  case CROSS_ENTROPY_ERROR_OBJECTIVE:
5811  {
5812  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5813  performance_functional_element->LinkEndChild(objective_element);
5814 
5815  objective_element->SetAttribute("Type", "CROSS_ENTROPY_ERROR_OBJECTIVE");
5816 
5817  const tinyxml2::XMLDocument* cross_entropy_error_document = cross_entropy_error_objective_pointer->to_XML();
5818 
5819  const tinyxml2::XMLElement* cross_entropy_error_element = cross_entropy_error_document->FirstChildElement("CrossEntropyError");
5820 
5821  DeepClone(objective_element, cross_entropy_error_element, document, NULL);
5822 
5823  delete cross_entropy_error_document;
5824  }
5825  break;
5826 
5827  case OUTPUTS_INTEGRALS_OBJECTIVE:
5828  {
5829  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5830  performance_functional_element->LinkEndChild(objective_element);
5831 
5832  objective_element->SetAttribute("Type", "OUTPUTS_INTEGRALS_OBJECTIVE");
5833 
5834  const tinyxml2::XMLDocument* outputs_integrals_document = outputs_integrals_objective_pointer->to_XML();
5835 
5836  const tinyxml2::XMLElement* outputs_integrals_element = outputs_integrals_document->FirstChildElement("OutputsIntegrals");
5837 
5838  DeepClone(objective_element, outputs_integrals_element, document, NULL);
5839 
5840  delete outputs_integrals_document;
5841  }
5842  break;
5843 
5844  case SOLUTIONS_ERROR_OBJECTIVE:
5845  {
5846  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5847  performance_functional_element->LinkEndChild(objective_element);
5848 
5849  objective_element->SetAttribute("Type", "SOLUTIONS_ERROR_OBJECTIVE");
5850 
5851  const tinyxml2::XMLDocument* solutions_error_document = solutions_error_objective_pointer->to_XML();
5852 
5853  const tinyxml2::XMLElement* solutions_error_element = solutions_error_document->FirstChildElement("SolutionsError");
5854 
5855  DeepClone(objective_element, solutions_error_element, document, NULL);
5856 
5857  delete solutions_error_document;
5858  }
5859  break;
5860 
5861  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
5862  {
5863  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5864  performance_functional_element->LinkEndChild(objective_element);
5865 
5866  objective_element->SetAttribute("Type", "FINAL_SOLUTIONS_ERROR_OBJECTIVE");
5867 
5868  const tinyxml2::XMLDocument* final_solutions_error_document = final_solutions_error_objective_pointer->to_XML();
5869 
5870  const tinyxml2::XMLElement* final_solutions_error_element = final_solutions_error_document->FirstChildElement("FinalSolutionsError");
5871 
5872  DeepClone(objective_element, final_solutions_error_element, document, NULL);
5873 
5874  delete final_solutions_error_document;
5875  }
5876  break;
5877 
5878  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
5879  {
5880  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5881  performance_functional_element->LinkEndChild(objective_element);
5882 
5883  objective_element->SetAttribute("Type", "INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE");
5884 
5885  const tinyxml2::XMLDocument* independent_parameters_error_document = independent_parameters_error_objective_pointer->to_XML();
5886 
5887  const tinyxml2::XMLElement* independent_parameters_error_element = independent_parameters_error_document->FirstChildElement("IndependentParametersError");
5888 
5889  DeepClone(objective_element, independent_parameters_error_element, document, NULL);
5890 
5891  delete independent_parameters_error_document;
5892  }
5893  break;
5894 
5895  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
5896  {
5897  tinyxml2::XMLElement* objective_element = document->NewElement("Objective");
5898  performance_functional_element->LinkEndChild(objective_element);
5899 
5900  objective_element->SetAttribute("Type", "INVERSE_SUM_SQUARED_ERROR_OBJECTIVE");
5901 
5902  const tinyxml2::XMLDocument* inverse_sum_squared_error_document = inverse_sum_squared_error_objective_pointer->to_XML();
5903 
5904  const tinyxml2::XMLElement* inverse_sum_squared_error_element = inverse_sum_squared_error_document->FirstChildElement("InverseSumSquaredError");
5905 
5906  DeepClone(objective_element, inverse_sum_squared_error_element, document, NULL);
5907 
5908  delete inverse_sum_squared_error_document;
5909  }
5910  break;
5911 
5912  case USER_OBJECTIVE:
5913  {
5914  // Do nothing
5915  }
5916  break;
5917 
5918  default:
5919  {
5920  std::ostringstream buffer;
5921 
5922  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5923  << "tinyxml2::XMLDocument* to_XML(void) const method.\n"
5924  << "Unknown objective type.\n";
5925 
5926  throw std::logic_error(buffer.str());
5927  }
5928  break;
5929  }
5930 
5931  // Regularization
5932 
5933  switch(regularization_type)
5934  {
5935  case NO_REGULARIZATION:
5936  {
5937  tinyxml2::XMLElement* regularization_element = document->NewElement("Regularization");
5938  performance_functional_element->LinkEndChild(regularization_element);
5939 
5940  regularization_element->SetAttribute("Type", "NO_REGULARIZATION");
5941  }
5942  break;
5943 
5944  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
5945  {
5946  tinyxml2::XMLElement* regularization_element = document->NewElement("Regularization");
5947  performance_functional_element->LinkEndChild(regularization_element);
5948 
5949  regularization_element->SetAttribute("Type", "NEURAL_PARAMETERS_NORM_REGULARIZATION");
5950 
5951  const tinyxml2::XMLDocument* neural_parameters_norm_document = neural_parameters_norm_regularization_pointer->to_XML();
5952 
5953  const tinyxml2::XMLElement* neural_parameters_norm_element = neural_parameters_norm_document->FirstChildElement("NeuralParametersNorm");
5954 
5955  DeepClone(regularization_element, neural_parameters_norm_element, document, NULL);
5956 
5957  delete neural_parameters_norm_document;
5958  }
5959  break;
5960 
5961  case OUTPUTS_INTEGRALS_REGULARIZATION:
5962  {
5963  tinyxml2::XMLElement* regularization_element = document->NewElement("Regularization");
5964  performance_functional_element->LinkEndChild(regularization_element);
5965 
5966  regularization_element->SetAttribute("Type", "OUTPUTS_INTEGRALS_REGULARIZATION");
5967 
5968  const tinyxml2::XMLDocument* outputs_integrals_document = outputs_integrals_regularization_pointer->to_XML();
5969 
5970  const tinyxml2::XMLElement* outputs_integrals_element = outputs_integrals_document->FirstChildElement("OutputsIntegrals");
5971 
5972  DeepClone(regularization_element, outputs_integrals_element, document, NULL);
5973 
5974  delete outputs_integrals_document;
5975  }
5976  break;
5977 
5978  case USER_REGULARIZATION:
5979  {
5980  // Do nothing
5981  }
5982  break;
5983 
5984  default:
5985  {
5986  std::ostringstream buffer;
5987 
5988  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
5989  << "tinyxml2::XMLDocument* to_XML(void) const method.\n"
5990  << "Unknown regularization type.\n";
5991 
5992  throw std::logic_error(buffer.str());
5993  }
5994  break;
5995  }
5996 
5997  // Constraints
5998 
5999  switch(constraints_type)
6000  {
6001  case NO_CONSTRAINTS:
6002  {
6003  // Do nothing
6004  }
6005  break;
6006 
6007  case OUTPUTS_INTEGRALS_CONSTRAINTS:
6008  {
6009  tinyxml2::XMLElement* constraints_element = document->NewElement("Constraints");
6010  performance_functional_element->LinkEndChild(constraints_element);
6011  constraints_element->SetAttribute("Type", "OUTPUTS_INTEGRALS_CONSTRAINTS");
6012 
6013  const tinyxml2::XMLDocument* outputs_integrals_document = outputs_integrals_constraints_pointer->to_XML();
6014 
6015  const tinyxml2::XMLElement* outputs_integrals_element = outputs_integrals_document->FirstChildElement("OutputsIntegrals");
6016 
6017  DeepClone(constraints_element, outputs_integrals_element, document, NULL);
6018 
6019  delete outputs_integrals_document;
6020  }
6021  break;
6022 
6023  case SOLUTIONS_ERROR_CONSTRAINTS:
6024  {
6025  tinyxml2::XMLElement* constraints_element = document->NewElement("Constraints");
6026  performance_functional_element->LinkEndChild(constraints_element);
6027 
6028  constraints_element->SetAttribute("Type", "SOLUTIONS_ERROR_CONSTRAINTS");
6029 
6030  const tinyxml2::XMLDocument* solutions_error_document = solutions_error_constraints_pointer->to_XML();
6031 
6032  const tinyxml2::XMLElement* solutions_error_element = solutions_error_document->FirstChildElement("SolutionsError");
6033 
6034  DeepClone(constraints_element, solutions_error_element, document, NULL);
6035 
6036  delete solutions_error_document;
6037  }
6038  break;
6039 
6040  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
6041  {
6042  tinyxml2::XMLElement* constraints_element = document->NewElement("Constraints");
6043  performance_functional_element->LinkEndChild(constraints_element);
6044 
6045  constraints_element->SetAttribute("Type", "FINAL_SOLUTIONS_ERROR_CONSTRAINTS");
6046 
6047  const tinyxml2::XMLDocument* final_solutions_error_document = final_solutions_error_constraints_pointer->to_XML();
6048 
6049  const tinyxml2::XMLElement* final_solutions_error_element = final_solutions_error_document->FirstChildElement("FinalSolutionsError");
6050 
6051  DeepClone(constraints_element, final_solutions_error_element, document, NULL);
6052 
6053  delete final_solutions_error_document;
6054 
6055  }
6056  break;
6057 
6058  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
6059  {
6060  tinyxml2::XMLElement* constraints_element = document->NewElement("Constraints");
6061  performance_functional_element->LinkEndChild(constraints_element);
6062 
6063  constraints_element->SetAttribute("Type", "INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS");
6064 
6065  const tinyxml2::XMLDocument* independent_parameters_error_document = independent_parameters_error_constraints_pointer->to_XML();
6066 
6067  const tinyxml2::XMLElement* independent_parameters_error_element = independent_parameters_error_document->FirstChildElement("IndependentParametersError");
6068 
6069  DeepClone(constraints_element, independent_parameters_error_element, document, NULL);
6070 
6071  delete independent_parameters_error_document;
6072 
6073  }
6074  break;
6075 
6076  case USER_CONSTRAINTS:
6077  {
6078  // Do nothing
6079  }
6080  break;
6081 
6082  default:
6083  {
6084  std::ostringstream buffer;
6085 
6086  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6087  << "tinyxml2::XMLDocument* to_XML(void) const method.\n"
6088  << "Unknown constraints type.\n";
6089 
6090  throw std::logic_error(buffer.str());
6091  }
6092  break;
6093  }
6094 
6095  // Display
6096 
6097  tinyxml2::XMLElement* display_element = document->NewElement("Display");
6098  performance_functional_element->LinkEndChild(display_element);
6099 
6100  buffer.str("");
6101  buffer << display;
6102 
6103  tinyxml2::XMLText* display_text = document->NewText(buffer.str().c_str());
6104  display_element->LinkEndChild(display_text);
6105 
6106  return(document);
6107 }
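// For reference, a document produced by the method above has the following overall
// shape with the default settings (children of each term element are abbreviated):
//
//    <PerformanceFunctional>
//       <Objective Type="NORMALIZED_SQUARED_ERROR_OBJECTIVE"> ... </Objective>
//       <Regularization Type="NO_REGULARIZATION"/>
//       <Display>1</Display>
//    </PerformanceFunctional>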
6108 
6109 
6110 // void from_XML(const tinyxml2::XMLDocument&) method
6111 
6112 /// Sets the members of the performance functional object from an XML document.
6113 /// @param document XML document containing the object data.
6114 
6115 void PerformanceFunctional::from_XML(const tinyxml2::XMLDocument& document)
6116 {
6117  const tinyxml2::XMLElement* performance_functional_element = document.FirstChildElement("PerformanceFunctional");
6118 
6119  if(!performance_functional_element)
6120  {
6121  std::ostringstream buffer;
6122 
6123  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6124  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
6125  << "Performance functional element is NULL.\n";
6126 
6127  throw std::logic_error(buffer.str());
6128  }
6129 
6130  // Objective type
6131 
6132  const tinyxml2::XMLElement* objective_element = performance_functional_element->FirstChildElement("Objective");
6133 
6134  if(objective_element)
6135  {
6136  const std::string new_objective_type = objective_element->Attribute("Type");
6137 
6138  set_objective_type(new_objective_type);
6139 
6140  switch(objective_type)
6141  {
6142  case NO_OBJECTIVE:
6143  {
6144  // Do nothing
6145  }
6146  break;
6147 
6148  case SUM_SQUARED_ERROR_OBJECTIVE:
6149  {
6150  tinyxml2::XMLDocument new_document;
6151 
6152  tinyxml2::XMLElement* element_clone = new_document.NewElement("SumSquaredError");
6153  new_document.InsertFirstChild(element_clone);
6154 
6155  DeepClone(element_clone, objective_element, &new_document, NULL);
6156 
6157  sum_squared_error_objective_pointer->from_XML(new_document);
6158  }
6159  break;
6160 
6161  case MEAN_SQUARED_ERROR_OBJECTIVE:
6162  {
6163  tinyxml2::XMLDocument new_document;
6164 
6165  tinyxml2::XMLElement* element_clone = new_document.NewElement("MeanSquaredError");
6166  new_document.InsertFirstChild(element_clone);
6167 
6168  DeepClone(element_clone, objective_element, &new_document, NULL);
6169 
6170  mean_squared_error_objective_pointer->from_XML(new_document);
6171  }
6172  break;
6173 
6174  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
6175  {
6176  tinyxml2::XMLDocument new_document;
6177 
6178  tinyxml2::XMLElement* element_clone = new_document.NewElement("RootMeanSquaredError");
6179  new_document.InsertFirstChild(element_clone);
6180 
6181  DeepClone(element_clone, objective_element, &new_document, NULL);
6182 
6183  root_mean_squared_error_objective_pointer->from_XML(new_document);
6184  }
6185  break;
6186 
6187  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
6188  {
6189  tinyxml2::XMLDocument new_document;
6190 
6191  tinyxml2::XMLElement* element_clone = new_document.NewElement("NormalizedSquaredError");
6192  new_document.InsertFirstChild(element_clone);
6193 
6194  DeepClone(element_clone, objective_element, &new_document, NULL);
6195 
6196  normalized_squared_error_objective_pointer->from_XML(new_document);
6197  }
6198  break;
6199 
6200  case MINKOWSKI_ERROR_OBJECTIVE:
6201  {
6202  tinyxml2::XMLDocument new_document;
6203 
6204  tinyxml2::XMLElement* element_clone = new_document.NewElement("MinkowskiError");
6205  new_document.InsertFirstChild(element_clone);
6206 
6207  DeepClone(element_clone, objective_element, &new_document, NULL);
6208 
6209  Minkowski_error_objective_pointer->from_XML(new_document);
6210  }
6211  break;
6212 
6213  case CROSS_ENTROPY_ERROR_OBJECTIVE:
6214  {
6215  tinyxml2::XMLDocument new_document;
6216 
6217  tinyxml2::XMLElement* element_clone = new_document.NewElement("CrossEntropyError");
6218  new_document.InsertFirstChild(element_clone);
6219 
6220  DeepClone(element_clone, objective_element, &new_document, NULL);
6221 
6222  cross_entropy_error_objective_pointer->from_XML(new_document);
6223  }
6224  break;
6225 
6226  case OUTPUTS_INTEGRALS_OBJECTIVE:
6227  {
6228  tinyxml2::XMLDocument new_document;
6229 
6230  tinyxml2::XMLElement* element_clone = new_document.NewElement("OutputsIntegralsError");
6231  new_document.InsertFirstChild(element_clone);
6232 
6233  DeepClone(element_clone, objective_element, &new_document, NULL);
6234 
6235  outputs_integrals_objective_pointer->from_XML(new_document);
6236  }
6237  break;
6238 
6239  case SOLUTIONS_ERROR_OBJECTIVE:
6240  {
6241  tinyxml2::XMLDocument new_document;
6242 
6243  tinyxml2::XMLElement* element_clone = new_document.NewElement("SolutionsError");
6244  new_document.InsertFirstChild(element_clone);
6245 
6246  DeepClone(element_clone, objective_element, &new_document, NULL);
6247 
6248  solutions_error_objective_pointer->from_XML(new_document);
6249  }
6250  break;
6251 
6252  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
6253  {
6254  tinyxml2::XMLDocument new_document;
6255 
6256  tinyxml2::XMLElement* element_clone = new_document.NewElement("FinalSolutionsError");
6257  new_document.InsertFirstChild(element_clone);
6258 
6259  DeepClone(element_clone, objective_element, &new_document, NULL);
6260 
6261  final_solutions_error_objective_pointer->from_XML(new_document);
6262  }
6263  break;
6264 
6265  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
6266  {
6267  tinyxml2::XMLDocument new_document;
6268 
6269  tinyxml2::XMLElement* element_clone = new_document.NewElement("IndependentParametersError");
6270  new_document.InsertFirstChild(element_clone);
6271 
6272  DeepClone(element_clone, objective_element, &new_document, NULL);
6273 
6274  independent_parameters_error_objective_pointer->from_XML(new_document);
6275  }
6276  break;
6277 
6278  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
6279  {
6280  tinyxml2::XMLDocument new_document;
6281 
6282  tinyxml2::XMLElement* element_clone = new_document.NewElement("InverseSumSquaredError");
6283  new_document.InsertFirstChild(element_clone);
6284 
6285  DeepClone(element_clone, objective_element, &new_document, NULL);
6286 
6287  inverse_sum_squared_error_objective_pointer->from_XML(new_document);
6288  }
6289  break;
6290 
6291  case USER_OBJECTIVE:
6292  {
6293  //user_objective_pointer = NULL;
6294  }
6295  break;
6296 
6297  default:
6298  {
6299  std::ostringstream buffer;
6300 
6301  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6302  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
6303  << "Unknown objective type.\n";
6304 
6305  throw std::logic_error(buffer.str());
6306  }
6307  break;
6308  }
6309  }
6310 
6311  // Regularization type
6312 
6313  const tinyxml2::XMLElement* regularization_element = performance_functional_element->FirstChildElement("Regularization");
6314 
6315  if(regularization_element)
6316  {
6317  const std::string new_regularization_type = regularization_element->Attribute("Type");
6318 
6319  set_regularization_type(new_regularization_type);
6320 
6321  switch(regularization_type)
6322  {
6323  case NO_REGULARIZATION:
6324  {
6325  // Do nothing
6326  }
6327  break;
6328 
6329  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
6330  {
6331  tinyxml2::XMLDocument new_document;
6332 
6333  tinyxml2::XMLElement* element_clone = new_document.NewElement("NeuralParametersNorm");
6334  new_document.InsertFirstChild(element_clone);
6335 
6336  DeepClone(element_clone, regularization_element, &new_document, NULL);
6337 
6338  neural_parameters_norm_regularization_pointer->from_XML(new_document);
6339  }
6340  break;
6341 
6342  case OUTPUTS_INTEGRALS_REGULARIZATION:
6343  {
6344  tinyxml2::XMLDocument new_document;
6345 
6346  tinyxml2::XMLElement* element_clone = new_document.NewElement("OutputsIntegrals");
6347  new_document.InsertFirstChild(element_clone);
6348 
6349  DeepClone(element_clone, regularization_element, &new_document, NULL);
6350 
6351  outputs_integrals_regularization_pointer->from_XML(new_document);
6352  }
6353  break;
6354 
6355  case USER_REGULARIZATION:
6356  {
6357  // Do nothing
6358  }
6359  break;
6360 
6361  default:
6362  {
6363  std::ostringstream buffer;
6364 
6365  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6366  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
6367  << "Unknown regularization type.\n";
6368 
6369  throw std::logic_error(buffer.str());
6370  }
6371  break;
6372  }
  }
6373 
6374  // Constraints term type
6375 
6376  const tinyxml2::XMLElement* constraints_element = performance_functional_element->FirstChildElement("Constraints");
6377 
6378  if(constraints_element)
6379  {
6380  const std::string new_constraints_type = constraints_element->Attribute("Type");
6381 
6382  set_constraints_type(new_constraints_type);
6383 
6384  switch(constraints_type)
6385  {
6386  case NO_CONSTRAINTS:
6387  {
6388  // Do nothing
6389  }
6390  break;
6391 
6392  case OUTPUTS_INTEGRALS_CONSTRAINTS:
6393  {
6394  tinyxml2::XMLDocument new_document;
6395 
6396  tinyxml2::XMLElement* element_clone = new_document.NewElement("OutputsIntegrals");
6397  new_document.InsertFirstChild(element_clone);
6398 
6399  DeepClone(element_clone, constraints_element, &new_document, NULL);
6400 
6401  outputs_integrals_constraints_pointer->from_XML(new_document);
6402  }
6403  break;
6404 
6405  case SOLUTIONS_ERROR_CONSTRAINTS:
6406  {
6407  tinyxml2::XMLDocument new_document;
6408 
6409  tinyxml2::XMLElement* element_clone = new_document.NewElement("SolutionsError");
6410  new_document.InsertFirstChild(element_clone);
6411 
6412  DeepClone(element_clone, constraints_element, &new_document, NULL);
6413 
6414  solutions_error_constraints_pointer->from_XML(new_document);
6415  }
6416  break;
6417 
6418  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
6419  {
6420  tinyxml2::XMLDocument new_document;
6421 
6422  tinyxml2::XMLElement* element_clone = new_document.NewElement("FinalSolutionsError");
6423  new_document.InsertFirstChild(element_clone);
6424 
6425  DeepClone(element_clone, constraints_element, &new_document, NULL);
6426 
6427  final_solutions_error_constraints_pointer->from_XML(new_document);
6428  }
6429  break;
6430 
6431  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
6432  {
6433  tinyxml2::XMLDocument new_document;
6434 
6435  tinyxml2::XMLElement* element_clone = new_document.NewElement("IndependentParametersError");
6436  new_document.InsertFirstChild(element_clone);
6437 
6438  DeepClone(element_clone, constraints_element, &new_document, NULL);
6439 
6440  independent_parameters_error_constraints_pointer->from_XML(new_document);
6441  }
6442  break;
6443 
6444  case USER_CONSTRAINTS:
6445  {
6446  // Do nothing
6447  }
6448  break;
6449 
6450  default:
6451  {
6452  std::ostringstream buffer;
6453 
6454  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6455  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
6456  << "Unknown constraints type.\n";
6457 
6458  throw std::logic_error(buffer.str());
6459  }
6460  break;
6461  }
6462  }
6464 
6465  const tinyxml2::XMLElement* display_element = performance_functional_element->FirstChildElement("Display");
6466 
6467  if(display_element)
6468  {
6469  std::string new_display_string = display_element->GetText();
6470 
6471  try
6472  {
6473  set_display(new_display_string != "0");
6474  }
6475  catch(const std::logic_error& e)
6476  {
6477  std::cout << e.what() << std::endl;
6478  }
6479  }
6480 }
6481 
6482 
6483 // std::string to_string(void) method
6484 
6485 /// Writes to a string the members of the performance functional object in text format.
6486 
6487 std::string PerformanceFunctional::to_string(void) const
6488 {
6489  std::ostringstream buffer;
6490 
6491  buffer << "Performance functional\n"
6492  << "Objective type: " << write_objective_type() << "\n";
6493 
6494  // Objective
6495 
6496  switch(objective_type)
6497  {
6498  case NO_OBJECTIVE:
6499  {
6500  // Do nothing
6501  }
6502  break;
6503 
6504  case SUM_SQUARED_ERROR_OBJECTIVE:
6505  {
6506  buffer << sum_squared_error_objective_pointer->to_string();
6507  }
6508  break;
6509 
6510  case MEAN_SQUARED_ERROR_OBJECTIVE:
6511  {
6512  buffer << mean_squared_error_objective_pointer->to_string();
6513  }
6514  break;
6515 
6516  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
6517  {
6518  buffer << root_mean_squared_error_objective_pointer->to_string();
6519  }
6520  break;
6521 
6522  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
6523  {
6524  buffer << normalized_squared_error_objective_pointer->to_string();
6525  }
6526  break;
6527 
6528  case MINKOWSKI_ERROR_OBJECTIVE:
6529  {
6530  buffer << Minkowski_error_objective_pointer->to_string();
6531  }
6532  break;
6533 
6534  case CROSS_ENTROPY_ERROR_OBJECTIVE:
6535  {
6536  buffer << cross_entropy_error_objective_pointer->to_string();
6537  }
6538  break;
6539 
6540  case OUTPUTS_INTEGRALS_OBJECTIVE:
6541  {
6542  buffer << outputs_integrals_objective_pointer->to_string();
6543  }
6544  break;
6545 
6546  case SOLUTIONS_ERROR_OBJECTIVE:
6547  {
6548  buffer << solutions_error_objective_pointer->to_string();
6549  }
6550  break;
6551 
6552  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
6553  {
6554  buffer << final_solutions_error_objective_pointer->to_string();
6555  }
6556  break;
6557 
6558  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
6559  {
6560  buffer << independent_parameters_error_objective_pointer->to_string();
6561  }
6562  break;
6563 
6564  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
6565  {
6566  buffer << inverse_sum_squared_error_objective_pointer->to_string();
6567  }
6568  break;
6569 
6570  case USER_OBJECTIVE:
6571  {
6572  buffer << user_objective_pointer->to_string();
6573  }
6574  break;
6575 
6576  default:
6577  {
6578  buffer.str("");
6579 
6580  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6581  << "std::string to_string(void) method.\n"
6582  << "Unknown objective type.\n";
6583 
6584  throw std::logic_error(buffer.str());
6585  }
6586  break;
6587  }
6588 
6589  // Regularization
6590 
6591  buffer << "Regularization type: " << write_regularization_type() << "\n";
6592 
6593  switch(regularization_type)
6594  {
6595  case NO_REGULARIZATION:
6596  {
6597  // Do nothing
6598  }
6599  break;
6600 
6601  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
6602  {
6603  buffer << neural_parameters_norm_regularization_pointer->to_string();
6604  }
6605  break;
6606 
6607  case OUTPUTS_INTEGRALS_REGULARIZATION:
6608  {
6609  buffer << outputs_integrals_regularization_pointer->to_string();
6610  }
6611  break;
6612 
6613  case USER_REGULARIZATION:
6614  {
6615  buffer << user_regularization_pointer->to_string();
6616  }
6617  break;
6618 
6619  default:
6620  {
6621  buffer.str("");
6622 
6623  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6624  << "std::string to_string(void) method.\n"
6625  << "Unknown regularization type.\n";
6626 
6627  throw std::logic_error(buffer.str());
6628  }
6629  break;
6630  }
6631 
6632  // Constraints
6633 
6634  buffer << "Constraints type: " << write_constraints_type() << "\n";
6635 
6636  switch(constraints_type)
6637  {
6638  case NO_CONSTRAINTS:
6639  {
6640  // Do nothing
6641  }
6642  break;
6643 
6644  case OUTPUTS_INTEGRALS_CONSTRAINTS:
6645  {
6646  buffer << outputs_integrals_constraints_pointer->to_string();
6647  }
6648  break;
6649 
6650  case SOLUTIONS_ERROR_CONSTRAINTS:
6651  {
6652  buffer << solutions_error_constraints_pointer->to_string();
6653  }
6654  break;
6655 
6656  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
6657  {
6658  buffer << final_solutions_error_constraints_pointer->to_string();
6659  }
6660  break;
6661 
6662  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
6663  {
6664  buffer << independent_parameters_error_constraints_pointer->to_string();
6665  }
6666  break;
6667 
6668  case USER_CONSTRAINTS:
6669  {
6670  buffer << user_constraints_pointer->to_string();
6671  }
6672  break;
6673 
6674  default:
6675  {
6676  buffer.str("");
6677 
6678  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6679  << "std::string to_string(void) method.\n"
6680  << "Unknown constraints type.\n";
6681 
6682  throw std::logic_error(buffer.str());
6683  }
6684  break;
6685  }
6686 
6687  buffer << "Display:" << display << "\n";
6688 
6689  return(buffer.str());
6690 }
6691 
6692 
6693 // void save(const std::string&) const method
6694 
6695 /// Saves the performance functional object to an XML file.
6696 /// @param file_name Name of the performance functional XML file.
6697 
6698 void PerformanceFunctional::save(const std::string& file_name) const
6699 {
6700  tinyxml2::XMLDocument* document = to_XML();
6701 
6702  // Declaration
6703 
6704 // TiXmlDeclaration* declaration = new TiXmlDeclaration("1.0", "", "");
6705 // document->LinkEndChild(declaration);
6706 
6707  // Performance functional
6708 
6709  document->SaveFile(file_name.c_str());
6710 
6711  delete document;
6712 }
6713 
6714 
6715 // void load(const std::string&) method
6716 
6717 /// Loads a performance functional object from an XML file.
6718 /// @param file_name Name of the performance functional XML file.
6719 
6720 void PerformanceFunctional::load(const std::string& file_name)
6721 {
6722  std::ostringstream buffer;
6723 
6724  tinyxml2::XMLDocument document;
6725 
6726  if(document.LoadFile(file_name.c_str()))
6727  {
6728  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6729  << "void load(const std::string&) method.\n"
6730  << "Cannot load XML file " << file_name << ".\n";
6731 
6732  throw std::logic_error(buffer.str());
6733  }
6734 
6735  from_XML(document);
6736 }
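// A minimal usage sketch (illustrative only): save() and load() above give a simple
// round trip through the XML representation. The file name is an assumption for the example.
//
//    performance_functional.save("performance_functional.xml");
//
//    PerformanceFunctional restored_performance_functional;
//    restored_performance_functional.load("performance_functional.xml");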
6737 
6738 
6739 // std::string write_information(void) method
6740 
6742 /// Returns a string with information about the objective, regularization and constraints terms.
6743 
6744 std::string PerformanceFunctional::write_information(void)
6745 {
6746  std::ostringstream buffer;
6747 
6748  // Objective
6749 
6750  switch(objective_type)
6751  {
6752  case NO_OBJECTIVE:
6753  {
6754  // Do nothing
6755  }
6756  break;
6757 
6758  case SUM_SQUARED_ERROR_OBJECTIVE:
6759  {
6760  buffer << sum_squared_error_objective_pointer->write_information();
6761  }
6762  break;
6763 
6764  case MEAN_SQUARED_ERROR_OBJECTIVE:
6765  {
6766  buffer << mean_squared_error_objective_pointer->write_information();
6767  }
6768  break;
6769 
6770  case ROOT_MEAN_SQUARED_ERROR_OBJECTIVE:
6771  {
6772  buffer << root_mean_squared_error_objective_pointer->write_information();
6773  }
6774  break;
6775 
6776  case NORMALIZED_SQUARED_ERROR_OBJECTIVE:
6777  {
6778  buffer << normalized_squared_error_objective_pointer->write_information();
6779  }
6780  break;
6781 
6782  case MINKOWSKI_ERROR_OBJECTIVE:
6783  {
6784  buffer << Minkowski_error_objective_pointer->write_information();
6785  }
6786  break;
6787 
6788  case CROSS_ENTROPY_ERROR_OBJECTIVE:
6789  {
6790  buffer << cross_entropy_error_objective_pointer->write_information();
6791  }
6792  break;
6793 
6794  case OUTPUTS_INTEGRALS_OBJECTIVE:
6795  {
6796  buffer << outputs_integrals_objective_pointer->write_information();
6797  }
6798  break;
6799 
6800  case SOLUTIONS_ERROR_OBJECTIVE:
6801  {
6802  buffer << solutions_error_objective_pointer->write_information();
6803  }
6804  break;
6805 
6806  case FINAL_SOLUTIONS_ERROR_OBJECTIVE:
6807  {
6808  buffer << final_solutions_error_objective_pointer->write_information();
6809  }
6810  break;
6811 
6812  case INDEPENDENT_PARAMETERS_ERROR_OBJECTIVE:
6813  {
6814  buffer << independent_parameters_error_objective_pointer->write_information();
6815  }
6816  break;
6817 
6818  case INVERSE_SUM_SQUARED_ERROR_OBJECTIVE:
6819  {
6820  buffer << inverse_sum_squared_error_objective_pointer->write_information();
6821  }
6822  break;
6823 
6824  case USER_OBJECTIVE:
6825  {
6826  buffer << user_objective_pointer->write_information();
6827  }
6828  break;
6829 
6830  default:
6831  {
6832  buffer.str("");
6833 
6834  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6835  << "std::string write_information(void) method.\n"
6836  << "Unknown objective type.\n";
6837 
6838  throw std::logic_error(buffer.str());
6839  }
6840  break;
6841  }
6842 
6843  // Regularization
6844 
6845  switch(regularization_type)
6846  {
6847  case NO_REGULARIZATION:
6848  {
6849  // Do nothing
6850  }
6851  break;
6852 
6853  case NEURAL_PARAMETERS_NORM_REGULARIZATION:
6854  {
6855  buffer << neural_parameters_norm_regularization_pointer->write_information();
6856  }
6857  break;
6858 
6859  case OUTPUTS_INTEGRALS_REGULARIZATION:
6860  {
6861  buffer << outputs_integrals_regularization_pointer->write_information();
6862  }
6863  break;
6864 
6865  case USER_REGULARIZATION:
6866  {
6867  buffer << user_regularization_pointer->write_information();
6868  }
6869  break;
6870 
6871  default:
6872  {
6873  buffer.str("");
6874 
6875  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6876  << "std::string write_information(void) method.\n"
6877  << "Unknown regularization type.\n";
6878 
6879  throw std::logic_error(buffer.str());
6880  }
6881  break;
6882  }
6883 
6884  // Constraints
6885 
6886  switch(constraints_type)
6887  {
6888  case NO_CONSTRAINTS:
6889  {
6890  // Do nothing
6891  }
6892  break;
6893 
6894  case OUTPUTS_INTEGRALS_CONSTRAINTS:
6895  {
6896  buffer << outputs_integrals_constraints_pointer->write_information();
6897  }
6898  break;
6899 
6900  case SOLUTIONS_ERROR_CONSTRAINTS:
6901  {
6902  buffer << solutions_error_constraints_pointer->write_information();
6903  }
6904  break;
6905 
6906  case FINAL_SOLUTIONS_ERROR_CONSTRAINTS:
6907  {
6908  buffer << final_solutions_error_constraints_pointer->write_information();
6909  }
6910  break;
6911 
6912  case INDEPENDENT_PARAMETERS_ERROR_CONSTRAINTS:
6913  {
6914  buffer << independent_parameters_error_constraints_pointer->write_information();
6915  }
6916  break;
6917 
6918  case USER_CONSTRAINTS:
6919  {
6920  buffer << user_constraints_pointer->write_information();
6921  }
6922  break;
6923 
6924  default:
6925  {
6926  buffer.str("");
6927 
6928  buffer << "OpenNN Exception: PerformanceFunctional class.\n"
6929  << "std::string write_information(void) method.\n"
6930  << "Unknown constraints type.\n";
6931 
6932  throw std::logic_error(buffer.str());
6933  }
6934  break;
6935  }
6936 
6937  return(buffer.str());
6938 }
6939 
6940 
6941 // void print(void) const method
6942 
6943 /// Prints the members of the performance functional object to the standard output.
6944 
6945 void PerformanceFunctional::print(void) const
6946 {
6947  std::cout << to_string();
6948 }
6949 
6950 
6951 }
6952 
6953 
6954 // OpenNN: Open Neural Networks Library.
6955 // Copyright (c) 2005-2015 Roberto Lopez.
6956 //
6957 // This library is free software; you can redistribute it and/or
6958 // modify it under the terms of the GNU Lesser General Public
6959 // License as published by the Free Software Foundation; either
6960 // version 2.1 of the License, or any later version.
6961 //
6962 // This library is distributed in the hope that it will be useful,
6963 // but WITHOUT ANY WARRANTY; without even the implied warranty of
6964 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
6965 // Lesser General Public License for more details.
6966 
6967 // You should have received a copy of the GNU Lesser General Public
6968 // License along with this library; if not, write to the Free Software
6969 // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA