OpenNN  2.2
Open Neural Networks Library
scaling_layer.cpp
1 /****************************************************************************************************************/
2 /* */
3 /* OpenNN: Open Neural Networks Library */
4 /* www.artelnics.com/opennn */
5 /* */
6 /* S C A L I N G L A Y E R C L A S S */
7 /* */
8 /* Roberto Lopez */
9 /* Artelnics - Making intelligent use of data */
11 /* */
12 /****************************************************************************************************************/
13 
14 // OpenNN includes
15 
16 #include "scaling_layer.h"
17 
18 namespace OpenNN
19 {
20 
21 // DEFAULT CONSTRUCTOR
22 
25 
27 {
28  set();
29 }
30 
31 
32 // SCALING NEURONS NUMBER CONSTRUCTOR
33 
38 
39 ScalingLayer::ScalingLayer(const size_t& new_scaling_neurons_number)
40 {
41  set(new_scaling_neurons_number);
42 }
43 
44 
45 // STATISTICS CONSTRUCTOR
46 
51 
53 {
54  set(new_statistics);
55 }
56 
57 
58 // COPY CONSTRUCTOR
59 
61 
62 ScalingLayer::ScalingLayer(const ScalingLayer& new_scaling_layer)
63 {
64  set(new_scaling_layer);
65 }
66 
67 
68 // DESTRUCTOR
69 
71 
73 {
74 }
75 
76 
77 // ASSIGNMENT OPERATOR
78 
79 // ScalingLayer& operator = (const ScalingLayer&) method
80 
83 
85 {
86  if(this != &other_scaling_layer)
87  {
88  statistics = other_scaling_layer.statistics;
89 
90  scaling_method = other_scaling_layer.scaling_method;
91 
92  display = other_scaling_layer.display;
93  }
94 
95  return(*this);
96 }
97 
98 
99 // EQUAL TO OPERATOR
100 
101 // bool operator == (const ScalingLayer&) const method
102 
106 
107 bool ScalingLayer::operator == (const ScalingLayer& other_scaling_layer) const
108 {
109  if(/*statistics == other_scaling_layer.statistics
110  &&*/ scaling_method == other_scaling_layer.scaling_method
111  && display == other_scaling_layer.display)
112  {
113  return(true);
114  }
115  else
116  {
117  return(false);
118  }
119 }
120 
121 
122 // size_t get_scaling_neurons_number(void) const method
123 
125 
127 {
128  return(statistics.size());
129 }
130 
131 
132 // Vector< Statistics<double> > get_statistics(void) const method
133 
136 
138 {
139  return(statistics);
140 }
141 
142 
143 // Statistics<double> get_statistics(const size_t&) const method
144 
147 
149 {
150  return(statistics[index]);
151 }
152 
153 
154 // Matrix<double> arrange_statistics(void) const method
155 
159 
161 {
162  const size_t scaling_neurons_number = get_scaling_neurons_number();
163 
164  Matrix<double> statistics_matrix(scaling_neurons_number, 4);
165 
166  for(size_t i = 0; i < scaling_neurons_number; i++)
167  {
168  statistics_matrix.set_row(i, statistics[i].to_vector());
169  }
170 
171  return(statistics_matrix);
172 }
173 
174 
175 // Vector<double> arrange_means(void) const method
176 
178 
180 {
181  const size_t scaling_neurons_number = get_scaling_neurons_number();
182 
183  Vector<double> means(scaling_neurons_number);
184 
185  for(size_t i = 0; i < scaling_neurons_number; i++)
186  {
187  means[i] = statistics[i].mean;
188  }
189 
190  return(means);
191 }
192 
193 
194 // Vector<double> arrange_standard_deviations(void) const method
195 
197 
199 {
200  const size_t scaling_neurons_number = get_scaling_neurons_number();
201 
202  Vector<double> standard_deviations(scaling_neurons_number);
203 
204  for(size_t i = 0; i < scaling_neurons_number; i++)
205  {
206  standard_deviations[i] = statistics[i].standard_deviation;
207  }
208 
209  return(standard_deviations);
210 }
211 
212 
213 // const Method& get_scaling_method(void) const method
214 
216 
218 {
219  return(scaling_method);
220 }
221 
222 
223 // std::string write_scaling_method(void) const method
224 
226 
227 std::string ScalingLayer::write_scaling_method(void) const
228 {
229  if(scaling_method == MeanStandardDeviation)
230  {
231  return("MeanStandardDeviation");
232  }
233  else if(scaling_method == MinimumMaximum)
234  {
235  return("MinimumMaximum");
236  }
237  if(scaling_method == NoScaling)
238  {
239  return("NoScaling");
240  }
241  else
242  {
243  std::ostringstream buffer;
244 
245  buffer << "OpenNN Exception: ScalingLayer class.\n"
246  << "std::string write_scaling_method(void) const method.\n"
247  << "Unknown scaling method.\n";
248 
249  throw std::logic_error(buffer.str());
250  }
251 }
252 
253 
254 // std::string write_scaling_method_text(void) const method
255 
258 
260 {
261  if(scaling_method == NoScaling)
262  {
263  return("no scaling");
264  }
265  else if(scaling_method == MeanStandardDeviation)
266  {
267  return("mean and standard deviation");
268  }
269  else if(scaling_method == MinimumMaximum)
270  {
271  return("minimum and maximum");
272  }
273  else
274  {
275  std::ostringstream buffer;
276 
277  buffer << "OpenNN Exception: ScalingLayer class.\n"
278  << "std::string write_scaling_method_text(void) const method.\n"
279  << "Unknown scaling method.\n";
280 
281  throw std::logic_error(buffer.str());
282  }
283 }
284 
285 
286 // const bool& get_display(void) const method
287 
290 
291 const bool& ScalingLayer::get_display(void) const
292 {
293  return(display);
294 }
295 
296 
297 // void set(void) method
298 
300 
302 {
303  statistics.set();
304 
305  set_default();
306 }
307 
308 
309 // void set(const size_t&) method
310 
313 
314 void ScalingLayer::set(const size_t& new_inputs_number)
315 {
316  statistics.set(new_inputs_number);
317 
318  set_default();
319 }
320 
321 
322 // void set(const Vector< Vector<double> >&) method
323 
328 
329 void ScalingLayer::set(const Vector< Statistics<double> >& new_statistics)
330 {
331  statistics = new_statistics;
332 
333  set_default();
334 }
335 
336 
337 // void set(const tinyxml2::XMLDocument&) method
338 
341 
342 void ScalingLayer::set(const tinyxml2::XMLDocument& new_scaling_layer_document)
343 {
344  set_default();
345 
346  from_XML(new_scaling_layer_document);
347 }
348 
349 
350 // void set(const ScalingLayer&) method
351 
354 
355 void ScalingLayer::set(const ScalingLayer& new_scaling_layer)
356 {
357  statistics = new_scaling_layer.statistics;
358 
359  scaling_method = new_scaling_layer.scaling_method;
360 
361  display = new_scaling_layer.display;
362 }
363 
364 
365 // void set_default(void) method
366 
376 
378 {
379 // minimums.initialize(-1.0);
380 // maximums.initialize(1.0);
381 // means.initialize(0.0);
382 // standard_deviations.initialize(1.0);
383 
384  set_scaling_method(MinimumMaximum);
385 
386  set_display(true);
387 }
388 
389 
390 // void set_statistics(const Vector< Statisitcs<double> >&) method
391 
395 
397 {
398  // Control sentence (if debug)
399 
400  #ifndef NDEBUG
401 
402  const size_t new_statistics_size = new_statistics.size();
403 
404  const size_t scaling_neurons_number = get_scaling_neurons_number();
405 
406  if(new_statistics_size != scaling_neurons_number)
407  {
408  std::ostringstream buffer;
409 
410  buffer << "OpenNN Exception: ScalingLayer class.\n"
411  << "void set_statistics(const Vector< Statistics<double> >&) method.\n"
412  << "Size of statistics is not equal to number of scaling neurons.\n";
413 
414  throw std::logic_error(buffer.str());
415  }
416 
417  #endif
418 
419  // Set all statistics
420 
421  statistics = new_statistics;
422 }
423 
424 
425 // void set_item_statistics(const size_t&, const Statistics<double>&) method
426 
430 
431 void ScalingLayer::set_item_statistics(const size_t& i, const Statistics<double>& item_statistics)
432 {
433  statistics[i] = item_statistics;
434 }
435 
436 
437 // void set_minimum(const size_t&, const double&) method
438 
442 
443 void ScalingLayer::set_minimum(const size_t& i, const double& new_minimum)
444 {
445  statistics[i].set_minimum(new_minimum);
446 }
447 
448 
449 // void set_maximum(const size_t&, const double&) method
450 
454 
455 void ScalingLayer::set_maximum(const size_t& i, const double& new_maximum)
456 {
457  statistics[i].set_maximum(new_maximum);
458 }
459 
460 
461 // void set_mean(const size_t&, const double&) method
462 
466 
467 void ScalingLayer::set_mean(const size_t& i, const double& new_mean)
468 {
469  statistics[i].set_mean(new_mean);
470 }
471 
472 
473 // void set_standard_deviation(const size_t&, const double&) method
474 
478 
479 void ScalingLayer::set_standard_deviation(const size_t& i, const double& new_standard_deviation)
480 {
481  statistics[i].set_standard_deviation(new_standard_deviation);
482 }
483 
484 
485 // void set_scaling_method(const ScalingMethod&)
486 
489 
491 {
492  scaling_method = new_scaling_method;
493 }
494 
495 
496 // void set_scaling_method(const std::string&) method
497 
501 
502 void ScalingLayer::set_scaling_method(const std::string& new_scaling_method)
503 {
504  if(new_scaling_method == "NoScaling")
505  {
506  set_scaling_method(NoScaling);
507  }
508  else if(new_scaling_method == "MeanStandardDeviation")
509  {
510  set_scaling_method(MeanStandardDeviation);
511  }
512  else if(new_scaling_method == "MinimumMaximum")
513  {
514  set_scaling_method(MinimumMaximum);
515  }
516  else
517  {
518  std::ostringstream buffer;
519 
520  buffer << "OpenNN Exception: ScalingLayer class.\n"
521  << "void set_scaling_method(const std::string&) method.\n"
522  << "Unknown scaling method: " << new_scaling_method << ".\n";
523 
524  throw std::logic_error(buffer.str());
525  }
526 }
527 
528 
529 // void set_display(const bool&) method
530 
535 
536 void ScalingLayer::set_display(const bool& new_display)
537 {
538  display = new_display;
539 }
540 
541 
542 // void prune_scaling_neuron(const size_t&) method
543 
546 
547 void ScalingLayer::prune_scaling_neuron(const size_t& index)
548 {
549  // Control sentence (if debug)
550 
551  #ifndef NDEBUG
552 
553  const size_t scaling_neurons_number = get_scaling_neurons_number();
554 
555  if(index >= scaling_neurons_number)
556  {
557  std::ostringstream buffer;
558 
559  buffer << "OpenNN Exception: ScalingLayer class.\n"
560  << "void prune_scaling_neuron(const size_t&) method.\n"
561  << "Index of scaling neuron is equal or greater than number of scaling neurons.\n";
562 
563  throw std::logic_error(buffer.str());
564  }
565 
566  #endif
567 
568  statistics.erase(statistics.begin() + index);
569 }
570 
571 
572 // bool is_empty(void) const method
573 
575 
576 bool ScalingLayer::is_empty(void) const
577 {
578  const size_t inputs_number = get_scaling_neurons_number();
579 
580  if(inputs_number == 0)
581  {
582  return(true);
583  }
584  else
585  {
586  return(false);
587  }
588 }
589 
590 
591 // void check_range(const Vector<double>&) const method
592 
598 
599 void ScalingLayer::check_range(const Vector<double>& inputs) const
600 {
601  const size_t inputs_number = get_scaling_neurons_number();
602 
603  // Control sentence (if debug)
604 
605  #ifndef NDEBUG
606 
607  const size_t size = inputs.size();
608 
609  if(size != inputs_number)
610  {
611  std::ostringstream buffer;
612 
613  buffer << "OpenNN Exception: ScalingLayer class.\n"
614  << "void check_range(const Vector<double>&) const method.\n"
615  << "Size of inputs must be equal to number of inputs.\n";
616 
617  throw std::logic_error(buffer.str());
618  }
619 
620  #endif
621 
622  // Check inputs
623 
624  if(display)
625  {
626  for(size_t i = 0; i < inputs_number; i++)
627  {
628  if(inputs[i] < statistics[i].minimum)
629  {
630  std::cout << "OpenNN Warning: ScalingLayer class.\n"
631  << "void check_range(const Vector<double>&) const method.\n"
632  << "Input value " << i << " is less than corresponding minimum.\n";
633  }
634 
635  if(inputs[i] > statistics[i].maximum)
636  {
637  std::cout << "OpenNN Warning: ScalingLayer class.\n"
638  << "void check_range(const Vector<double>&) const method.\n"
639  << "Input value " << i << " is greater than corresponding maximum.\n";
640  }
641  }
642  }
643 }
644 
645 
646 // void initialize_random(void) method
647 
650 
652 {
653  const size_t scaling_neurons_number = get_scaling_neurons_number();
654 
655  // Statistics
656 
657  for(size_t i = 0; i < scaling_neurons_number; i++)
658  {
659  statistics[i].initialize_random();
660  }
661 
662  // Unscaling method
663 
664  switch(rand()%2)
665  {
666  case 0:
667  {
668  scaling_method = MinimumMaximum;
669  }
670  break;
671 
672  case 1:
673  {
674  scaling_method = MeanStandardDeviation;
675  }
676  break;
677 
678  default:
679  {
680  std::ostringstream buffer;
681 
682  buffer << "OpenNN Exception: ScalingLayer class.\n"
683  << "void initialize_random(void) method.\n"
684  << "Unknown scaling method.\n";
685 
686  throw std::logic_error(buffer.str());
687  }
688  break;
689  }
690 }
691 
692 
693 // Vector<double> calculate_outputs(const Vector<double>&) const method
694 
697 
699 {
700  // Control sentence (if debug)
701 
702  #ifndef NDEBUG
703 
704  std::ostringstream buffer;
705 
706  const size_t inputs_number = get_scaling_neurons_number();
707 
708  const size_t size = inputs.size();
709 
710  if(size != inputs_number)
711  {
712  buffer << "OpenNN Exception: ScalingLayer class.\n"
713  << "Vector<double> calculate_outputs(const Vector<double>&) const method.\n"
714  << "Size of inputs must be equal to number of scaling neurons.\n";
715 
716  throw std::logic_error(buffer.str());
717  }
718 
719  #endif
720 
721  switch(scaling_method)
722  {
723  case MinimumMaximum:
724  {
725  return(calculate_minimum_maximum_outputs(inputs));
726  }
727  break;
728 
729  case MeanStandardDeviation:
730  {
732  }
733  break;
734 
735  case NoScaling:
736  {
737  return(inputs);
738  }
739  break;
740 
741  default:
742  {
743  std::ostringstream buffer;
744 
745  buffer << "OpenNN Exception: ScalingLayer class\n"
746  << "Vector<double> calculate_outputs(const Vector<double>&) const method.\n"
747  << "Unknown scaling and unscaling method.\n";
748 
749  throw std::logic_error(buffer.str());
750  }
751  break;
752  }
753 
754  // Never reach here
755 
756  return(inputs);
757 }
758 
759 
760 // Vector<double> calculate_derivatives(const Vector<double>&) const method
761 
764 
766 {
767  switch(scaling_method)
768  {
769  case MinimumMaximum:
770  {
772  }
773  break;
774 
775  case MeanStandardDeviation:
776  {
778  }
779  break;
780 
781  default:
782  {
783  std::ostringstream buffer;
784 
785  buffer << "OpenNN Exception: ScalingLayer class.\n"
786  << "Vector<double> calculate_derivatives(const Vector<double>&) const method.\n"
787  << "Unknown scaling and unscaling method.\n";
788 
789  throw std::logic_error(buffer.str());
790  }
791  break;
792  }
793 }
794 
795 
796 // Vector<double> calculate_second_derivatives(const Vector<double>&) const method
797 
800 
802 {
803  switch(scaling_method)
804  {
805  case MinimumMaximum:
806  {
808  }// end minimums and maximums
809  break;
810 
811  case MeanStandardDeviation:
812  {
814  }// end means and standard deviation
815  break;
816 
817  default:
818  {
819  std::ostringstream buffer;
820 
821  buffer << "OpenNN Exception: ScalingLayer class.\n"
822  << "Vector<double> calculate_second_derivatives(const Vector<double>&) const method.\n"
823  << "Unknown scaling and unscaling method.\n";
824 
825  throw std::logic_error(buffer.str());
826  }// end default
827  break;
828 
829  }// end switch
830 }
831 
832 
833 // Vector<double> calculate_minimum_maximum_outputs(const Vector<double>&) const method
834 
837 
839 {
840  const size_t scaling_neurons_number = get_scaling_neurons_number();
841 
842  Vector<double> outputs(scaling_neurons_number);
843 
844  for(size_t i = 0; i < scaling_neurons_number; i++)
845  {
846  if(statistics[i].maximum-statistics[i].minimum < 1e-99)
847  {
848  if(display)
849  {
850  std::cout << "OpenNN Warning: ScalingLayer class\n"
851  << "Vector<double> calculate_minimum_maximum_outputs(Vector<double>&) const method.\n"
852  << "Minimum and maximum values of variable " << i << " are equal.\n"
853  << "Those inputs won't be scaled.\n";
854  }
855 
856  outputs[i] = inputs[i];
857  }
858  else
859  {
860  outputs[i] = 2.0*(inputs[i] - statistics[i].minimum)/(statistics[i].maximum-statistics[i].minimum) - 1.0;
861  }
862  }
863 
864  return(outputs);
865 }
866 
867 
868 // Vector<double> calculate_minimum_maximum_derivatives(const Vector<double>&) const method
869 
872 
874 {
875  const size_t scaling_neurons_number = get_scaling_neurons_number();
876 
877  Vector<double> scaled_derivative(scaling_neurons_number);
878 
879  for(size_t i = 0; i < scaling_neurons_number; i++)
880  {
881  if(statistics[i].maximum-statistics[i].minimum < 1e-99)
882  {
883  if(display)
884  {
885  std::cout << "OpenNN Warning: ScalingLayer class.\n"
886  << "Vector<double> calculate_minimum_maximum_derivatives(const Vector<double>&) const method.\n"
887  << "Minimum and maximum values of variable " << i << " are equal.\n"
888  << "That inputs is not scaled.\n";
889  }
890 
891  scaled_derivative[i] = 1.0;
892  }
893  else
894  {
895  scaled_derivative[i] = 2.0/(statistics[i].maximum-statistics[i].minimum);
896  }
897  }
898 
899  return(scaled_derivative);
900 }
901 
902 
903 // Vector<double> calculate_minimum_maximum_second_derivatives(const Vector<double>&) const method
904 
907 
909 {
910  const size_t scaling_neurons_number = get_scaling_neurons_number();
911 
912  const Vector<double> scaled_second_derivative(scaling_neurons_number, 0.0);
913 
914  return(scaled_second_derivative);
915 }
916 
917 
918 // Vector<double> calculate_mean_standard_deviation_outputs(const Vector<double>&) const method
919 
922 
924 {
925  const size_t scaling_neurons_number = get_scaling_neurons_number();
926 
927  Vector<double> outputs(scaling_neurons_number);
928 
929  for(size_t i = 0; i < scaling_neurons_number; i++)
930  {
931  if(statistics[i].standard_deviation < 1e-99)
932  {
933  if(display)
934  {
935  std::cout << "OpenNN Warning: ScalingLayer class.\n"
936  << "Vector<double> calculate_mean_standard_deviation_outputs(const Vector<double>&) const method.\n"
937  << "Standard deviation of variable " << i << " is zero.\n"
938  << "Those variables won't be scaled.\n";
939  }
940 
941  outputs[i] = inputs[i];
942  }
943  else
944  {
945  outputs[i] = (inputs[i] - statistics[i].mean)/statistics[i].standard_deviation;
946  }
947  }
948 
949  return(outputs);
950 }
951 
952 
953 // Vector<double> calculate_mean_standard_deviation_derivatives(const Vector<double>&) const method
954 
957 
959 {
960  const size_t scaling_neurons_number = get_scaling_neurons_number();
961 
962  Vector<double> derivative(scaling_neurons_number);
963 
964  for(size_t i = 0; i < scaling_neurons_number; i++)
965  {
966  if(statistics[i].standard_deviation < 1e-99)
967  {
968  if(display)
969  {
970  std::cout << "OpenNN Warning: ScalingLayer class.\n"
971  << "Vector<double> calculate_mean_standard_deviation_derivatives(const Vector<double>&) const method.\n"
972  << "Standard deviation of input variable " << i << " is zero.\n"
973  << "That inputs is not be scaled.\n";
974  }
975 
976  derivative[i] = 1.0;
977  }
978  else
979  {
980  derivative[i] = 1.0/statistics[i].standard_deviation;
981  }
982  }
983 
984  return(derivative);
985 }
986 
987 
988 // Vector<double> calculate_mean_standard_deviation_second_derivatives(const Vector<double>&) const method
989 
992 
994 {
995  const size_t scaling_neurons_number = get_scaling_neurons_number();
996 
997  const Vector<double> second_derivative(scaling_neurons_number, 0.0);
998 
999  return(second_derivative);
1000 }
1001 
1002 
1003 // Matrix<double> arrange_Jacobian(const Vector<double>&) const method
1004 
1006 
1008 {
1009  const size_t scaling_neurons_number = get_scaling_neurons_number();
1010 
1011  Matrix<double> Jacobian(scaling_neurons_number, scaling_neurons_number, 0.0);
1012 
1013  Jacobian.set_diagonal(derivatives);
1014 
1015  return(Jacobian);
1016 }
1017 
1018 
1019 // Vector< Matrix<double> > arrange_Hessian_form(const Vector<double>&) const method
1020 
1022 
1024 {
1025  const size_t scaling_neurons_number = get_scaling_neurons_number();
1026 
1027  Vector< Matrix<double> > Hessian_form(scaling_neurons_number);
1028 
1029  for(size_t i = 0; i < scaling_neurons_number; i++)
1030  {
1031  Hessian_form[i].set(scaling_neurons_number, scaling_neurons_number, 0.0);
1032 
1033  Hessian_form[i](i,i) = second_derivative[i];
1034  }
1035 
1036  return(Hessian_form);
1037 }
1038 
1039 
1040 // std::string write_no_scaling_expression(const Vector<std::string>&, const Vector<std::string>&) const method
1041 
1045 
1046 std::string ScalingLayer::write_no_scaling_expression(const Vector<std::string>& inputs_name, const Vector<std::string>& outputs_name) const
1047 {
1048  const size_t inputs_number = get_scaling_neurons_number();
1049 
1050  std::ostringstream buffer;
1051 
1052  for(size_t i = 0; i < inputs_number; i++)
1053  {
1054  buffer << outputs_name[i] << "=" << inputs_name[i] << ";\n";
1055  }
1056 
1057  return(buffer.str());
1058 }
1059 
1060 
1061 
1062 // std::string write_minimum_maximum_expression(const Vector<std::string>&, const Vector<std::string>&) const method
1063 
1067 
1068 std::string ScalingLayer::write_minimum_maximum_expression(const Vector<std::string>& inputs_name, const Vector<std::string>& outputs_name) const
1069 {
1070  const size_t inputs_number = get_scaling_neurons_number();
1071 
1072  std::ostringstream buffer;
1073 
1074  for(size_t i = 0; i < inputs_number; i++)
1075  {
1076  buffer << outputs_name[i] << "=2*(" << inputs_name[i] << "-" << statistics[i].minimum << ")/(" << statistics[i].maximum << "-" << statistics[i].minimum << ")-1;\n";
1077  }
1078 
1079  return(buffer.str());
1080 }
1081 
1082 
1083 // std::string write_mean_standard_deviation_expression(const Vector<std::string>&, const Vector<std::string>&) const method
1084 
1088 
1090 {
1091  const size_t inputs_number = get_scaling_neurons_number();
1092 
1093  std::ostringstream buffer;
1094 
1095  for(size_t i = 0; i < inputs_number; i++)
1096  {
1097  buffer << outputs_name[i] << "=(" << inputs_name[i] << "-" << statistics[i].mean << ")/" << statistics[i].standard_deviation << ";\n";
1098  }
1099 
1100  return(buffer.str());
1101 }
1102 
1103 
1104 // std::string write_expression(const Vector<std::string>&, const Vector<std::string>&) const method
1105 
1107 
1108 std::string ScalingLayer::write_expression(const Vector<std::string>& inputs_name, const Vector<std::string>& outputs_name) const
1109 {
1110  switch(scaling_method)
1111  {
1112  case NoScaling:
1113  {
1114  return(write_no_scaling_expression(inputs_name, outputs_name));
1115  }
1116  break;
1117 
1118  case MinimumMaximum:
1119  {
1120  return(write_minimum_maximum_expression(inputs_name, outputs_name));
1121  }
1122  break;
1123 
1124  case MeanStandardDeviation:
1125  {
1126  return(write_mean_standard_deviation_expression(inputs_name, outputs_name));
1127  }
1128  break;
1129 
1130  default:
1131  {
1132  std::ostringstream buffer;
1133 
1134  buffer << "OpenNN Exception: ScalingLayer class.\n"
1135  << "std::string write_expression(void) const method.\n"
1136  << "Unknown inputs scaling method.\n";
1137 
1138  throw std::logic_error(buffer.str());
1139  }// end default
1140  break;
1141  }
1142 }
1143 
1144 
1145 // std::string to_string(void) const method
1146 
1148 
1149 std::string ScalingLayer::to_string(void) const
1150 {
1151  std::ostringstream buffer;
1152 
1153  const size_t scaling_neurons_number = get_scaling_neurons_number();
1154 
1155  buffer << "Scaling layer\n";
1156 
1157  for(size_t i = 0; i < scaling_neurons_number; i++)
1158  {
1159  buffer << "Statistics " << i+1 << "\n"
1160  << "Minimum: " << statistics[i].minimum << "\n"
1161  << "Maximum: " << statistics[i].maximum << "\n"
1162  << "Mean: " << statistics[i].mean << "\n"
1163  << "Standard deviation: " << statistics[i].standard_deviation << "\n";
1164  }
1165 
1166  buffer << "Scaling method: " << write_scaling_method() << "\n"
1167  << "Display: " << display << "\n";
1168 
1169  return(buffer.str());
1170 }
1171 
1172 
1173 // tinyxml2::XMLDocument* to_XML(void) const method
1174 
1177 
1178 tinyxml2::XMLDocument* ScalingLayer::to_XML(void) const
1179 {
1180  tinyxml2::XMLDocument* document = new tinyxml2::XMLDocument;
1181 
1182  std::ostringstream buffer;
1183 
1184  tinyxml2::XMLElement* scaling_layer_element = document->NewElement("ScalingLayer");
1185 
1186  document->InsertFirstChild(scaling_layer_element);
1187 
1188  // Scaling neurons number
1189 
1190  tinyxml2::XMLElement* size_element = document->NewElement("ScalingNeuronsNumber");
1191  scaling_layer_element->LinkEndChild(size_element);
1192 
1193  const size_t scaling_neurons_number = get_scaling_neurons_number();
1194 
1195  buffer.str("");
1196  buffer << scaling_neurons_number;
1197 
1198  tinyxml2::XMLText* size_text = document->NewText(buffer.str().c_str());
1199  size_element->LinkEndChild(size_text);
1200 
1201  for(size_t i = 0; i < scaling_neurons_number; i++)
1202  {
1203  tinyxml2::XMLElement* statistics_element = document->NewElement("Statistics");
1204  statistics_element->SetAttribute("Index", (unsigned)i+1);
1205 
1206  scaling_layer_element->LinkEndChild(statistics_element);
1207 
1208  // Minimum
1209 
1210  tinyxml2::XMLElement* minimum_element = document->NewElement("Minimum");
1211  statistics_element->LinkEndChild(minimum_element);
1212 
1213  buffer.str("");
1214  buffer << statistics[i].minimum;
1215 
1216  tinyxml2::XMLText* minimum_text = document->NewText(buffer.str().c_str());
1217  minimum_element->LinkEndChild(minimum_text);
1218 
1219  // Maximum
1220 
1221  tinyxml2::XMLElement* maximum_element = document->NewElement("Maximum");
1222  statistics_element->LinkEndChild(maximum_element);
1223 
1224  buffer.str("");
1225  buffer << statistics[i].maximum;
1226 
1227  tinyxml2::XMLText* maximum_text = document->NewText(buffer.str().c_str());
1228  maximum_element->LinkEndChild(maximum_text);
1229 
1230  // Mean
1231 
1232  tinyxml2::XMLElement* mean_element = document->NewElement("Mean");
1233  statistics_element->LinkEndChild(mean_element);
1234 
1235  buffer.str("");
1236  buffer << statistics[i].mean;
1237 
1238  tinyxml2::XMLText* mean_text = document->NewText(buffer.str().c_str());
1239  mean_element->LinkEndChild(mean_text);
1240 
1241  // Standard deviation
1242 
1243  tinyxml2::XMLElement* standard_deviation_element = document->NewElement("StandardDeviation");
1244  statistics_element->LinkEndChild(standard_deviation_element);
1245 
1246  buffer.str("");
1247  buffer << statistics[i].standard_deviation;
1248 
1249  tinyxml2::XMLText* standard_deviation_text = document->NewText(buffer.str().c_str());
1250  standard_deviation_element->LinkEndChild(standard_deviation_text);
1251  }
1252 
1253  // Scaling method
1254 
1255  tinyxml2::XMLElement* method_element = document->NewElement("ScalingMethod");
1256  scaling_layer_element->LinkEndChild(method_element);
1257 
1258  tinyxml2::XMLText* method_text = document->NewText(write_scaling_method().c_str());
1259  method_element->LinkEndChild(method_text);
1260 
1261  // Display warnings
1262 
1263  tinyxml2::XMLElement* display_element = document->NewElement("Display");
1264  scaling_layer_element->LinkEndChild(display_element);
1265 
1266  buffer.str("");
1267  buffer << display;
1268 
1269  tinyxml2::XMLText* display_text = document->NewText(buffer.str().c_str());
1270  display_element->LinkEndChild(display_text);
1271 
1272  return(document);
1273 }
1274 
1275 
1276 // void from_XML(const tinyxml2::XMLDocument&) method
1277 
1280 
1281 void ScalingLayer::from_XML(const tinyxml2::XMLDocument& document)
1282 {
1283  std::ostringstream buffer;
1284 
1285  const tinyxml2::XMLElement* scaling_layer_element = document.FirstChildElement("ScalingLayer");
1286 
1287  if(!scaling_layer_element)
1288  {
1289  buffer << "OpenNN Exception: ScalingLayer class.\n"
1290  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1291  << "Scaling layer element is NULL.\n";
1292 
1293  throw std::logic_error(buffer.str());
1294  }
1295 
1296  // Scaling neurons number
1297 
1298  const tinyxml2::XMLElement* scaling_neurons_number_element = scaling_layer_element->FirstChildElement("ScalingNeuronsNumber");
1299 
1300  if(!scaling_neurons_number_element)
1301  {
1302  buffer << "OpenNN Exception: ScalingLayer class.\n"
1303  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1304  << "Scaling neurons number element is NULL.\n";
1305 
1306  throw std::logic_error(buffer.str());
1307  }
1308 
1309  const size_t scaling_neurons_number = atoi(scaling_neurons_number_element->GetText());
1310 
1311  set(scaling_neurons_number);
1312 
1313  unsigned index = 0; // size_t does not work
1314 
1315  const tinyxml2::XMLElement* start_element = scaling_neurons_number_element;
1316 
1317  for(size_t i = 0; i < scaling_neurons_number; i++)
1318  {
1319  const tinyxml2::XMLElement* statistics_element = start_element->NextSiblingElement("Statistics");
1320  start_element = statistics_element;
1321 
1322  if(!statistics_element)
1323  {
1324  buffer << "OpenNN Exception: ScalingLayer class.\n"
1325  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1326  << "Statistics of scaling neuron " << i+1 << " is NULL.\n";
1327 
1328  throw std::logic_error(buffer.str());
1329  }
1330 
1331  statistics_element->QueryUnsignedAttribute("Index", &index);
1332 
1333  if(index != i+1)
1334  {
1335  buffer << "OpenNN Exception: ScalingLayer class.\n"
1336  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1337  << "Index " << index << " is not correct.\n";
1338 
1339  throw std::logic_error(buffer.str());
1340  }
1341 
1342  // Minimum
1343 
1344  const tinyxml2::XMLElement* minimum_element = statistics_element->FirstChildElement("Minimum");
1345 
1346  if(!minimum_element)
1347  {
1348  buffer << "OpenNN Exception: ScalingLayer class.\n"
1349  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1350  << "Minimum element " << i+1 << " is NULL.\n";
1351 
1352  throw std::logic_error(buffer.str());
1353  }
1354 
1355  if(minimum_element->GetText())
1356  {
1357  statistics[i].minimum = atof(minimum_element->GetText());
1358  }
1359 
1360  // Maximum
1361 
1362  const tinyxml2::XMLElement* maximum_element = statistics_element->FirstChildElement("Maximum");
1363 
1364  if(!maximum_element)
1365  {
1366  buffer << "OpenNN Exception: ScalingLayer class.\n"
1367  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1368  << "Maximum element " << i+1 << " is NULL.\n";
1369 
1370  throw std::logic_error(buffer.str());
1371  }
1372 
1373  if(maximum_element->GetText())
1374  {
1375  statistics[i].maximum = atof(maximum_element->GetText());
1376  }
1377 
1378  // Mean
1379 
1380  const tinyxml2::XMLElement* mean_element = statistics_element->FirstChildElement("Mean");
1381 
1382  if(!mean_element)
1383  {
1384  buffer << "OpenNN Exception: ScalingLayer class.\n"
1385  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1386  << "Mean element " << i+1 << " is NULL.\n";
1387 
1388  throw std::logic_error(buffer.str());
1389  }
1390 
1391  if(mean_element->GetText())
1392  {
1393  statistics[i].mean = atof(mean_element->GetText());
1394  }
1395 
1396  // Standard deviation
1397 
1398  const tinyxml2::XMLElement* standard_deviation_element = statistics_element->FirstChildElement("StandardDeviation");
1399 
1400  if(!standard_deviation_element)
1401  {
1402  buffer << "OpenNN Exception: ScalingLayer class.\n"
1403  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1404  << "Standard deviation element " << i+1 << " is NULL.\n";
1405 
1406  throw std::logic_error(buffer.str());
1407  }
1408 
1409  if(standard_deviation_element->GetText())
1410  {
1411  statistics[i].standard_deviation = atof(standard_deviation_element->GetText());
1412  }
1413  }
1414 
1415  // Scaling method
1416  {
1417  const tinyxml2::XMLElement* scaling_method_element = scaling_layer_element->FirstChildElement("ScalingMethod");
1418 
1419  if(scaling_method_element)
1420  {
1421  std::string new_method = scaling_method_element->GetText();
1422 
1423  try
1424  {
1425  set_scaling_method(new_method);
1426  }
1427  catch(const std::logic_error& e)
1428  {
1429  std::cout << e.what() << std::endl;
1430  }
1431  }
1432  }
1433 
1434  // Display
1435  {
1436  const tinyxml2::XMLElement* display_element = scaling_layer_element->FirstChildElement("Display");
1437 
1438  if(display_element)
1439  {
1440  std::string new_display_string = display_element->GetText();
1441 
1442  try
1443  {
1444  set_display(new_display_string != "0");
1445  }
1446  catch(const std::logic_error& e)
1447  {
1448  std::cout << e.what() << std::endl;
1449  }
1450  }
1451  }
1452 }
1453 
1454 }
1455 
1456 // OpenNN: Open Neural Networks Library.
1457 // Copyright (c) 2005-2015 Roberto Lopez.
1458 //
1459 // This library is free software; you can redistribute it and/or
1460 // modify it under the terms of the GNU Lesser General Public
1461 // License as published by the Free Software Foundation; either
1462 // version 2.1 of the License, or any later version.
1463 //
1464 // This library is distributed in the hope that it will be useful,
1465 // but WITHOUT ANY WARRANTY; without even the implied warranty of
1466 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
1467 // Lesser General Public License for more details.
1468 
1469 // You should have received a copy of the GNU Lesser General Public
1470 // License along with this library; if not, write to the Free Software
1471 // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Vector< double > calculate_outputs(const Vector< double > &) const
void prune_scaling_neuron(const size_t &)
void set_item_statistics(const size_t &, const Statistics< double > &)
std::string write_scaling_method(void) const
Returns a string with the name of the method used for scaling.
size_t get_scaling_neurons_number(void) const
Returns the number of scaling neurons in this layer.
Vector< double > calculate_mean_standard_deviation_derivatives(const Vector< double > &) const
const ScalingMethod & get_scaling_method(void) const
Returns the method used for scaling.
std::string write_scaling_method_text(void) const
Vector< Statistics< double > > statistics
Statistics of input variables.
void check_range(const Vector< double > &) const
ScalingMethod scaling_method
Method for scaling the input variables.
virtual void from_XML(const tinyxml2::XMLDocument &)
void set(void)
Sets the size of a vector to zero.
Definition: vector.h:656
Vector< Statistics< double > > get_statistics(void) const
void set(void)
Sets the scaling layer to be empty.
Vector< double > calculate_minimum_maximum_derivatives(const Vector< double > &) const
void set_diagonal(const T &)
Definition: matrix.h:1858
void set_mean(const size_t &, const double &)
void set_statistics(const Vector< Statistics< double > > &)
bool operator==(const ScalingLayer &) const
void set_display(const bool &)
ScalingMethod
Enumeration of available methods for scaling the input variables.
Definition: scaling_layer.h:79
void set_scaling_method(const ScalingMethod &)
Vector< double > arrange_means(void) const
Returns a single vector with the means of all scaling neurons.
Vector< double > calculate_minimum_maximum_outputs(const Vector< double > &) const
Vector< double > calculate_minimum_maximum_second_derivatives(const Vector< double > &) const
tinyxml2::XMLDocument * to_XML(void) const
const bool & get_display(void) const
void set_maximum(const size_t &, const double &)
std::string write_mean_standard_deviation_expression(const Vector< std::string > &, const Vector< std::string > &) const
Vector< double > arrange_standard_deviations(void) const
Returns a single vector with the standard deviations of all scaling neurons.
virtual void set_default(void)
Vector< Matrix< double > > arrange_Hessian_form(const Vector< double > &) const
Arranges a "Hessian form" vector of matrices from the vector of second derivatives.
bool display
Display warning messages to screen.
std::string to_string(void) const
Returns a string representation of the current scaling layer object.
virtual ~ScalingLayer(void)
Destructor.
void initialize_random(void)
Matrix< double > arrange_Jacobian(const Vector< double > &) const
Arranges a "Jacobian" matrix from the vector of derivatives.
Vector< double > calculate_derivatives(const Vector< double > &) const
std::string write_no_scaling_expression(const Vector< std::string > &, const Vector< std::string > &) const
Vector< double > calculate_mean_standard_deviation_outputs(const Vector< double > &) const
std::string write_expression(const Vector< std::string > &, const Vector< std::string > &) const
Returns a string with the expression of the inputs scaling process.
Matrix< double > arrange_statistics(void) const
std::string write_minimum_maximum_expression(const Vector< std::string > &, const Vector< std::string > &) const
Vector< double > calculate_second_derivatives(const Vector< double > &) const
Vector< double > calculate_mean_standard_deviation_second_derivatives(const Vector< double > &) const
void set_standard_deviation(const size_t &, const double &)
void set_minimum(const size_t &, const double &)
void set_row(const size_t &, const Vector< T > &)
Definition: matrix.h:1691
ScalingLayer & operator=(const ScalingLayer &)
bool is_empty(void) const
Returns true if the number of scaling neurons is zero, and false otherwise.