OpenNN  2.2
Open Neural Networks Library
perceptron_layer.cpp
1 /****************************************************************************************************************/
2 /* */
3 /* OpenNN: Open Neural Networks Library */
4 /* www.artelnics.com/opennn */
5 /* */
6 /* P E R C E P T R O N L A Y E R C L A S S */
7 /* */
8 /* Roberto Lopez */
9 /* Artelnics - Making intelligent use of data */
11 /* */
12 /****************************************************************************************************************/
13 
14 // OpenNN includes
15 
16 #include "perceptron_layer.h"
17 
18 namespace OpenNN
19 {
20 
21 // DEFAULT CONSTRUCTOR
22 
26 
28 {
29  set();
30 }
31 
32 
33 // ARCHITECTURE CONSTRUCTOR
34 
41 
/// Architecture constructor.
/// Creates a layer with a given number of perceptrons, each with a given number of inputs.
/// @param new_inputs_number Number of inputs per perceptron.
/// @param new_perceptrons_number Number of perceptrons in the layer.

PerceptronLayer::PerceptronLayer(const size_t& new_inputs_number, const size_t& new_perceptrons_number)
{
    set(new_inputs_number, new_perceptrons_number);
}
46 
47 
48 // COPY CONSTRUCTOR
49 
53 
/// Copy constructor.
/// Creates this layer as a copy of another perceptron layer.
/// @param other_perceptron_layer Layer to be copied.

PerceptronLayer::PerceptronLayer(const PerceptronLayer& other_perceptron_layer)
{
    set(other_perceptron_layer);
}
58 
59 
60 // DESTRUCTOR
61 
64 
66 {
67 }
68 
69 
70 // ASSIGNMENT OPERATOR
71 
75 
77 {
78  if(this != &other_perceptron_layer)
79  {
80  perceptrons = other_perceptron_layer.perceptrons;
81 
82  display = other_perceptron_layer.display;
83  }
84 
85  return(*this);
86 }
87 
88 
89 // EQUAL TO OPERATOR
90 
91 // bool operator == (const PerceptronLayer&) const method
92 
97 
98 bool PerceptronLayer::operator == (const PerceptronLayer& other_perceptron_layer) const
99 {
100  if(perceptrons == other_perceptron_layer.perceptrons
101  && display == other_perceptron_layer.display)
102  {
103  return(true);
104  }
105  else
106  {
107  return(false);
108  }
109 }
110 
111 
112 // METHODS
113 
114 
115 // bool is_empty(void) const method
116 
118 
120 {
121  if(perceptrons.empty())
122  {
123  return(true);
124  }
125  else
126  {
127  return(false);
128  }
129 }
130 
131 
132 // const Vector<Perceptron>& get_perceptrons(void) const method
133 
135 
137 {
138  return(perceptrons);
139 }
140 
141 
142 // size_t get_inputs_number(void) const
143 
145 
147 {
148  if(is_empty())
149  {
150  return(0);
151  }
152  else
153  {
154  return(perceptrons[0].get_inputs_number());
155  }
156 }
157 
158 
159 // const size_t& get_perceptrons_number(void) const
160 
162 
164 {
165  const size_t perceptrons_number = perceptrons.size();
166 
167  return(perceptrons_number);
168 }
169 
170 
171 // const Perceptron& get_perceptron(const size_t&) const method
172 
175 
/// Returns a constant reference to the perceptron at a given position in the layer.
/// @param index Index of perceptron, in [0, perceptrons number).
/// @throws std::logic_error In debug builds, when the index is out of range.

const Perceptron& PerceptronLayer::get_perceptron(const size_t& index) const
{
    // Control sentence (if debug)

    #ifndef NDEBUG

    const size_t perceptrons_number = get_perceptrons_number();

    if(index >= perceptrons_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "const Perceptron& get_perceptron(const size_t&) const method.\n"
               << "Index of perceptron must be less than layer size.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    return(perceptrons[index]);
}
199 
200 
201 // size_t count_parameters_number(void) const method
202 
204 
206 {
207  const size_t perceptrons_number = get_perceptrons_number();
208 
209  size_t parameters_number = 0;
210 
211  for(size_t i = 0; i < perceptrons_number; i++)
212  {
213  parameters_number += perceptrons[i].count_parameters_number();
214  }
215 
216  return(parameters_number);
217 }
218 
219 
220 // Vector<size_t> count_cumulative_parameters_number(void) const method
221 
224 
226 {
227  const size_t perceptrons_number = get_perceptrons_number();
228 
229  Vector<size_t> cumulative_parameters_number(perceptrons_number);
230 
231  if(perceptrons_number > 0)
232  {
233  cumulative_parameters_number[0] = perceptrons[0].count_parameters_number();
234 
235  for(size_t i = 1; i < perceptrons_number; i++)
236  {
237  cumulative_parameters_number[i] = cumulative_parameters_number[i-1] + perceptrons[i].count_parameters_number();
238  }
239  }
240 
241  return(cumulative_parameters_number);
242 }
243 
244 
245 // Vector<double> arrange_biases(void) const method
246 
250 
252 {
253  const size_t perceptrons_number = get_perceptrons_number();
254 
255  Vector<double> biases(perceptrons_number);
256 
257  for(size_t i = 0; i < perceptrons_number; i++)
258  {
259  biases[i] = perceptrons[i].get_bias();
260  }
261 
262  return(biases);
263 }
264 
265 
266 // Matrix<double> arrange_synaptic_weights(void) const method
267 
272 
274 {
275  const size_t perceptrons_number = get_perceptrons_number();
276 
277  const size_t inputs_number = get_inputs_number();
278 
279  Matrix<double> synaptic_weights(perceptrons_number, inputs_number);
280 
281  for(size_t i = 0; i < perceptrons_number; i++)
282  {
283  for(size_t j = 0; j < inputs_number; j++)
284  {
285  synaptic_weights(i,j) = perceptrons[i].get_synaptic_weight(j);
286  }
287  }
288 
289  return(synaptic_weights);
290 }
291 
292 
293 // Vector<double> arrange_parameters(void) const method
294 
298 
300 {
301  const size_t perceptrons_number = get_perceptrons_number();
302 
303  if(perceptrons_number == 0)
304  {
305  Vector<double> parameters;
306 
307  return(parameters);
308  }
309  else
310  {
311  const size_t parameters_number = count_parameters_number();
312 
313  Vector<double> parameters(parameters_number);
314 
315  const size_t perceptron_parameters_number = perceptrons[0].count_parameters_number();
316 
317  Vector<double> perceptron_parameters(perceptron_parameters_number);
318 
319  size_t position = 0;
320 
321  for(size_t i = 0; i < perceptrons_number; i++)
322  {
323  perceptron_parameters = perceptrons[i].arrange_parameters();
324  parameters.tuck_in(position, perceptron_parameters);
325  position += perceptron_parameters_number;
326  }
327 
328  return(parameters);
329  }
330 }
331 
332 
333 // size_t count_perceptron_parameters_number(void) const method
334 
337 
339 {
340  const size_t inputs_number = get_inputs_number();
341 
342  return(1 + inputs_number);
343 }
344 
345 
346 // Vector< Vector<double> > arrange_perceptrons_parameters(void) const method
347 
352 
354 {
355  const size_t perceptrons_number = get_perceptrons_number();
356 
357  Vector< Vector<double> > perceptrons_parameters(perceptrons_number);
358 
359  for(size_t i = 0; i < perceptrons_number; i++)
360  {
361  perceptrons_parameters[i] = perceptrons[i].arrange_parameters();
362  }
363 
364  return(perceptrons_parameters);
365 }
366 
367 
368 // const Perceptron::ActivationFunction& get_activation_function(void) const method
369 
372 
374 {
375  const size_t perceptrons_number = get_perceptrons_number();
376 
377  if(perceptrons_number > 0)
378  {
380  }
381  else
382  {
383  std::ostringstream buffer;
384 
385  buffer << "OpenNN Exception: PerceptronLayer class.\n"
386  << "Perceptron::ActivationFunction& get_activation_function(void) method.\n"
387  << "PerceptronLayer is empty.\n";
388 
389  throw std::logic_error(buffer.str());
390  }
391 }
392 
393 
394 // std::string write_activation_function(void) const method
395 
398 
400 {
401  const Perceptron::ActivationFunction activation_function = get_activation_function();
402 
403  switch(activation_function)
404  {
405  case Perceptron::Logistic:
406  {
407  return("Logistic");
408  }
409  break;
410 
411  case Perceptron::HyperbolicTangent:
412  {
413  return("HyperbolicTangent");
414  }
415  break;
416 
417  case Perceptron::Threshold:
418  {
419  return("Threshold");
420  }
421  break;
422 
423  case Perceptron::SymmetricThreshold:
424  {
425  return("SymmetricThreshold");
426  }
427  break;
428 
429  case Perceptron::Linear:
430  {
431  return("Linear");
432  }
433  break;
434 
435  default:
436  {
437  std::ostringstream buffer;
438 
439  buffer << "OpenNN Exception: PerceptronLayer class.\n"
440  << "std::string write_activation_function_name(void) const method.\n"
441  << "Unknown layer activation function.\n";
442 
443  throw std::logic_error(buffer.str());
444  }
445  break;
446  }
447 }
448 
449 
450 // const bool& get_display(void) const method
451 
454 
/// Returns the display flag, which controls whether messages from this class
/// are shown on the screen.

const bool& PerceptronLayer::get_display(void) const
{
    return(display);
}
459 
460 
461 // void set(void) method
462 
465 
467 {
468  perceptrons.set();
469 
470  set_default();
471 }
472 
473 
474 // void set(const Vector<Perceptron>&) method
475 
478 
/// Sets the perceptrons of the layer from a given vector and restores the
/// default member values.
/// @param new_perceptrons Vector of perceptrons for this layer.

void PerceptronLayer::set(const Vector<Perceptron>& new_perceptrons)
{
    perceptrons = new_perceptrons;

    set_default();
}
485 
486 
487 // void set(const size_t&, const size_t&) method
488 
493 
/// Sets a new layer architecture: a given number of perceptrons, each with a
/// given number of inputs, then restores the default member values.
/// @param new_inputs_number Number of inputs per perceptron.
/// @param new_perceptrons_number Number of perceptrons in the layer.

void PerceptronLayer::set(const size_t& new_inputs_number, const size_t& new_perceptrons_number)
{
    perceptrons.set(new_perceptrons_number);

    for(size_t i = 0; i < new_perceptrons_number; i++)
    {
        perceptrons[i].set_inputs_number(new_inputs_number);
    }

    set_default();
}
505 
506 
507 // void set(const PerceptronLayer&) method
508 
511 
/// Sets the members of this layer to be a copy of another perceptron layer.
/// @param other_perceptron_layer Layer to be copied.

void PerceptronLayer::set(const PerceptronLayer& other_perceptron_layer)
{
    perceptrons = other_perceptron_layer.perceptrons;

    display = other_perceptron_layer.display;
}
518 
519 
520 // void set_perceptrons(const Vector<Perceptron>&) method
521 
524 
526 {
527  perceptrons = new_perceptrons;
528 }
529 
530 
531 // void set_perceptron(const size_t&, const Perceptron&) method
532 
536 
537 void PerceptronLayer::set_perceptron(const size_t& i, const Perceptron& new_perceptron)
538 {
539  perceptrons[i] = new_perceptron;
540 }
541 
542 
543 // void set_default(void) method
544 
549 
551 {
552  display = true;
553 }
554 
555 
556 // void set_inputs_number(const size_t&) method
557 
561 
/// Sets a new number of inputs for every perceptron in the layer.
/// @param new_inputs_number Number of inputs per perceptron.

void PerceptronLayer::set_inputs_number(const size_t& new_inputs_number)
{
    const size_t perceptrons_number = get_perceptrons_number();

    for(size_t i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].set_inputs_number(new_inputs_number);
    }
}
571 
572 
573 // void set_perceptrons_number(const size_t&) method
574 
578 
579 void PerceptronLayer::set_perceptrons_number(const size_t& new_perceptrons_number)
580 {
581  const size_t perceptrons_number = get_perceptrons_number();
582  const size_t inputs_number = get_inputs_number();
583 
584  if(perceptrons_number > 0)
585  {
586  const Perceptron::ActivationFunction& activation_function = get_activation_function();
587  perceptrons.set(new_perceptrons_number);
588  set_activation_function(activation_function);
589  }
590  else
591  {
592  perceptrons.set(new_perceptrons_number);
593  }
594 
595  set_inputs_number(inputs_number);
596 }
597 
598 
599 // void set_biases(const Vector<double>&) method
600 
603 
605 {
606  const size_t perceptrons_number = get_perceptrons_number();
607 
608  // Control sentence (if debug)
609 
610  #ifndef NDEBUG
611 
612  const size_t new_biases_size = new_biases.size();
613 
614  if(new_biases_size != perceptrons_number)
615  {
616  std::ostringstream buffer;
617 
618  buffer << "OpenNN Exception: PerceptronLayer class.\n"
619  << "void set_biases(const Vector<double>&) method.\n"
620  << "Size must be equal to number of perceptrons.\n";
621 
622  throw std::logic_error(buffer.str());
623  }
624 
625  #endif
626 
627  // Set layer biases
628 
629  for(size_t i = 0; i < perceptrons_number; i++)
630  {
631  perceptrons[i].set_bias(new_biases[i]);
632  }
633 }
634 
635 
636 // void set_synaptic_weights(const Matrix<double>&) method
637 
643 
644 void PerceptronLayer::set_synaptic_weights(const Matrix<double>& new_synaptic_weights)
645 {
646  const size_t inputs_number = get_inputs_number();
647  const size_t perceptrons_number = get_perceptrons_number();
648 
649  // Control sentence (if debug)
650 
651  #ifndef NDEBUG
652 
653  const size_t rows_number = new_synaptic_weights.get_rows_number();
654  const size_t columns_number = new_synaptic_weights.get_columns_number();
655 
656  std::ostringstream buffer;
657 
658  if(rows_number != perceptrons_number)
659  {
660  buffer << "OpenNN Exception: PerceptronLayer class.\n"
661  << "void set_synaptic_weights(const Matrix<double>&) method.\n"
662  << "Number of rows must be equal to size of layer.\n";
663 
664  throw std::logic_error(buffer.str());
665  }
666  else if(columns_number != inputs_number)
667  {
668  std::ostringstream buffer;
669 
670  buffer << "OpenNN Exception: PerceptronLayer class.\n"
671  << "void set_synaptic_weights(const Matrix<double>&) method.\n"
672  << "Number of columns must be equal to number of inputs.\n";
673 
674  throw std::logic_error(buffer.str());
675  }
676 
677  #endif
678 
679  for(size_t i = 0; i < perceptrons_number; i++)
680  {
681  for(size_t j = 0; j < inputs_number; j++)
682  {
683  perceptrons[i].set_synaptic_weight(j, new_synaptic_weights(i,j));
684  }
685  }
686 }
687 
688 
689 // void set_parameters(const Vector<double>&) method
690 
693 
695 {
696  const size_t perceptrons_number = get_perceptrons_number();
697 
698  // Control sentence (if debug)
699 
700  #ifndef NDEBUG
701 
702  const size_t parameters_number = count_parameters_number();
703 
704  const size_t new_parameters_size = new_parameters.size();
705 
706  if(new_parameters_size != parameters_number)
707  {
708  std::ostringstream buffer;
709 
710  buffer << "OpenNN Exception: PerceptronLayer class.\n"
711  << "void set_parameters(const Vector<double>&) method.\n"
712  << "Size of new parameters vector must be equal to number of parameters.\n";
713 
714  throw std::logic_error(buffer.str());
715  }
716 
717  #endif
718 
719  if(perceptrons_number != 0)
720  {
721  const size_t perceptron_parameters_number = perceptrons[0].count_parameters_number();
722 
723  Vector<double> perceptron_parameters(perceptron_parameters_number);
724 
725  size_t position = 0;
726 
727  for(size_t i = 0; i < perceptrons_number; i++)
728  {
729  perceptron_parameters = new_parameters.take_out(position, perceptron_parameters_number);
730  perceptrons[i].set_parameters(perceptron_parameters);
731  position += perceptron_parameters_number;
732  }
733  }
734 }
735 
736 
737 // void set_activation_function(const Perceptron::ActivationFunction&) method
738 
741 
743 {
744  const size_t perceptrons_number = get_perceptrons_number();
745 
746  for(size_t i = 0; i < perceptrons_number; i++)
747  {
748  perceptrons[i].set_activation_function(new_activation_function);
749  }
750 }
751 
752 
753 // void set_activation_function(const std::string&) method
754 
758 
/// Sets the activation function of every perceptron from its string name
/// (parsing is delegated to Perceptron::set_activation_function).
/// @param new_activation_function Activation function name.

void PerceptronLayer::set_activation_function(const std::string& new_activation_function)
{
    const size_t perceptrons_number = get_perceptrons_number();

    for(size_t i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].set_activation_function(new_activation_function);
    }
}
768 
769 
770 // void set_display(const bool&) method
771 
776 
/// Sets a new display flag.
/// @param new_display true to show messages from this class, false to hide them.

void PerceptronLayer::set_display(const bool& new_display)
{
    display = new_display;
}
781 
782 
783 // void grow_inputs(const size_t&) method
784 
787 
788 void PerceptronLayer::grow_inputs(const size_t& inputs_number)
789 {
790  for(size_t i = 0; i < inputs_number; i++)
791  {
792  const size_t perceptrons_number = get_perceptrons_number();
793 
794  for(size_t i = 0; i < perceptrons_number; i++)
795  {
796  perceptrons[i].grow_input();
797  }
798  }
799 }
800 
801 
802 // void grow_perceptrons(const size_t&) method
803 
806 
/// Appends a given number of new perceptrons to the layer.
/// Each new perceptron gets the current inputs number and zero-initialized parameters.
/// @param perceptrons_number Number of perceptrons to add.

void PerceptronLayer::grow_perceptrons(const size_t& perceptrons_number)
{
    const size_t inputs_number = get_inputs_number();

    for(size_t i = 0; i < perceptrons_number; i++)
    {
        Perceptron perceptron(inputs_number);

        perceptron.initialize_parameters(0.0);

        perceptrons.push_back(perceptron);
    }
}
820 
821 
822 // void prune_input(const size_t&) method
823 
826 
/// Removes a given input from every perceptron in the layer.
/// @param index Index of the input to remove, in [0, inputs number).
/// @throws std::logic_error In debug builds, when the index is out of range.

void PerceptronLayer::prune_input(const size_t& index)
{
    // Control sentence (if debug)

    #ifndef NDEBUG

    const size_t inputs_number = get_inputs_number();

    if(index >= inputs_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void prune_input(const size_t&) method.\n"
               << "Index of input is equal or greater than number of inputs.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    const size_t perceptrons_number = get_perceptrons_number();

    for(size_t i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].prune_input(index);
    }
}
855 
856 
857 // void prune_perceptron(const size_t&) method
858 
861 
/// Removes a given perceptron from the layer.
/// @param index Index of the perceptron to remove, in [0, perceptrons number).
/// @throws std::logic_error In debug builds, when the index is out of range.

void PerceptronLayer::prune_perceptron(const size_t& index)
{
    // Control sentence (if debug)

    #ifndef NDEBUG

    const size_t perceptrons_number = get_perceptrons_number();

    if(index >= perceptrons_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void prune_perceptron(const size_t&) method.\n"
               << "Index of perceptron is equal or greater than number of perceptrons.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    perceptrons.erase(perceptrons.begin() + index);
}
885 
886 
887 // void initialize_random(void) method
888 
891 
893 {
894  const size_t inputs_number = rand()%10 + 1;
895  const size_t perceptrons_number = rand()%10 + 1;
896 
897  set(inputs_number, perceptrons_number);
898 
899  set_display(true);
900 }
901 
902 
903 // void initialize_biases(const double&) method
904 
907 
/// Sets the bias of every perceptron in the layer to a given value.
/// @param value Bias value.

void PerceptronLayer::initialize_biases(const double& value)
{
    const size_t perceptrons_number = get_perceptrons_number();

    for(size_t i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].initialize_bias(value);
    }
}
917 
918 
919 // void initialize_synaptic_weights(const double&) const method
920 
923 
925 {
926  const size_t perceptrons_number = get_perceptrons_number();
927 
928  for(size_t i = 0; i < perceptrons_number; i++)
929  {
930  perceptrons[i].initialize_synaptic_weights(value);
931  }
932 }
933 
934 
935 // void initialize_parameters(const double&) method
936 
939 
/// Sets every parameter (biases and synaptic weights) of the layer to a given value.
/// @param value Parameters value.

void PerceptronLayer::initialize_parameters(const double& value)
{
    const size_t parameters_number = count_parameters_number();

    const Vector<double> parameters(parameters_number, value);

    set_parameters(parameters);
}
948 
949 
950 // void randomize_parameters_uniform(void) method
951 
954 
956 {
957  const size_t parameters_number = count_parameters_number();
958 
959  Vector<double> parameters(parameters_number);
960 
961  parameters.randomize_uniform();
962 
963  set_parameters(parameters);
964 }
965 
966 
967 // void randomize_parameters_uniform(const double&, const double&) method
968 
973 
/// Initializes all layer parameters with random values drawn from a uniform
/// distribution over [minimum, maximum].
/// @param minimum Lower bound of the distribution.
/// @param maximum Upper bound of the distribution.

void PerceptronLayer::randomize_parameters_uniform(const double& minimum, const double& maximum)
{
    const size_t parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.randomize_uniform(minimum, maximum);

    set_parameters(parameters);
}
984 
985 
986 // void randomize_parameters_uniform(const Vector<double>&, const Vector<double>&) method
987 
992 
994 {
995  const size_t parameters_number = count_parameters_number();
996 
997  Vector<double> parameters(parameters_number);
998 
999  parameters.randomize_uniform(minimum, maximum);
1000 
1001  set_parameters(parameters);
1002 }
1003 
1004 
1005 // void randomize_parameters_uniform(const Vector< Vector<double> >&) method
1006 
1013 
1015 {
1016  const size_t parameters_number = count_parameters_number();
1017 
1018  Vector<double> parameters(parameters_number);
1019 
1020  parameters.randomize_uniform(minimum_maximum[0], minimum_maximum[1]);
1021 
1022  set_parameters(parameters);
1023 }
1024 
1025 
1026 // void randomize_parameters_normal(void) method
1027 
1030 
1032 {
1033  const size_t parameters_number = count_parameters_number();
1034 
1035  Vector<double> parameters(parameters_number);
1036 
1037  parameters.randomize_normal();
1038 
1039  set_parameters(parameters);
1040 }
1041 
1042 
1043 // void randomize_parameters_normal(const double&, const double&) method
1044 
1049 
/// Initializes all layer parameters with random values drawn from a normal
/// distribution with a given mean and standard deviation.
/// @param mean Mean of the distribution.
/// @param standard_deviation Standard deviation of the distribution.

void PerceptronLayer::randomize_parameters_normal(const double& mean, const double& standard_deviation)
{
    const size_t parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.randomize_normal(mean, standard_deviation);

    set_parameters(parameters);
}
1060 
1061 
1062 // void randomize_parameters_normal(const Vector<double>&, const Vector<double>&) method
1063 
1068 
1070 {
1071  const size_t parameters_number = count_parameters_number();
1072 
1073  Vector<double> parameters(parameters_number);
1074 
1075  parameters.randomize_normal(mean, standard_deviation);
1076 
1077  set_parameters(parameters);
1078 }
1079 
1080 
1081 // void randomize_parameters_normal(const Vector< Vector<double> >&) method
1082 
1089 
1091 {
1092  const size_t parameters_number = count_parameters_number();
1093 
1094  Vector<double> parameters(parameters_number);
1095 
1096  parameters.randomize_normal(mean_standard_deviation[0], mean_standard_deviation[1]);
1097 
1098  set_parameters(parameters);
1099 }
1100 
1101 
1102 // double calculate_parameters_norm(void) const method
1103 
1105 
1107 {
1108  return(arrange_parameters().calculate_norm());
1109 }
1110 
1111 
1112 // Vector<double> calculate_combinations(const Vector<double>&) const method
1113 
1116 
1118 {
1119  // Control sentence (if debug)
1120 
1121  #ifndef NDEBUG
1122 
1123  const size_t inputs_size = inputs.size();
1124 
1125  const size_t inputs_number = get_inputs_number();
1126 
1127  if(inputs_size != inputs_number)
1128  {
1129  std::ostringstream buffer;
1130 
1131  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1132  << "Vector<double> calculate_combinations(const Vector<double>&) const method.\n"
1133  << "Size of inputs to layer must be equal to number of layer inputs.\n";
1134 
1135  throw std::logic_error(buffer.str());
1136  }
1137 
1138  #endif
1139 
1140  const size_t perceptrons_number = get_perceptrons_number();
1141 
1142  // Calculate combination to layer
1143 
1144  Vector<double> combination(perceptrons_number);
1145 
1146  for(size_t i = 0; i < perceptrons_number; i++)
1147  {
1148  combination[i] = perceptrons[i].calculate_combination(inputs);
1149  }
1150 
1151  return(combination);
1152 }
1153 
1154 
1155 // Matrix<double> calculate_combinations_Jacobian(const Vector<double>&) const method
1156 
1159 
1161 {
1162  return(arrange_synaptic_weights());
1163 }
1164 
1165 
1166 // Vector< Matrix<double> > calculate_combinations_Hessian_form(const Vector<double>&) const method
1167 
1171 
1173 {
1174  const size_t inputs_number = get_inputs_number();
1175  const size_t perceptrons_number = get_perceptrons_number();
1176 
1177  Vector< Matrix<double> > combination_Hessian_form(perceptrons_number);
1178 
1179  for(size_t i = 0; i < perceptrons_number; i++)
1180  {
1181  combination_Hessian_form[i].set(inputs_number, inputs_number, 0.0);
1182  }
1183 
1184  return(combination_Hessian_form);
1185 }
1186 
1187 
1188 // Vector<double> calculate_combinations(const Vector<double>&, const Vector<double>&) const method
1189 
1193 
1195 {
1196  // Control sentence (if debug)
1197 
1198  #ifndef NDEBUG
1199 
1200  const size_t inputs_size = inputs.size();
1201  const size_t inputs_number = get_inputs_number();
1202 
1203  if(inputs_size != inputs_number)
1204  {
1205  std::ostringstream buffer;
1206 
1207  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1208  << "Vector<double> calculate_combination_parameters(const Vector<double>&, const Vector<double>&) const method.\n"
1209  << "Size of layer inputs (" << inputs_size << ") must be equal to number of layer inputs (" << inputs_number << ").\n";
1210 
1211  throw std::logic_error(buffer.str());
1212  }
1213 
1214  const size_t parameters_size = parameters.size();
1215 
1216  const size_t parameters_number = count_parameters_number();
1217 
1218  if(parameters_size != parameters_number)
1219  {
1220  std::ostringstream buffer;
1221 
1222  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1223  << "Vector<double> calculate_combination_parameters(const Vector<double>&, const Vector<double>&) const method.\n"
1224  << "Size of layer parameters (" << parameters_size << ") must be equal to number of lasyer parameters (" << parameters_number << ").\n";
1225 
1226  throw std::logic_error(buffer.str());
1227  }
1228 
1229  #endif
1230 
1231  const size_t perceptrons_number = get_perceptrons_number();
1232 
1233  const size_t perceptron_parameters_number = count_perceptron_parameters_number();
1234 
1235  // Calculate combination to layer
1236 
1237  Vector<double> combinations(perceptrons_number);
1238 
1239  Vector<double> perceptron_parameters(perceptron_parameters_number);
1240 
1241  for(size_t i = 0; i < perceptrons_number; i++)
1242  {
1243  perceptron_parameters = parameters.take_out(i*perceptron_parameters_number, perceptron_parameters_number);
1244 
1245  combinations[i] = perceptrons[i].calculate_combination(inputs, perceptron_parameters);
1246  }
1247 
1248  return(combinations);
1249 }
1250 
1251 
1252 // Matrix<double> calculate_combination_parameters_Jacobian(const Vector<double>&) const method
1253 
1257 
1259 {
1260  const size_t perceptrons_number = get_perceptrons_number();
1261  const size_t parameters_number = count_parameters_number();
1262  const size_t inputs_number = get_inputs_number();
1263 
1264  Matrix<double> combinations_Jacobian(perceptrons_number, parameters_number, 0.0);
1265 
1266  size_t column_index;
1267 
1268  for(size_t i = 0; i < perceptrons_number; i++)
1269  {
1270  // Bias derivative
1271 
1272  column_index = (1 + inputs_number)*i;
1273  combinations_Jacobian(i,column_index) = 1.0;
1274 
1275  // Synaptic weight derivatives
1276 
1277  for(size_t j = 0; j < inputs_number; j++)
1278  {
1279  column_index = 1 + (1 + inputs_number)*i + j;
1280  combinations_Jacobian(i,column_index) = inputs[j];
1281  }
1282  }
1283 
1284  return(combinations_Jacobian);
1285 }
1286 
1287 
1288 // Vector< Matrix<double> > calculate_combination_parameters_Hessian_form(const Vector<double>&) const method
1289 
1294 
1296 {
1297  const size_t perceptrons_number = get_perceptrons_number();
1298 
1299  Vector< Matrix<double> > combination_parameters_Hessian_form(perceptrons_number);
1300 
1301  const size_t parameters_number = count_parameters_number();
1302 
1303  for(size_t i = 0; i < perceptrons_number; i++)
1304  {
1305  combination_parameters_Hessian_form[i].set(parameters_number, parameters_number, 0.0);
1306  }
1307 
1308  return(combination_parameters_Hessian_form);
1309 }
1310 
1311 
1312 // Vector<double> calculate_activations(const Vector<double>&) const method
1313 
1316 
1318 {
1319  const size_t perceptrons_number = get_perceptrons_number();
1320 
1321  // Control sentence (if debug)
1322 
1323  #ifndef NDEBUG
1324 
1325  const size_t combination_size = combinations.size();
1326 
1327  if(combination_size != perceptrons_number)
1328  {
1329  std::ostringstream buffer;
1330 
1331  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1332  << "Vector<double> calculate_activation(const Vector<double>&) const method.\n"
1333  << "Combination size must be equal to number of neurons.\n";
1334 
1335  throw std::logic_error(buffer.str());
1336  }
1337 
1338  #endif
1339 
1340  // Calculate activation from layer
1341 
1342  Vector<double> activations(perceptrons_number);
1343 
1344  for(size_t i = 0; i < perceptrons_number; i++)
1345  {
1346  activations[i] = perceptrons[i].calculate_activation(combinations[i]);
1347  }
1348 
1349  return(activations);
1350 }
1351 
1352 
1353 // Vector<double> calculate_activations_derivatives(const Vector<double>&) const method
1354 
1357 
1359 {
1360  const size_t perceptrons_number = get_perceptrons_number();
1361 
1362  // Control sentence (if debug)
1363 
1364  #ifndef NDEBUG
1365 
1366  const size_t combination_size = combination.size();
1367 
1368  if(combination_size != perceptrons_number)
1369  {
1370  std::ostringstream buffer;
1371 
1372  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1373  << "Vector<double> calculate_activations_derivatives(const Vector<double>&) const method.\n"
1374  << "Size of combination must be equal to number of neurons.\n";
1375 
1376  throw std::logic_error(buffer.str());
1377  }
1378 
1379  #endif
1380 
1381  // Calculate activation derivative from layer
1382 
1383  Vector<double> activation_derivatives(perceptrons_number);
1384 
1385  for(size_t i = 0; i < perceptrons_number; i++)
1386  {
1387  activation_derivatives[i] = perceptrons[i].calculate_activation_derivative(combination[i]);
1388  }
1389 
1390  return(activation_derivatives);
1391 }
1392 
1393 
1394 // Vector<double> calculate_activations_second_derivatives(const Vector<double>&) const method
1395 
1398 
1400 {
1401  const size_t perceptrons_number = get_perceptrons_number();
1402 
1403  // Control sentence (if debug)
1404 
1405  #ifndef NDEBUG
1406 
1407  const size_t combination_size = combination.size();
1408 
1409  if(combination_size != perceptrons_number)
1410  {
1411  std::ostringstream buffer;
1412 
1413  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1414  << "Vector<double> calculate_activations_second_derivatives(const Vector<double>&) const method.\n"
1415  << "Size of combinations must be equal to number of neurons.\n";
1416 
1417  throw std::logic_error(buffer.str());
1418  }
1419 
1420  #endif
1421 
1422  // Calculate activation second derivative from layer
1423 
1424  Vector<double> activation_second_derivatives(perceptrons_number);
1425 
1426  for(size_t i = 0; i < perceptrons_number; i++)
1427  {
1428  activation_second_derivatives[i] = perceptrons[i].calculate_activation_second_derivative(combination[i]);
1429  }
1430 
1431  return(activation_second_derivatives);
1432 }
1433 
1434 
1435 // Matrix<double> arrange_activations_Jacobian(const Vector<double>&) const method
1436 
1439 
1441 {
1442  const size_t perceptrons_number = get_perceptrons_number();
1443 
1444  Matrix<double> activation_Jacobian(perceptrons_number, perceptrons_number, 0.0);
1445 
1446  activation_Jacobian.set_diagonal(activation_derivative);
1447 
1448  return(activation_Jacobian);
1449 }
1450 
1451 
1452 // Vector< Matrix<double> > arrange_activations_Hessian_form(const Vector<double>&) const method
1453 
1456 
1458 {
1459  const size_t perceptrons_number = get_perceptrons_number();
1460 
1461  Vector< Matrix<double> > activation_Hessian_form(perceptrons_number);
1462 
1463  for(size_t i = 0; i < perceptrons_number; i++)
1464  {
1465  activation_Hessian_form[i].set(perceptrons_number, perceptrons_number, 0.0);
1466  activation_Hessian_form[i](i,i) = activation_second_derivative[i];
1467  }
1468 
1469  return(activation_Hessian_form);
1470 }
1471 
1472 
1473 // Vector<double> calculate_outputs(const Vector<double>&) const method
1474 
1477 
1479 {
1480  // Control sentence (if debug)
1481 
1482  #ifndef NDEBUG
1483 
1484  const size_t inputs_size = inputs.size();
1485 
1486  const size_t inputs_number = get_inputs_number();
1487 
1488  if(inputs_size != inputs_number)
1489  {
1490  std::ostringstream buffer;
1491 
1492  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1493  << "Vector<double> calculate_outputs(const Vector<double>&) const method.\n"
1494  << "Size of inputs must be equal to number of inputs to layer.\n";
1495 
1496  throw std::logic_error(buffer.str());
1497  }
1498 
1499  #endif
1500 
1502 }
1503 
1504 
1505 // Matrix<double> calculate_Jacobian(const Vector<double>&) const method
1506 
1512 
1514 {
1515 
1516  // Control sentence (if debug)
1517 
1518  #ifndef NDEBUG
1519 
1520  const size_t inputs_number = get_inputs_number();
1521 
1522  const size_t inputs_size = inputs.size();
1523 
1524  if(inputs_size != inputs_number)
1525  {
1526  std::ostringstream buffer;
1527 
1528  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1529  << "Matrix<double> calculate_Jacobian(const Vector<double>&) const method.\n"
1530  << "Size of inputs must be equal to number of inputs to layer.\n";
1531 
1532  throw std::logic_error(buffer.str());
1533  }
1534 
1535  #endif
1536 
1537  const Vector<double> combinations = calculate_combinations(inputs);
1538 
1539  const Vector<double> activations_derivatives = calculate_activations_derivatives(combinations);
1540 
1541  const Matrix<double> synaptic_weights = arrange_synaptic_weights();
1542 
1543  return(activations_derivatives*synaptic_weights);
1544 }
1545 
1546 
1547 // Vector< Matrix<double> > calculate_Hessian_form(const Vector<double>&) const method
1548 
1551 
1553 {
1554  const size_t perceptrons_number = get_perceptrons_number();
1555 
1556  const Matrix<double> synaptic_weights = arrange_synaptic_weights();
1557 
1558  const Vector<double> combination = calculate_combinations(inputs);
1559 
1560  const Vector<double> activations_second_derivatives = calculate_activations_second_derivatives(combination);
1561 
1562  Vector< Matrix<double> > activation_Hessian_form(perceptrons_number);
1563 
1564  Vector< Matrix<double> > Hessian_form(perceptrons_number);
1565 
1566  for(size_t i = 0; i < perceptrons_number; i++)
1567  {
1568  activation_Hessian_form[i].set(perceptrons_number, perceptrons_number, 0.0);
1569  activation_Hessian_form[i](i,i) = activations_second_derivatives[i];
1570 
1571  Hessian_form[i] = synaptic_weights.calculate_transpose().dot(activation_Hessian_form[i]).dot(synaptic_weights);
1572  }
1573 
1574  return(Hessian_form);
1575 }
1576 
1577 
1578 // Vector<double> calculate_outputs(const Vector<double>&, const Vector<double>&) const method
1579 
1583 
1585 {
1586  // Control sentence (if debug)
1587 
1588  #ifndef NDEBUG
1589 
1590  const size_t inputs_size = inputs.size();
1591 
1592  const size_t inputs_number = get_inputs_number();
1593 
1594  if(inputs_size != inputs_number)
1595  {
1596  std::ostringstream buffer;
1597 
1598  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1599  << "Vector<double> calculate_outputs(const Vector<double>&, const Vector<double>&) const method.\n"
1600  << "Size of layer inputs (" << inputs_size << ") must be equal to number of layer inputs (" << inputs_number << ").\n";
1601 
1602  throw std::logic_error(buffer.str());
1603  }
1604 
1605  const size_t parameters_size = parameters.size();
1606 
1607  const size_t parameters_number = count_parameters_number();
1608 
1609  if(parameters_size != parameters_number)
1610  {
1611  std::ostringstream buffer;
1612 
1613  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1614  << "Vector<double> calculate_outputs(const Vector<double>&, const Vector<double>&) const method.\n"
1615  << "Size of parameters (" << parameters_size << ") must be equal to number of parameters (" << parameters_number << ").\n";
1616 
1617  throw std::logic_error(buffer.str());
1618  }
1619 
1620  #endif
1621 
1622  return(calculate_activations(calculate_combinations(inputs, parameters)));
1623 }
1624 
1625 
1626 // Matrix<double> calculate_Jacobian(const Vector<double>&, const Vector<double>&) const method
1627 
1634 
1636 {
1637  // Control sentence (if debug)
1638 
1639  #ifndef NDEBUG
1640 
1641  const size_t inputs_number = get_inputs_number();
1642  const size_t inputs_size = inputs.size();
1643 
1644  if(inputs_size != inputs_number)
1645  {
1646  std::ostringstream buffer;
1647 
1648  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1649  << "Matrix<double> calculate_parameters_Jacobian(const Vector<double>&, const Vector<double>&) const method.\n"
1650  << "Size of inputs must be equal to number of inputs.\n";
1651 
1652  throw std::logic_error(buffer.str());
1653  }
1654 
1655  #endif
1656 
1657  const Vector<double> combinations = calculate_combinations(inputs, parameters);
1658 
1659  const Matrix<double> combinations_Jacobian = calculate_combinations_Jacobian(inputs, parameters);
1660 
1661  const Vector<double> activation_derivatives = calculate_activations_derivatives(combinations);
1662 
1663  const Matrix<double> activation_Jacobian = arrange_activations_Jacobian(activation_derivatives);
1664 
1665  return(activation_Jacobian.dot(combinations_Jacobian));
1666 }
1667 
1668 
1669 // Vector< Matrix<double> > calculate_Hessian_form(const Vector<double>&, const Vector<double>&) const method
1670 
1676 
1678 {
1679  // Control sentence (if debug)
1680 
1681  #ifndef NDEBUG
1682 
1683  const size_t inputs_number = get_inputs_number();
1684  const size_t inputs_size = inputs.size();
1685 
1686  if(inputs_size != inputs_number)
1687  {
1688  std::ostringstream buffer;
1689 
1690  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1691  << "Vector< Matrix<double> > calculate_Hessian_form(const Vector<double>&, const Vector<double>&) const method.\n"
1692  << "Size must be equal to number of inputs of layer.\n";
1693 
1694  throw std::logic_error(buffer.str());
1695  }
1696 
1697  #endif
1698 
1699  const size_t perceptrons_number = get_perceptrons_number();
1700 
1701  const Vector<double> combination = calculate_combinations(inputs);
1702 
1703  const Matrix<double> combination_parameters_Jacobian = calculate_combinations_Jacobian(inputs, parameters);
1704 
1705  const Vector<double> activation_second_derivatives = calculate_activations_second_derivatives(combination);
1706 
1707  const Vector< Matrix<double> > activation_Hessian_form = arrange_activations_Hessian_form(activation_second_derivatives);
1708 
1709  // Calculate parameters Hessian form
1710 
1711  Vector< Matrix<double> > parameters_Hessian_form(perceptrons_number);
1712 
1713  for(size_t i = 0; i < perceptrons_number; i++)
1714  {
1715  parameters_Hessian_form[i] = combination_parameters_Jacobian.calculate_transpose().dot(activation_Hessian_form[i]).dot(combination_parameters_Jacobian);
1716  }
1717 
1718  return(parameters_Hessian_form);
1719 }
1720 
1721 
1722 // std::string write_expression(const Vector<std::string>&, const Vector<std::string>&) const method
1723 
1727 
1728 std::string PerceptronLayer::write_expression(const Vector<std::string>& inputs_name, const Vector<std::string>& outputs_name) const
1729 {
1730  const size_t perceptrons_number = get_perceptrons_number();
1731 
1732  // Control sentence (if debug)
1733 
1734  #ifndef NDEBUG
1735 
1736  const size_t inputs_number = get_inputs_number();
1737  const size_t inputs_name_size = inputs_name.size();
1738 
1739  if(inputs_name_size != inputs_number)
1740  {
1741  std::ostringstream buffer;
1742 
1743  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1744  << "std::string write_expression(const Vector<std::string>&, const Vector<std::string>&) const method.\n"
1745  << "Size of inputs name must be equal to number of layer inputs.\n";
1746 
1747  throw std::logic_error(buffer.str());
1748  }
1749 
1750  const size_t outputs_name_size = outputs_name.size();
1751 
1752  if(outputs_name_size != perceptrons_number)
1753  {
1754  std::ostringstream buffer;
1755 
1756  buffer << "OpenNN Exception: PerceptronLayer class.\n"
1757  << "std::string write_expression(const Vector<std::string>&, const Vector<std::string>&) const method.\n"
1758  << "Size of outputs name must be equal to number of perceptrons.\n";
1759 
1760  throw std::logic_error(buffer.str());
1761  }
1762 
1763  #endif
1764 
1765  std::ostringstream buffer;
1766 
1767  for(size_t i = 0; i < perceptrons_number; i++)
1768  {
1769  buffer << perceptrons[i].write_expression(inputs_name, outputs_name[i]);
1770  }
1771 
1772  return(buffer.str());
1773 }
1774 
1775 }
1776 
1777 // OpenNN: Open Neural Networks Library.
1778 // Copyright (c) 2005-2015 Roberto Lopez.
1779 //
1780 // This library is free software; you can redistribute it and/or
1781 // modify it under the terms of the GNU Lesser General Public
1782 // License as published by the Free Software Foundation; either
1783 // version 2.1 of the License, or any later version.
1784 //
1785 // This library is distributed in the hope that it will be useful,
1786 // but WITHOUT ANY WARRANTY; without even the implied warranty of
1787 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
1788 // Lesser General Public License for more details.
1789 
1790 // You should have received a copy of the GNU Lesser General Public
1791 // License along with this library; if not, write to the Free Software
1792 // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
void set_synaptic_weights(const Matrix< double > &)
void randomize_uniform(const double &=-1.0, const double &=1.0)
Definition: vector.h:781
const bool & get_display(void) const
void set_inputs_number(const size_t &)
Vector< double > arrange_parameters(void) const
Vector< T > take_out(const size_t &, const size_t &) const
Definition: vector.h:4928
Vector< double > calculate_activations_derivatives(const Vector< double > &) const
void prune_perceptron(const size_t &)
PerceptronLayer & operator=(const PerceptronLayer &)
void set_biases(const Vector< double > &)
std::string write_activation_function(void) const
const Vector< Perceptron > & get_perceptrons(void) const
Returns a constant reference to the vector of perceptrons defining the layer.
size_t count_parameters_number(void) const
Returns the number of parameters (biases and synaptic weights) of the layer.
Matrix< double > calculate_combinations_Jacobian(const Vector< double > &) const
Vector< double > calculate_combinations(const Vector< double > &) const
size_t get_inputs_number(void) const
Returns the number of inputs to the layer.
void set(void)
Sets the size of a vector to zero.
Definition: vector.h:656
void set_perceptron(const size_t &, const Perceptron &)
ActivationFunction
Enumeration of available activation functions for the perceptron neuron model.
Definition: perceptron.h:72
void set_diagonal(const T &)
Definition: matrix.h:1858
Vector< Matrix< double > > calculate_Hessian_form(const Vector< double > &) const
bool display
Display messages to screen.
const Perceptron::ActivationFunction & get_activation_function(void) const
Vector< Vector< double > > arrange_perceptrons_parameters(void) const
void set_display(const bool &)
const size_t & get_columns_number(void) const
Returns the number of columns in the matrix.
Definition: matrix.h:1090
Vector< double > calculate_outputs(const Vector< double > &) const
void initialize_synaptic_weights(const double &)
Vector< double > calculate_activations_second_derivatives(const Vector< double > &) const
void grow_inputs(const size_t &)
const Perceptron & get_perceptron(const size_t &) const
Vector< size_t > count_cumulative_parameters_number(void) const
void set_parameters(const Vector< double > &)
Vector< Matrix< double > > calculate_combinations_Hessian_form(const Vector< double > &) const
void tuck_in(const size_t &, const Vector< T > &)
Definition: vector.h:4891
void randomize_normal(const double &=0.0, const double &=1.0)
Definition: vector.h:867
double dot(const Vector< double > &) const
Definition: vector.h:3654
Matrix< double > arrange_activations_Jacobian(const Vector< double > &) const
const size_t & get_rows_number(void) const
Returns the number of rows in the matrix.
Definition: matrix.h:1079
Matrix< T > calculate_transpose(void) const
Returns the transpose of the matrix.
Definition: matrix.h:4866
Vector< Matrix< double > > arrange_activations_Hessian_form(const Vector< double > &) const
void initialize_parameters(const double &)
Definition: perceptron.cpp:670
void initialize_biases(const double &)
std::string write_expression(const Vector< std::string > &, const Vector< std::string > &) const
double calculate_parameters_norm(void) const
Calculates the norm of a layer parameters vector.
size_t get_perceptrons_number(void) const
Returns the size of the perceptrons vector.
void set_perceptrons_number(const size_t &)
Vector< double > dot(const Vector< double > &) const
Definition: matrix.h:5772
void prune_input(const size_t &)
bool operator==(const PerceptronLayer &) const
void grow_perceptrons(const size_t &)
bool is_empty(void) const
Returns true if the size of the layer is zero, and false otherwise.
size_t count_perceptron_parameters_number(void) const
Vector< Perceptron > perceptrons
Matrix< double > calculate_Jacobian(const Vector< double > &) const
void set_perceptrons(const Vector< Perceptron > &)
Vector< double > calculate_activations(const Vector< double > &) const
Matrix< double > arrange_synaptic_weights(void) const
Vector< double > arrange_biases(void) const
void set_activation_function(const Perceptron::ActivationFunction &)
void initialize_parameters(const double &)