OpenNN  2.2
Open Neural Networks Library
perceptron.cpp
1 /****************************************************************************************************************/
2 /* */
3 /* OpenNN: Open Neural Networks Library */
4 /* www.artelnics.com/opennn */
5 /* */
6 /* P E R C E P T R O N C L A S S */
7 /* */
8 /* Roberto Lopez */
9 /* Artelnics - Making intelligent use of data */
11 /* */
12 /****************************************************************************************************************/
13 
14 // OpenNN includes
15 
16 #include "perceptron.h"
17 
18 namespace OpenNN
19 {
20 
25 
27 {
28  set();
29 }
30 
31 
38 
/// Inputs number constructor.
/// Creates a perceptron neuron with the given number of inputs,
/// delegating all member initialization to set(const size_t&).
/// @param new_inputs_number Number of inputs in the neuron.

Perceptron::Perceptron(const size_t& new_inputs_number)
{
    set(new_inputs_number);
}
43 
44 
50 
/// Inputs number and parameters constructor.
/// Creates a perceptron with the given number of inputs whose bias and every
/// synaptic weight are all set to the same value
/// (see set(const size_t&, const double&)).
/// @param new_inputs_number Number of inputs in the neuron.
/// @param new_parameters_value Value for the bias and all the synaptic weights.

Perceptron::Perceptron(const size_t& new_inputs_number, const double& new_parameters_value)
{
    set(new_inputs_number, new_parameters_value);
}
55 
56 
60 
/// Copy constructor.
/// Creates a perceptron that is a member-wise copy of another perceptron
/// (see set(const Perceptron&)).
/// @param other_perceptron Perceptron object to be copied.

Perceptron::Perceptron(const Perceptron& other_perceptron)
{
    set(other_perceptron);
}
65 
66 
69 
71 {
72 }
73 
74 
75 // ASSIGNMENT OPERATOR
76 
80 
81 Perceptron& Perceptron::operator=(const Perceptron& other_perceptron)
82 {
83  if(this != &other_perceptron)
84  {
85  bias = other_perceptron.bias;
86 
87  synaptic_weights = other_perceptron.synaptic_weights;
88 
89  activation_function = other_perceptron.activation_function;
90 
91  display = other_perceptron.display;
92  }
93 
94  return(*this);
95 }
96 
97 
98 // EQUAL TO OPERATOR
99 
100 // bool operator == (const Perceptron&) const method
101 
106 
107 bool Perceptron::operator == (const Perceptron& other_perceptron) const
108 {
109  if(bias == other_perceptron.bias
110  && synaptic_weights == other_perceptron.synaptic_weights
111  && activation_function == other_perceptron.activation_function
112  && display == other_perceptron.display)
113  {
114  return(true);
115  }
116  else
117  {
118  return(false);
119  }
120 }
121 
122 
123 // METHODS
124 
125 // const ActivationFunction& get_activation_function(void) const method
126 
128 
130 {
131  return(activation_function);
132 }
133 
134 
135 // std::string write_activation_function(void) const method
136 
138 
140 {
141  switch(activation_function)
142  {
143  case Perceptron::Logistic:
144  {
145  return("logistic");
146  }
147  break;
148 
149  case Perceptron::HyperbolicTangent:
150  {
151  return("tanh");
152  }
153  break;
154 
155  case Perceptron::Threshold:
156  {
157  return("threshold");
158  }
159  break;
160 
161  case Perceptron::SymmetricThreshold:
162  {
163  return("symmetric_threshold");
164  }
165  break;
166 
167  case Perceptron::Linear:
168  {
169  return("");
170  }
171  break;
172 
173  default:
174  {
175  std::ostringstream buffer;
176 
177  buffer << "OpenNN Exception: Perceptron class.\n"
178  << "std::string get_activation_function(void) const method.\n"
179  << "Unknown activation function.\n";
180 
181  throw std::logic_error(buffer.str());
182  }
183  break;
184  }
185 }
186 
187 
188 // size_t get_inputs_number(void) const method
189 
191 
193 {
194  return(synaptic_weights.size());
195 }
196 
197 
198 // double get_bias(void) const method
199 
201 
/// Returns a reference to the bias value of the neuron.

const double& Perceptron::get_bias(void) const
{
    return(bias);
}
206 
207 
208 // Vector<double>& arrange_synaptic_weights(void)
209 
211 
213 {
214  return(synaptic_weights);
215 }
216 
217 
218 // double get_synaptic_weight(const size_t&) const method
219 
222 
223 const double& Perceptron::get_synaptic_weight(const size_t& synaptic_weight_index) const
224 {
225  // Control sentence (if debug)
226 
227  #ifndef NDEBUG
228 
229  const size_t inputs_number = get_inputs_number();
230 
231  if(synaptic_weight_index >= inputs_number)
232  {
233  std::ostringstream buffer;
234 
235  buffer << "OpenNN Exception: Perceptron class.\n"
236  << "double get_synaptic_weight(const size_t&) const method.\n"
237  << "Index of synaptic weight must be less than number of inputs.\n";
238 
239  throw std::logic_error(buffer.str());
240  }
241 
242  #endif
243 
244  // Get single synaptic weights
245 
246  return(synaptic_weights[synaptic_weight_index]);
247 }
248 
249 
250 // const bool& get_display(void) const method
251 
254 
/// Returns true if messages from this class are to be displayed on the
/// screen, and false otherwise.

const bool& Perceptron::get_display(void) const
{
    return(display);
}
259 
260 
261 // void set(void) method
262 
264 
265 void Perceptron::set(void)
266 {
267  initialize_bias_normal(0.0, 0.2);
268 
270 
271  activation_function = HyperbolicTangent;
272 
273  display = true;
274 }
275 
276 
277 // void set(const size_t&) method
278 
282 
283 void Perceptron::set(const size_t& new_inputs_number)
284 {
285  // Set synaptic weights
286 
287  activation_function = HyperbolicTangent;
288 
289  initialize_bias_normal(0.0, 0.2);
290 
291  synaptic_weights.set(new_inputs_number);
293 
294  display = true;
295 }
296 
297 
298 // void set(const size_t&, const double&) method
299 
303 
304 void Perceptron::set(const size_t& new_inputs_number, const double& new_parameters_value)
305 {
306  bias = new_parameters_value;
307  synaptic_weights.set(new_inputs_number, new_parameters_value);
308 
309  activation_function = HyperbolicTangent;
310 
311  display = true;
312 }
313 
314 
315 // void set(const Perceptron&)
316 
319 
320 void Perceptron::set(const Perceptron& other_perceptron)
321 {
322  bias = other_perceptron.bias;
323 
324  synaptic_weights = other_perceptron.synaptic_weights;
325 
326  activation_function = other_perceptron.activation_function;
327 
328  display = other_perceptron.display;
329 }
330 
331 
332 // void set_activation_function(const ActivationFunction&) method
333 
336 
338 {
339  activation_function = new_activation_function;
340 }
341 
342 
343 // void set_activation_function(const std::string&) method
344 
348 
349 void Perceptron::set_activation_function(const std::string& new_activation_function_name)
350 {
351  if(new_activation_function_name == "Logistic")
352  {
353  activation_function = Logistic;
354  }
355  else if(new_activation_function_name == "HyperbolicTangent")
356  {
357  activation_function = HyperbolicTangent;
358  }
359  else if(new_activation_function_name == "Threshold")
360  {
361  activation_function = Threshold;
362  }
363  else if(new_activation_function_name == "SymmetricThreshold")
364  {
365  activation_function = SymmetricThreshold;
366  }
367  else if(new_activation_function_name == "Linear")
368  {
369  activation_function = Linear;
370  }
371  else
372  {
373  std::ostringstream buffer;
374 
375  buffer << "OpenNN Exception: Perceptron class.\n"
376  << "void set_activation_function(const std::string&) method.\n"
377  << "Unknown activation function: " << new_activation_function_name << ".\n";
378 
379  throw std::logic_error(buffer.str());
380  }
381 }
382 
383 
384 // void set_bias(const double&) method
385 
388 
/// Sets a new bias value for the neuron.
/// @param new_bias New bias value.

void Perceptron::set_bias(const double& new_bias)
{
    bias = new_bias;
}
393 
394 
395 // void set_synaptic_weights(const Vector<double>&) method
396 
399 
400 void Perceptron::set_synaptic_weights(const Vector<double>& new_synaptic_weights)
401 {
402  // Control sentence (if debug)
403 
404  #ifndef NDEBUG
405 
406  const size_t inputs_number = get_inputs_number();
407 
408  if(new_synaptic_weights.size() != inputs_number)
409  {
410  std::ostringstream buffer;
411 
412  buffer << "OpenNN Exception: Perceptron class.\n"
413  << "void set_synaptic_weights(const Vector<double>&) method.\n"
414  << "Size of synaptic weights vector must be equal to number of inputs.\n";
415 
416  throw std::logic_error(buffer.str());
417  }
418 
419  #endif
420 
421  // Set synaptic weights
422 
423  synaptic_weights = new_synaptic_weights;
424 }
425 
426 
427 // void set_synaptic_weight(const size_t&, const double&) method
428 
432 
433 void Perceptron::set_synaptic_weight(const size_t& synaptic_weight_index, const double& new_synaptic_weight)
434 {
435  // Control sentence (if debug)
436 
437  #ifndef NDEBUG
438 
439  const size_t inputs_number = get_inputs_number();
440 
441  if(synaptic_weight_index >= inputs_number)
442  {
443  std::ostringstream buffer;
444 
445  buffer << "OpenNN Exception: Perceptron class.\n"
446  << "void set_synaptic_weight(const size_t&, const double&) method.\n"
447  << "Index of synaptic weight must be less than number of inputs.\n";
448 
449  throw std::logic_error(buffer.str());
450  }
451 
452  #endif
453 
454  // Set single synaptic weight
455 
456  synaptic_weights[synaptic_weight_index] = new_synaptic_weight;
457 }
458 
459 
460 // void set_display(const bool&) method
461 
466 
/// Sets whether messages from this class are to be displayed on the screen.
/// @param new_display New display flag value.

void Perceptron::set_display(const bool& new_display)
{
    display = new_display;
}
471 
472 
473 // void set_inputs_number(size_t) method
474 
479 
480 void Perceptron::set_inputs_number(const size_t& new_inputs_number)
481 {
482  initialize_bias_normal(0.0,1.0);
483 
484  synaptic_weights.set(new_inputs_number);
486 }
487 
488 
489 // size_t count_parameters_number(void) const method
490 
492 
494 {
495  const size_t inputs_number = get_inputs_number();
496 
497  return(1 + inputs_number);
498 }
499 
500 
501 // Vector<double> arrange_parameters(void) const method
502 
504 
506 {
507  const size_t parameters_number = count_parameters_number();
508 
509  Vector<double> parameters(parameters_number);
510 
511  parameters[0] = bias;
512 
513  const size_t inputs_number = get_inputs_number();
514 
515  for(size_t i = 0; i < inputs_number; i++)
516  {
517  parameters[(size_t)1+i] = synaptic_weights[i];
518  }
519 
520  return(parameters);
521 }
522 
523 
524 // void set_parameters(const Vector<double>&) method
525 
528 
529 void Perceptron::set_parameters(const Vector<double>& new_parameters)
530 {
531  const size_t inputs_number = get_inputs_number();
532 
533  // Control sentence (if debug)
534 
535  #ifndef NDEBUG
536 
537  const size_t size = new_parameters.size();
538 
539  if(size != 1+inputs_number)
540  {
541  std::ostringstream buffer;
542 
543  buffer << "OpenNN Exception: Perceptron class.\n"
544  << "void set_parameters(const Vector<double>&) method.\n"
545  << "Size must be equal to one plus number of inputs.\n";
546 
547  throw std::logic_error(buffer.str());
548  }
549 
550  #endif
551 
552  bias = new_parameters[0];
553 
554  for(size_t i = 0; i < inputs_number; i++)
555  {
556  synaptic_weights[i] = new_parameters[i+1];
557  }
558 }
559 
560 
561 // void initialize_bias(const double&) method
562 
565 
/// Initializes the bias of the neuron with a given value.
/// @param value Initialization value for the bias.

void Perceptron::initialize_bias(const double& value)
{
    bias = value;
}
570 
571 
572 // void initialize_bias_uniform(const double&, const double&) method
573 
577 
578 void Perceptron::initialize_bias_uniform(const double& minimum, const double& maximum)
579 {
580  // Control sentence (if debug)
581 
582  #ifndef NDEBUG
583 
584  if(minimum > maximum)
585  {
586  std::ostringstream buffer;
587 
588  buffer << "OpenNN Exception: Perceptron class.\n"
589  << "initialize_bias_uniform(const double&, const double&) method.\n"
590  << "Minimum value must be less than maximum value.\n";
591 
592  throw std::logic_error(buffer.str());
593  }
594 
595  #endif
596 
597  bias = calculate_random_uniform(minimum, maximum);
598 }
599 
600 
601 // void initialize_synaptic_weights(const double&) method
602 
605 
607 {
609 }
610 
611 
612 // void initialize_synaptic_weights_uniform(const double&, const double&) method
613 
617 
/// Initializes all the synaptic weights of the neuron with random values
/// drawn from a uniform distribution on [minimum, maximum].
/// @param minimum Lower bound of the distribution.
/// @param maximum Upper bound of the distribution.

void Perceptron::initialize_synaptic_weights_uniform(const double& minimum, const double& maximum)
{
    synaptic_weights.randomize_uniform(minimum, maximum);
}
622 
623 
624 // void initialize_bias_normal(const double&, const double&) method
625 
629 
630 void Perceptron::initialize_bias_normal(const double& mean, const double& standard_deviation)
631 {
632  // Control sentence (if debug)
633 
634  #ifndef NDEBUG
635 
636  if(standard_deviation < 0.0)
637  {
638  std::ostringstream buffer;
639 
640  buffer << "OpenNN Exception: Perceptron class.\n"
641  << "initialize_bias_normal(const double&, const double&) method.\n"
642  << "Standard deviation must be equal or greater than zero.\n";
643 
644  throw std::logic_error(buffer.str());
645  }
646 
647  #endif
648 
649  bias = calculate_random_normal(mean, standard_deviation);
650 }
651 
652 
653 // void initialize_synaptic_weights_normal(const double&, const double&) method
654 
658 
/// Initializes all the synaptic weights of the neuron with random values
/// drawn from a normal distribution.
/// @param mean Mean of the distribution.
/// @param standard_deviation Standard deviation of the distribution.

void Perceptron::initialize_synaptic_weights_normal(const double& mean, const double& standard_deviation)
{
    synaptic_weights.randomize_normal(mean, standard_deviation);
}
663 
664 
665 // void initialize_parameters(const double&) method
666 
669 
670 void Perceptron::initialize_parameters(const double& value)
671 {
672  bias = value;
674 }
675 
676 
677 // double calculate_combination(const Vector<double>&) method
678 
682 
684 {
685  const size_t inputs_number = get_inputs_number();
686 
687  // Control sentence (if debug)
688 
689  #ifndef NDEBUG
690 
691  if(inputs_number == 0)
692  {
693  std::ostringstream buffer;
694 
695  buffer << "OpenNN Exception: Perceptron class.\n"
696  << "calculate_combination(const Vector<double>&) method.\n"
697  << "Number of inputs must be greater than zero.\n";
698 
699  throw std::logic_error(buffer.str());
700  }
701 
702  const size_t inputs_size = inputs.size();
703 
704  if(inputs_size != inputs_number)
705  {
706  std::ostringstream buffer;
707 
708  buffer << "OpenNN Exception: Perceptron class.\n"
709  << "double calculate_combination(const Vector<double>&) method.\n"
710  << "Size of inputs (" << inputs_size << ") must be equal to number of inputs (" << inputs_number << ").\n";
711 
712  throw std::logic_error(buffer.str());
713  }
714 
715  #endif
716 
717  // Calculate combination
718 
719  double combination = bias;
720 
721  for(size_t i = 0; i < inputs_number; i++)
722  {
723  combination += synaptic_weights[i]*inputs[i];
724  }
725 
726  return(combination);
727 }
728 
729 
730 // double calculate_combination(const Vector<double>&, const Vector<double>&) const method
731 
735 
736 double Perceptron::calculate_combination(const Vector<double>& inputs, const Vector<double>& parameters) const
737 {
738  const size_t inputs_number = get_inputs_number();
739 
740  // Control sentence (if debug)
741 
742  #ifndef NDEBUG
743 
744  std::ostringstream buffer;
745 
746  const size_t inputs_size = inputs.size();
747 
748  if(inputs_size != inputs_number)
749  {
750  buffer << "OpenNN Exception: Perceptron class.\n"
751  << "double calculate_combination(const Vector<double>&, const Vector<double>&) const method.\n"
752  << "Size of inputs must be equal to number of inputs.\n";
753 
754  throw std::logic_error(buffer.str());
755  }
756 
757  const size_t parameters_size = parameters.size();
758 
759  const size_t parameters_number = count_parameters_number();
760 
761  if(parameters_size != parameters_number)
762  {
763  buffer << "OpenNN Exception: Perceptron class.\n"
764  << "double calculate_combination(const Vector<double>&, const Vector<double>&) const method.\n"
765  << "Size of potential parameters (" << parameters_size << ") must be equal to number of parameters (" << parameters_number << ").\n";
766 
767  throw std::logic_error(buffer.str());
768  }
769 
770  #endif
771 
772  // Modified for performance.
773 
774  double combination = parameters[0];
775 
776  for(size_t i = 0; i < inputs_number; i++)
777  {
778  combination += parameters[i+1]*inputs[i];
779  }
780 
781  return(combination);
782 }
783 
784 
785 // double calculate_activation(const double&) const method
786 
790 
/// Returns the activation of the neuron for a given combination value,
/// according to the current activation function.
/// @param combination Combination (bias plus weighted inputs) of the neuron.
/// @throws std::logic_error If the activation function member holds an
/// unknown value.

double Perceptron::calculate_activation(const double& combination) const
{
    switch(activation_function)
    {
        // Logistic sigmoid: 1/(1+e^-x), range (0,1).

        case Perceptron::Logistic:
        {
            return(1.0/(1.0 + exp(-combination)));
        }
        break;

        // Hyperbolic tangent written as 1 - 2/(e^{2x}+1), range (-1,1).

        case Perceptron::HyperbolicTangent:
        {
            return(1.0-2.0/(exp(2.0*combination)+1.0));
        }
        break;

        // Heaviside step: 0 for negative combinations, 1 otherwise.

        case Perceptron::Threshold:
        {
            if(combination < 0)
            {
                return(0.0);
            }
            else
            {
                return(1.0);
            }
        }
        break;

        // Sign-like step: -1 for negative combinations, +1 otherwise.

        case Perceptron::SymmetricThreshold:
        {
            if(combination < 0)
            {
                return(-1.0);
            }
            else
            {
                return(1.0);
            }
        }
        break;

        // Identity activation.

        case Perceptron::Linear:
        {
            return(combination);
        }
        break;

        default:
        {
            std::ostringstream buffer;

            buffer << "OpenNN Exception: Perceptron class.\n"
                   << "double calculate_activation(const double&) const method.\n"
                   << "Unknown activation function.\n";

            throw std::logic_error(buffer.str());
        }
        break;
    }
}
852 
853 
854 // double calculate_activation_derivative(const double&) const method
855 
859 
/// Returns the first derivative of the activation function with respect to
/// the combination, evaluated at a given combination value.
/// @param combination Combination (bias plus weighted inputs) of the neuron.
/// @throws std::logic_error For the (symmetric) threshold functions at
/// combination == 0, where the derivative does not exist, or if the
/// activation function member holds an unknown value.

double Perceptron::calculate_activation_derivative(const double& combination) const
{
    switch(activation_function)
    {
        // d/dx logistic(x) = e^-x/(1+e^-x)^2.

        case Perceptron::Logistic:
        {
            const double exponent = exp(-combination);

            return(exponent/((1.0+exponent)*(1.0+exponent)));
        }
        break;

        // d/dx tanh(x) = 1 - tanh(x)^2.

        case Perceptron::HyperbolicTangent:
        {
            const double tanh_combination = tanh(combination);

            return(1.0 - tanh_combination*tanh_combination);
        }
        break;

        // Step function: derivative is zero everywhere except at the
        // discontinuity, where it is undefined.

        case Perceptron::Threshold:
        {
            if(combination != 0.0)
            {
                return(0.0);
            }
            else
            {
                std::ostringstream buffer;

                buffer << "OpenNN Exception: Perceptron class.\n"
                       << "double calculate_activation_derivative(const double&) const method.\n"
                       << "Threshold activation function is not derivable.\n";

                throw std::logic_error(buffer.str());
            }
        }
        break;

        // Same reasoning as Threshold, with the discontinuity at zero.

        case Perceptron::SymmetricThreshold:
        {
            if(combination != 0.0)
            {
                return(0.0);
            }
            else
            {
                std::ostringstream buffer;

                buffer << "OpenNN Exception: Perceptron class.\n"
                       << "double calculate_activation_derivative(const double&) const method.\n"
                       << "Symmetric threshold activation function is not derivable.\n";

                throw std::logic_error(buffer.str());
            }
        }
        break;

        // Identity activation has unit derivative.

        case Perceptron::Linear:
        {
            return(1.0);
        }
        break;

        default:
        {
            std::ostringstream buffer;

            buffer << "OpenNN Exception: Perceptron class.\n"
                   << "double calculate_activation_derivative(const double&) const method.\n"
                   << "Unknown activation function.\n";

            throw std::logic_error(buffer.str());
        }
        break;
    }
}
937 
938 
939 // double calculate_activation_second_derivative(const double&) const method
940 
944 
945 double Perceptron::calculate_activation_second_derivative(const double& combination) const
946 {
947  switch(activation_function)
948  {
949  case Perceptron::Logistic:
950  {
951  const double exponent = exp(combination);
952 
953  return(-exponent*(exponent-1.0)/((exponent+1.0)*(exponent+1.0)*(exponent+1.0)));
954  }
955  break;
956 
957  case Perceptron::HyperbolicTangent:
958  {
959  return(-2.0*tanh(combination)*(1.0 - pow(tanh(combination),2)));
960  }
961  break;
962 
963  case Perceptron::Threshold:
964  {
965  if(combination != 0.0)
966  {
967  return(0.0);
968  }
969  else
970  {
971  std::ostringstream buffer;
972 
973  buffer << "OpenNN Exception: Perceptron class.\n"
974  << "double calculate_activation_second_derivative(const double&) const method.\n"
975  << "Threshold activation function is not derivable.\n";
976 
977  throw std::logic_error(buffer.str());
978  }
979  }
980  break;
981 
982  case Perceptron::SymmetricThreshold:
983  {
984  if(combination != 0.0)
985  {
986  return(0.0);
987  }
988  else
989  {
990  std::ostringstream buffer;
991 
992  buffer << "OpenNN Exception: Perceptron class.\n"
993  << "double calculate_activation_second_derivative(const double&) const method.\n"
994  << "Symmetric threshold activation function is not derivable.\n";
995 
996  throw std::logic_error(buffer.str());
997  }
998  }
999  break;
1000 
1001  case Perceptron::Linear:
1002  {
1003  return(0.0);
1004  }
1005  break;
1006 
1007  default:
1008  {
1009  std::ostringstream buffer;
1010 
1011  buffer << "OpenNN Exception: Perceptron class.\n"
1012  << "double calculate_activation_second_derivative(const double&) const method.\n"
1013  << "Unknown activation function.\n";
1014 
1015  throw std::logic_error(buffer.str());
1016  }
1017  break;
1018  }
1019 }
1020 
1021 
1022 // double calculate_output(const Vector<double>&) const method
1023 
1027 
1029 {
1030  // Control sentence (if debug)
1031 
1032  #ifndef NDEBUG
1033 
1034  const size_t size = inputs.size();
1035  const size_t inputs_number = get_inputs_number();
1036 
1037  if(size != inputs_number)
1038  {
1039  std::ostringstream buffer;
1040 
1041  buffer << "OpenNN Exception: Perceptron class.\n"
1042  << "double calculate_output(const Vector<double>&) const method.\n"
1043  << "Size must be equal to number of inputs.\n";
1044 
1045  throw std::logic_error(buffer.str());
1046  }
1047 
1048  #endif
1049 
1050  // Calculate outputs
1051 
1052  return(calculate_activation(calculate_combination(inputs)));
1053 }
1054 
1055 
1056 // double calculate_output(const Vector<double>&, const Vector<double>&) const method
1057 
1062 
1063 double Perceptron::calculate_output(const Vector<double>& inputs, const Vector<double>& parameters) const
1064 {
1065  // Control sentence (if debug)
1066 
1067  #ifndef NDEBUG
1068 
1069  const size_t inputs_size = inputs.size();
1070  const size_t inputs_number = get_inputs_number();
1071 
1072  if(inputs_size != inputs_number)
1073  {
1074  std::ostringstream buffer;
1075 
1076  buffer << "OpenNN Exception: Perceptron class.\n"
1077  << "double calculate_output(const Vector<double>&, const Vector<double>&) const method.\n"
1078  << "Size of inputs must be equal to number of inputs.\n";
1079 
1080  throw std::logic_error(buffer.str());
1081  }
1082 
1083  const size_t parameters_size = parameters.size();
1084 
1085  const size_t parameters_number = count_parameters_number();
1086 
1087  if(parameters_size != parameters_number)
1088  {
1089  std::ostringstream buffer;
1090 
1091  buffer << "OpenNN Exception: Perceptron class.\n"
1092  << "double calculate_output(const Vector<double>&, const Vector<double>&) const method.\n"
1093  << "Size of potential parameters (" << parameters_size << ") must be equal to number of parameters (" << parameters_number << ").\n";
1094 
1095  throw std::logic_error(buffer.str());
1096  }
1097 
1098  #endif
1099 
1100  return(calculate_activation(calculate_combination(inputs, parameters)));
1101 }
1102 
1103 
1104 // Vector<double> calculate_gradient(const Vector<double>&) const method
1105 
1108 
1110 {
1111  // Control sentence (if debug)
1112 
1113  #ifndef NDEBUG
1114 
1115  const size_t size = inputs.size();
1116  const size_t inputs_number = get_inputs_number();
1117 
1118  if(size != inputs_number)
1119  {
1120  std::ostringstream buffer;
1121 
1122  buffer << "OpenNN Exception: Perceptron class.\n"
1123  << "Vector<double> calculate_gradient(const Vector<double>&) const method.\n"
1124  << "Size must be equal to number of inputs.\n";
1125 
1126  throw std::logic_error(buffer.str());
1127  }
1128 
1129  #endif
1130 
1131  // Calculate gradient
1132 
1133  const double combination = calculate_combination(inputs);
1134 
1135  const double activation_derivative = calculate_activation_derivative(combination);
1136 
1137  return(synaptic_weights*activation_derivative);
1138 }
1139 
1140 
1141 // Vector<double> calculate_gradient(const Vector<double>&, const Vector<double>&) const method
1142 
1147 
1149 {
1150  const size_t inputs_number = get_inputs_number();
1151 
1152  // Control sentence (if debug)
1153 
1154  #ifndef NDEBUG
1155 
1156  const size_t size = inputs.size();
1157 
1158  if(size != inputs_number)
1159  {
1160  std::ostringstream buffer;
1161 
1162  buffer << "OpenNN Exception: Perceptron class.\n"
1163  << "double calculate_gradient(const Vector<double>&, const Vector<double>&) const method.\n"
1164  << "Size must be equal to number of inputs.\n";
1165 
1166  throw std::logic_error(buffer.str());
1167  }
1168 
1169  #endif
1170 
1171  // Calculate parameters gradient
1172 
1173  const double combination = calculate_combination(inputs, parameters);
1174 
1175  const double activation_derivative = calculate_activation_derivative(combination);
1176 
1177  Vector<double> gradient(1+inputs_number);
1178 
1179  // Bias
1180 
1181  gradient[0] = activation_derivative;
1182 
1183  // Synaptic weights
1184 
1185  for(size_t i = 1; i < 1+inputs_number; i++)
1186  {
1187  gradient[i] = inputs[i-1]*activation_derivative;
1188  }
1189 
1190  return(gradient);
1191 }
1192 
1193 
1194 // Vector<double> calculate_combination_gradient(const Vector<double>&) const method
1195 
1197 
1199 {
1200  return(synaptic_weights);
1201 }
1202 
1203 
1204 // Vector<double> calculate_combination_gradient(const Vector<double>&, const Vector<double>&) const method
1205 
1208 
1210 {
1211  const size_t inputs_number = get_inputs_number();
1212 
1213  // Control sentence (if debug)
1214 
1215  #ifndef NDEBUG
1216 
1217  const size_t size = inputs.size();
1218 
1219  if(size != inputs_number)
1220  {
1221  std::ostringstream buffer;
1222 
1223  buffer << "OpenNN Exception: Perceptron class.\n"
1224  << "double calculate_combination_gradient(const Vector<double>&, const Vector<double>&) const method.\n"
1225  << "Size must be equal to number of inputs.\n";
1226 
1227  throw std::logic_error(buffer.str());
1228  }
1229 
1230  #endif
1231 
1232  // Calculate combination gradient
1233 
1234  Vector<double> combination_gradient(1+inputs_number);
1235 
1236  // Bias
1237 
1238  combination_gradient[0] = 1.0;
1239 
1240  // Synaptic weights
1241 
1242  for(size_t i = 1; i < 1+inputs_number; i++)
1243  {
1244  combination_gradient[i] = inputs[i-1];
1245  }
1246 
1247  return(combination_gradient);
1248 }
1249 
1250 
1251 // Matrix<double> calculate_Hessian(const Vector<double>&) const method
1252 
1255 
1257 {
1258  // Control sentence (if debug)
1259 
1260  #ifndef NDEBUG
1261 
1262  const size_t inputs_number = get_inputs_number();
1263  const size_t inputs_size = inputs.size();
1264 
1265  if(inputs_size != inputs_number)
1266  {
1267  std::ostringstream buffer;
1268 
1269  buffer << "OpenNN Exception: Perceptron class.\n"
1270  << "Matrix<double> calculate_Hessian(const Vector<double>&) const method.\n"
1271  << "Size of inputs must be equal to number of inputs.\n";
1272 
1273  throw std::logic_error(buffer.str());
1274  }
1275 
1276  #endif
1277 
1278  const double combination = calculate_combination(inputs);
1279  const double activation_second_derivative = calculate_activation_second_derivative(combination);
1280 
1281  return(synaptic_weights.direct(synaptic_weights)*activation_second_derivative);
1282 }
1283 
1284 
1285 // Matrix<double> calculate_Hessian(const Vector<double>&, const Vector<double>&) const method
1286 
1292 
1294 {
1295  // Control sentence (if debug)
1296 
1297  #ifndef NDEBUG
1298 
1299  const size_t inputs_size = inputs.size();
1300  const size_t inputs_number = get_inputs_number();
1301 
1302  if(inputs_size != inputs_number)
1303  {
1304  std::ostringstream buffer;
1305 
1306  buffer << "OpenNN Exception: Perceptron class.\n"
1307  << "Matrix<double> calculate_Hessian(const Vector<double>&, const Vector<double>&) const method.\n"
1308  << "Size of inputs must be equal to number of inputs.\n";
1309 
1310  throw std::logic_error(buffer.str());
1311  }
1312 
1313  #endif
1314 
1315  const double combination = calculate_combination(inputs, parameters);
1316  const double activation_second_derivative = calculate_activation_second_derivative(combination);
1317 
1318  const size_t parameters_number = count_parameters_number();
1319 
1320  Matrix<double> Hessian(parameters_number, parameters_number);
1321 
1322  // Bias - bias derivative
1323 
1324  Hessian(0,0) = activation_second_derivative;
1325 
1326  // Bias - synaptic weight derivative
1327 
1328  for(size_t i = 1; i < parameters_number; i++)
1329  {
1330  Hessian(0,i) = activation_second_derivative*inputs[i-1];
1331  }
1332 
1333  // Synaptic weight -synaptic weight derivative
1334 
1335  for(size_t i = 1; i < parameters_number; i++)
1336  {
1337  for(size_t j = 1; j < parameters_number; j++)
1338  {
1339  Hessian(i,j) = activation_second_derivative*inputs[i-1]*inputs[j-1];
1340  }
1341  }
1342 
1343  // Hessian symmetry
1344 
1345  for(size_t i = 0; i < parameters_number; i++)
1346  {
1347  for(size_t j = 0; j < i; j++)
1348  {
1349  Hessian(i,j) = Hessian(j,i);
1350  }
1351  }
1352 
1353  return(Hessian);
1354 }
1355 
1356 
1357 // Matrix<double> calculate_combination_Hessian(const Vector<double>&) const method
1358 
1360 
1362 {
1363  const size_t inputs_number = get_inputs_number();
1364 
1365  const Matrix<double> combination_Hessian(inputs_number, inputs_number, 0.0);
1366 
1367  return(combination_Hessian);
1368 }
1369 
1370 
1371 // Matrix<double> calculate_combination_Hessian(const Vector<double>&, const Vector<double>&) const method
1372 
1376 
1378 {
1379  const size_t parameters_number = count_parameters_number();
1380 
1381  const Matrix<double> Hessian(parameters_number, parameters_number, 0.0);
1382 
1383  return(Hessian);
1384 }
1385 
1386 
1387 // void grow_input(void) method
1388 
1391 
1393 {
1394  synaptic_weights.push_back(0.0);
1395 }
1396 
1397 
1398 // void prune_input(const size_t&) method
1399 
1402 
1403 void Perceptron::prune_input(const size_t& index)
1404 {
1405  // Control sentence (if debug)
1406 
1407  #ifndef NDEBUG
1408 
1409  const size_t inputs_number = get_inputs_number();
1410 
1411  if(index >= inputs_number)
1412  {
1413  std::ostringstream buffer;
1414 
1415  buffer << "OpenNN Exception: Perceptron class.\n"
1416  << "void prune_input(const size_t&) method.\n"
1417  << "Index of input is equal or greater than number of inputs.\n";
1418 
1419  throw std::logic_error(buffer.str());
1420  }
1421 
1422  #endif
1423 
1424  synaptic_weights.erase(synaptic_weights.begin()+index);
1425 }
1426 
1427 
1428 // std::string write_expression(const Vector<std::string>&, const std::string&) const method
1429 
1433 
1434 std::string Perceptron::write_expression(const Vector<std::string>& inputs_name, const std::string& output_name) const
1435 {
1436  const size_t inputs_number = get_inputs_number();
1437 
1438  std::string activation_function_name = write_activation_function();
1439 
1440  std::ostringstream buffer;
1441 
1442  buffer << output_name << "=" << activation_function_name << "("
1443  << bias << "\n";
1444 
1445  for(size_t i = 0; i < inputs_number; i++)
1446  {
1447  if(synaptic_weights[i] >= 0)
1448  {
1449  buffer << "+";
1450  }
1451 
1452  buffer << synaptic_weights[i] << "*" << inputs_name[i];
1453 
1454  if(i != 0 && i%4 == 0 && i != inputs_number-1)
1455  {
1456  buffer << "\n";
1457  }
1458  }
1459 
1460  buffer << ");\n";
1461 
1462  return(buffer.str());
1463 }
1464 
1465 }
1466 
1467 
1468 // OpenNN: Open Neural Networks Library.
1469 // Copyright (c) 2005-2015 Roberto Lopez.
1470 //
1471 // This library is free software; you can redistribute it and/or
1472 // modify it under the terms of the GNU Lesser General Public
1473 // License as published by the Free Software Foundation; either
1474 // version 2.1 of the License, or any later version.
1475 //
1476 // This library is distributed in the hope that it will be useful,
1477 // but WITHOUT ANY WARRANTY; without even the implied warranty of
1478 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
1479 // Lesser General Public License for more details.
1480 
1481 // You should have received a copy of the GNU Lesser General Public
1482 // License along with this library; if not, write to the Free Software
1483 // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Vector< double > synaptic_weights
Synaptic weights vector.
Definition: perceptron.h:170
void randomize_uniform(const double &=-1.0, const double &=1.0)
Definition: vector.h:781
void set_display(const bool &)
Definition: perceptron.cpp:467
void prune_input(const size_t &)
void initialize(const T &)
Definition: vector.h:753
double bias
Bias value.
Definition: perceptron.h:166
void initialize_bias_uniform(const double &, const double &)
Definition: perceptron.cpp:578
void initialize_bias_normal(const double &, const double &)
Definition: perceptron.cpp:630
double calculate_activation(const double &) const
Definition: perceptron.cpp:791
void initialize_synaptic_weights(const double &)
Definition: perceptron.cpp:606
void set(void)
Sets the size of a vector to zero.
Definition: vector.h:656
const double & get_synaptic_weight(const size_t &) const
Definition: perceptron.cpp:223
Matrix< double > calculate_Hessian(const Vector< double > &) const
size_t count_parameters_number(void) const
Returns the number of parameters (bias and synaptic weights) in the perceptron.
Definition: perceptron.cpp:493
ActivationFunction
Enumeration of available activation functions for the perceptron neuron model.
Definition: perceptron.h:72
void set_synaptic_weights(const Vector< double > &)
Definition: perceptron.cpp:400
const ActivationFunction & get_activation_function(void) const
Returns the activation function of the neuron.
Definition: perceptron.cpp:129
bool display
Display messages to screen.
Definition: perceptron.h:178
void set_activation_function(const ActivationFunction &)
Definition: perceptron.cpp:337
Perceptron & operator=(const Perceptron &)
Definition: perceptron.cpp:81
std::string write_activation_function(void) const
Returns a string with the name of the activation function of the neuron.
Definition: perceptron.cpp:139
Vector< double > arrange_parameters(void) const
Returns the parameters (bias and synaptic weights) of the perceptron.
Definition: perceptron.cpp:505
void set_bias(const double &)
Definition: perceptron.cpp:389
void set_inputs_number(const size_t &)
Definition: perceptron.cpp:480
void initialize_synaptic_weights_uniform(const double &, const double &)
Definition: perceptron.cpp:618
Matrix< T > direct(const Vector< T > &) const
Definition: vector.h:3697
double calculate_combination(const Vector< double > &) const
Definition: perceptron.cpp:683
Vector< double > calculate_combination_gradient(const Vector< double > &) const
Returns the partial derivatives of the combination with respect to the inputs.
ActivationFunction activation_function
Activation function variable.
Definition: perceptron.h:174
void set(void)
Sets the number of inputs to zero and the rest of members to their default values.
Definition: perceptron.cpp:265
bool operator==(const Perceptron &) const
Definition: perceptron.cpp:107
size_t get_inputs_number(void) const
Returns the number of inputs to the neuron.
Definition: perceptron.cpp:192
double calculate_output(const Vector< double > &) const
void initialize_synaptic_weights_normal(const double &, const double &)
Definition: perceptron.cpp:659
Matrix< double > calculate_combination_Hessian(const Vector< double > &) const
This method retuns the second derivatives of the combination with respect to the inputs.
void grow_input(void)
virtual ~Perceptron(void)
Definition: perceptron.cpp:70
std::string write_expression(const Vector< std::string > &, const std::string &) const
const bool & get_display(void) const
Definition: perceptron.cpp:255
void randomize_normal(const double &=0.0, const double &=1.0)
Definition: vector.h:867
void initialize_bias(const double &)
Definition: perceptron.cpp:566
void initialize_parameters(const double &)
Definition: perceptron.cpp:670
double calculate_activation_derivative(const double &) const
Definition: perceptron.cpp:860
const Vector< double > & arrange_synaptic_weights(void) const
Returns the synaptic weight values of the neuron.
Definition: perceptron.cpp:212
Vector< double > calculate_gradient(const Vector< double > &) const
const double & get_bias(void) const
Returns the bias value of the neuron.
Definition: perceptron.cpp:202
double calculate_activation_second_derivative(const double &) const
Definition: perceptron.cpp:945
void set_synaptic_weight(const size_t &, const double &)
Definition: perceptron.cpp:433
void set_parameters(const Vector< double > &)
Definition: perceptron.cpp:529