16 #include "perceptron_layer.h"
44 set(new_inputs_number, new_perceptrons_number);
56 set(other_perceptron_layer);
78 if(
this != &other_perceptron_layer)
165 const size_t perceptrons_number =
perceptrons.size();
167 return(perceptrons_number);
184 if(index >= perceptrons_number)
186 std::ostringstream buffer;
188 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
189 <<
"const Perceptron& get_perceptron(const size_t&) const method.\n"
190 <<
"Index of perceptron must be less than layer size.\n";
192 throw std::logic_error(buffer.str());
209 size_t parameters_number = 0;
211 for(
size_t i = 0; i < perceptrons_number; i++)
213 parameters_number +=
perceptrons[i].count_parameters_number();
216 return(parameters_number);
231 if(perceptrons_number > 0)
233 cumulative_parameters_number[0] =
perceptrons[0].count_parameters_number();
235 for(
size_t i = 1; i < perceptrons_number; i++)
237 cumulative_parameters_number[i] = cumulative_parameters_number[i-1] +
perceptrons[i].count_parameters_number();
241 return(cumulative_parameters_number);
257 for(
size_t i = 0; i < perceptrons_number; i++)
279 Matrix<double> synaptic_weights(perceptrons_number, inputs_number);
281 for(
size_t i = 0; i < perceptrons_number; i++)
283 for(
size_t j = 0; j < inputs_number; j++)
285 synaptic_weights(i,j) =
perceptrons[i].get_synaptic_weight(j);
289 return(synaptic_weights);
303 if(perceptrons_number == 0)
315 const size_t perceptron_parameters_number =
perceptrons[0].count_parameters_number();
317 Vector<double> perceptron_parameters(perceptron_parameters_number);
321 for(
size_t i = 0; i < perceptrons_number; i++)
323 perceptron_parameters =
perceptrons[i].arrange_parameters();
324 parameters.
tuck_in(position, perceptron_parameters);
325 position += perceptron_parameters_number;
342 return(1 + inputs_number);
359 for(
size_t i = 0; i < perceptrons_number; i++)
361 perceptrons_parameters[i] =
perceptrons[i].arrange_parameters();
364 return(perceptrons_parameters);
377 if(perceptrons_number > 0)
383 std::ostringstream buffer;
385 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
386 <<
"Perceptron::ActivationFunction& get_activation_function(void) method.\n"
387 <<
"PerceptronLayer is empty.\n";
389 throw std::logic_error(buffer.str());
403 switch(activation_function)
405 case Perceptron::Logistic:
411 case Perceptron::HyperbolicTangent:
413 return(
"HyperbolicTangent");
417 case Perceptron::Threshold:
423 case Perceptron::SymmetricThreshold:
425 return(
"SymmetricThreshold");
429 case Perceptron::Linear:
437 std::ostringstream buffer;
439 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
440 <<
"std::string write_activation_function_name(void) const method.\n"
441 <<
"Unknown layer activation function.\n";
443 throw std::logic_error(buffer.str());
498 for(
size_t i = 0; i < new_perceptrons_number; i++)
500 perceptrons[i].set_inputs_number(new_inputs_number);
566 for(
size_t i = 0; i < perceptrons_number; i++)
568 perceptrons[i].set_inputs_number(new_inputs_number);
584 if(perceptrons_number > 0)
612 const size_t new_biases_size = new_biases.size();
614 if(new_biases_size != perceptrons_number)
616 std::ostringstream buffer;
618 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
619 <<
"void set_biases(const Vector<double>&) method.\n"
620 <<
"Size must be equal to number of perceptrons.\n";
622 throw std::logic_error(buffer.str());
629 for(
size_t i = 0; i < perceptrons_number; i++)
656 std::ostringstream buffer;
658 if(rows_number != perceptrons_number)
660 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
661 <<
"void set_synaptic_weights(const Matrix<double>&) method.\n"
662 <<
"Number of rows must be equal to size of layer.\n";
664 throw std::logic_error(buffer.str());
666 else if(columns_number != inputs_number)
668 std::ostringstream buffer;
670 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
671 <<
"void set_synaptic_weights(const Matrix<double>&) method.\n"
672 <<
"Number of columns must be equal to number of inputs.\n";
674 throw std::logic_error(buffer.str());
679 for(
size_t i = 0; i < perceptrons_number; i++)
681 for(
size_t j = 0; j < inputs_number; j++)
683 perceptrons[i].set_synaptic_weight(j, new_synaptic_weights(i,j));
704 const size_t new_parameters_size = new_parameters.size();
706 if(new_parameters_size != parameters_number)
708 std::ostringstream buffer;
710 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
711 <<
"void set_parameters(const Vector<double>&) method.\n"
712 <<
"Size of new parameters vector must be equal to number of parameters.\n";
714 throw std::logic_error(buffer.str());
719 if(perceptrons_number != 0)
721 const size_t perceptron_parameters_number =
perceptrons[0].count_parameters_number();
723 Vector<double> perceptron_parameters(perceptron_parameters_number);
727 for(
size_t i = 0; i < perceptrons_number; i++)
729 perceptron_parameters = new_parameters.
take_out(position, perceptron_parameters_number);
730 perceptrons[i].set_parameters(perceptron_parameters);
731 position += perceptron_parameters_number;
746 for(
size_t i = 0; i < perceptrons_number; i++)
748 perceptrons[i].set_activation_function(new_activation_function);
763 for(
size_t i = 0; i < perceptrons_number; i++)
765 perceptrons[i].set_activation_function(new_activation_function);
790 for(
size_t i = 0; i < inputs_number; i++)
794 for(
size_t i = 0; i < perceptrons_number; i++)
811 for(
size_t i = 0; i < perceptrons_number; i++)
835 if(index >= inputs_number)
837 std::ostringstream buffer;
839 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
840 <<
"void prune_input(const size_t&) method.\n"
841 <<
"Index of input is equal or greater than number of inputs.\n";
843 throw std::logic_error(buffer.str());
850 for(
size_t i = 0; i < perceptrons_number; i++)
870 if(index >= perceptrons_number)
872 std::ostringstream buffer;
874 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
875 <<
"void prune_perceptron(const size_t&) method.\n"
876 <<
"Index of perceptron is equal or greater than number of perceptrons.\n";
878 throw std::logic_error(buffer.str());
894 const size_t inputs_number = rand()%10 + 1;
895 const size_t perceptrons_number = rand()%10 + 1;
897 set(inputs_number, perceptrons_number);
912 for(
size_t i = 0; i < perceptrons_number; i++)
928 for(
size_t i = 0; i < perceptrons_number; i++)
1096 parameters.
randomize_normal(mean_standard_deviation[0], mean_standard_deviation[1]);
1123 const size_t inputs_size = inputs.size();
1127 if(inputs_size != inputs_number)
1129 std::ostringstream buffer;
1131 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1132 <<
"Vector<double> calculate_combinations(const Vector<double>&) const method.\n"
1133 <<
"Size of inputs to layer must be equal to number of layer inputs.\n";
1135 throw std::logic_error(buffer.str());
1146 for(
size_t i = 0; i < perceptrons_number; i++)
1148 combination[i] =
perceptrons[i].calculate_combination(inputs);
1151 return(combination);
1179 for(
size_t i = 0; i < perceptrons_number; i++)
1181 combination_Hessian_form[i].
set(inputs_number, inputs_number, 0.0);
1184 return(combination_Hessian_form);
1200 const size_t inputs_size = inputs.size();
1203 if(inputs_size != inputs_number)
1205 std::ostringstream buffer;
1207 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1208 <<
"Vector<double> calculate_combination_parameters(const Vector<double>&, const Vector<double>&) const method.\n"
1209 <<
"Size of layer inputs (" << inputs_size <<
") must be equal to number of layer inputs (" << inputs_number <<
").\n";
1211 throw std::logic_error(buffer.str());
1214 const size_t parameters_size = parameters.size();
1218 if(parameters_size != parameters_number)
1220 std::ostringstream buffer;
1222 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1223 <<
"Vector<double> calculate_combination_parameters(const Vector<double>&, const Vector<double>&) const method.\n"
1224 <<
"Size of layer parameters (" << parameters_size <<
") must be equal to number of layer parameters (" << parameters_number <<
").\n";
1226 throw std::logic_error(buffer.str());
1239 Vector<double> perceptron_parameters(perceptron_parameters_number);
1241 for(
size_t i = 0; i < perceptrons_number; i++)
1243 perceptron_parameters = parameters.
take_out(i*perceptron_parameters_number, perceptron_parameters_number);
1245 combinations[i] =
perceptrons[i].calculate_combination(inputs, perceptron_parameters);
1248 return(combinations);
1264 Matrix<double> combinations_Jacobian(perceptrons_number, parameters_number, 0.0);
1266 size_t column_index;
1268 for(
size_t i = 0; i < perceptrons_number; i++)
1272 column_index = (1 + inputs_number)*i;
1273 combinations_Jacobian(i,column_index) = 1.0;
1277 for(
size_t j = 0; j < inputs_number; j++)
1279 column_index = 1 + (1 + inputs_number)*i + j;
1280 combinations_Jacobian(i,column_index) = inputs[j];
1284 return(combinations_Jacobian);
1303 for(
size_t i = 0; i < perceptrons_number; i++)
1305 combination_parameters_Hessian_form[i].
set(parameters_number, parameters_number, 0.0);
1308 return(combination_parameters_Hessian_form);
1325 const size_t combination_size = combinations.size();
1327 if(combination_size != perceptrons_number)
1329 std::ostringstream buffer;
1331 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1332 <<
"Vector<double> calculate_activation(const Vector<double>&) const method.\n"
1333 <<
"Combination size must be equal to number of neurons.\n";
1335 throw std::logic_error(buffer.str());
1344 for(
size_t i = 0; i < perceptrons_number; i++)
1346 activations[i] =
perceptrons[i].calculate_activation(combinations[i]);
1349 return(activations);
1366 const size_t combination_size = combination.size();
1368 if(combination_size != perceptrons_number)
1370 std::ostringstream buffer;
1372 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1373 <<
"Vector<double> calculate_activations_derivatives(const Vector<double>&) const method.\n"
1374 <<
"Size of combination must be equal to number of neurons.\n";
1376 throw std::logic_error(buffer.str());
1385 for(
size_t i = 0; i < perceptrons_number; i++)
1387 activation_derivatives[i] =
perceptrons[i].calculate_activation_derivative(combination[i]);
1390 return(activation_derivatives);
1407 const size_t combination_size = combination.size();
1409 if(combination_size != perceptrons_number)
1411 std::ostringstream buffer;
1413 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1414 <<
"Vector<double> calculate_activations_second_derivatives(const Vector<double>&) const method.\n"
1415 <<
"Size of combinations must be equal to number of neurons.\n";
1417 throw std::logic_error(buffer.str());
1424 Vector<double> activation_second_derivatives(perceptrons_number);
1426 for(
size_t i = 0; i < perceptrons_number; i++)
1428 activation_second_derivatives[i] =
perceptrons[i].calculate_activation_second_derivative(combination[i]);
1431 return(activation_second_derivatives);
1444 Matrix<double> activation_Jacobian(perceptrons_number, perceptrons_number, 0.0);
1446 activation_Jacobian.
set_diagonal(activation_derivative);
1448 return(activation_Jacobian);
1463 for(
size_t i = 0; i < perceptrons_number; i++)
1465 activation_Hessian_form[i].
set(perceptrons_number, perceptrons_number, 0.0);
1466 activation_Hessian_form[i](i,i) = activation_second_derivative[i];
1469 return(activation_Hessian_form);
1484 const size_t inputs_size = inputs.size();
1488 if(inputs_size != inputs_number)
1490 std::ostringstream buffer;
1492 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1493 <<
"Vector<double> calculate_outputs(const Vector<double>&) const method.\n"
1494 <<
"Size of inputs must be equal to number of inputs to layer.\n";
1496 throw std::logic_error(buffer.str());
1522 const size_t inputs_size = inputs.size();
1524 if(inputs_size != inputs_number)
1526 std::ostringstream buffer;
1528 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1529 <<
"Matrix<double> calculate_Jacobian(const Vector<double>&) const method.\n"
1530 <<
"Size of inputs must be equal to number of inputs to layer.\n";
1532 throw std::logic_error(buffer.str());
1543 return(activations_derivatives*synaptic_weights);
1566 for(
size_t i = 0; i < perceptrons_number; i++)
1568 activation_Hessian_form[i].
set(perceptrons_number, perceptrons_number, 0.0);
1569 activation_Hessian_form[i](i,i) = activations_second_derivatives[i];
1574 return(Hessian_form);
1590 const size_t inputs_size = inputs.size();
1594 if(inputs_size != inputs_number)
1596 std::ostringstream buffer;
1598 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1599 <<
"Vector<double> calculate_outputs(const Vector<double>&, const Vector<double>&) const method.\n"
1600 <<
"Size of layer inputs (" << inputs_size <<
") must be equal to number of layer inputs (" << inputs_number <<
").\n";
1602 throw std::logic_error(buffer.str());
1605 const size_t parameters_size = parameters.size();
1609 if(parameters_size != parameters_number)
1611 std::ostringstream buffer;
1613 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1614 <<
"Vector<double> calculate_outputs(const Vector<double>&, const Vector<double>&) const method.\n"
1615 <<
"Size of parameters (" << parameters_size <<
") must be equal to number of parameters (" << parameters_number <<
").\n";
1617 throw std::logic_error(buffer.str());
1642 const size_t inputs_size = inputs.size();
1644 if(inputs_size != inputs_number)
1646 std::ostringstream buffer;
1648 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1649 <<
"Matrix<double> calculate_parameters_Jacobian(const Vector<double>&, const Vector<double>&) const method.\n"
1650 <<
"Size of inputs must be equal to number of inputs.\n";
1652 throw std::logic_error(buffer.str());
1665 return(activation_Jacobian.
dot(combinations_Jacobian));
1684 const size_t inputs_size = inputs.size();
1686 if(inputs_size != inputs_number)
1688 std::ostringstream buffer;
1690 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1691 <<
"Vector< Matrix<double> > calculate_Hessian_form(const Vector<double>&, const Vector<double>&) const method.\n"
1692 <<
"Size must be equal to number of inputs of layer.\n";
1694 throw std::logic_error(buffer.str());
1713 for(
size_t i = 0; i < perceptrons_number; i++)
1715 parameters_Hessian_form[i] = combination_parameters_Jacobian.
calculate_transpose().
dot(activation_Hessian_form[i]).
dot(combination_parameters_Jacobian);
1718 return(parameters_Hessian_form);
1737 const size_t inputs_name_size = inputs_name.size();
1739 if(inputs_name_size != inputs_number)
1741 std::ostringstream buffer;
1743 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1744 <<
"std::string write_expression(const Vector<std::string>&, const Vector<std::string>&) const method.\n"
1745 <<
"Size of inputs name must be equal to number of layer inputs.\n";
1747 throw std::logic_error(buffer.str());
1750 const size_t outputs_name_size = outputs_name.size();
1752 if(outputs_name_size != perceptrons_number)
1754 std::ostringstream buffer;
1756 buffer <<
"OpenNN Exception: PerceptronLayer class.\n"
1757 <<
"std::string write_expression(const Vector<std::string>&, const Vector<std::string>&) const method.\n"
1758 <<
"Size of outputs name must be equal to number of perceptrons.\n";
1760 throw std::logic_error(buffer.str());
1765 std::ostringstream buffer;
1767 for(
size_t i = 0; i < perceptrons_number; i++)
1769 buffer <<
perceptrons[i].write_expression(inputs_name, outputs_name[i]);
1772 return(buffer.str());
void set_synaptic_weights(const Matrix< double > &)
void randomize_parameters_normal(void)
void randomize_uniform(const double &=-1.0, const double &=1.0)
const bool & get_display(void) const
void set_inputs_number(const size_t &)
Vector< double > arrange_parameters(void) const
Vector< T > take_out(const size_t &, const size_t &) const
Vector< double > calculate_activations_derivatives(const Vector< double > &) const
void prune_perceptron(const size_t &)
PerceptronLayer & operator=(const PerceptronLayer &)
void initialize_random(void)
void set_biases(const Vector< double > &)
std::string write_activation_function(void) const
const Vector< Perceptron > & get_perceptrons(void) const
Returns a constant reference to the vector of perceptrons defining the layer.
size_t count_parameters_number(void) const
Returns the number of parameters (biases and synaptic weights) of the layer.
Matrix< double > calculate_combinations_Jacobian(const Vector< double > &) const
Vector< double > calculate_combinations(const Vector< double > &) const
size_t get_inputs_number(void) const
Returns the number of inputs to the layer.
void set(void)
Sets the size of a vector to zero.
void set_perceptron(const size_t &, const Perceptron &)
virtual ~PerceptronLayer(void)
ActivationFunction
Enumeration of available activation functions for the perceptron neuron model.
void set_diagonal(const T &)
Vector< Matrix< double > > calculate_Hessian_form(const Vector< double > &) const
bool display
Display messages to screen.
const Perceptron::ActivationFunction & get_activation_function(void) const
Vector< Vector< double > > arrange_perceptrons_parameters(void) const
void set_display(const bool &)
const size_t & get_columns_number(void) const
Returns the number of columns in the matrix.
Vector< double > calculate_outputs(const Vector< double > &) const
void initialize_synaptic_weights(const double &)
Vector< double > calculate_activations_second_derivatives(const Vector< double > &) const
void grow_inputs(const size_t &)
const Perceptron & get_perceptron(const size_t &) const
Vector< size_t > count_cumulative_parameters_number(void) const
void set_parameters(const Vector< double > &)
Vector< Matrix< double > > calculate_combinations_Hessian_form(const Vector< double > &) const
void randomize_parameters_uniform(void)
void tuck_in(const size_t &, const Vector< T > &)
void randomize_normal(const double &=0.0, const double &=1.0)
double dot(const Vector< double > &) const
Matrix< double > arrange_activations_Jacobian(const Vector< double > &) const
const size_t & get_rows_number(void) const
Returns the number of rows in the matrix.
Matrix< T > calculate_transpose(void) const
Returns the transpose of the matrix.
Vector< Matrix< double > > arrange_activations_Hessian_form(const Vector< double > &) const
void initialize_parameters(const double &)
void initialize_biases(const double &)
std::string write_expression(const Vector< std::string > &, const Vector< std::string > &) const
double calculate_parameters_norm(void) const
Calculates the norm of a layer parameters vector.
size_t get_perceptrons_number(void) const
Returns the size of the perceptrons vector.
void set_perceptrons_number(const size_t &)
Vector< double > dot(const Vector< double > &) const
void prune_input(const size_t &)
bool operator==(const PerceptronLayer &) const
void grow_perceptrons(const size_t &)
bool is_empty(void) const
Returns true if the size of the layer is zero, and false otherwise.
size_t count_perceptron_parameters_number(void) const
Vector< Perceptron > perceptrons
Matrix< double > calculate_Jacobian(const Vector< double > &) const
void set_perceptrons(const Vector< Perceptron > &)
Vector< double > calculate_activations(const Vector< double > &) const
Matrix< double > arrange_synaptic_weights(void) const
Vector< double > arrange_biases(void) const
void set_activation_function(const Perceptron::ActivationFunction &)
void initialize_parameters(const double &)