OpenNN  2.2
Open Neural Networks Library
neural_parameters_norm.cpp
/*****************************************************************************************************************
*
*   OpenNN: Open Neural Networks Library
*   www.artelnics.com/opennn
*
*   N E U R A L   P A R A M E T E R S   N O R M   C L A S S
*
*   Roberto Lopez
*   Artelnics - Making intelligent use of data
*
*****************************************************************************************************************/

// OpenNN includes

#include "neural_parameters_norm.h"

namespace OpenNN
{

// DEFAULT CONSTRUCTOR

/// Default constructor.
/// It creates a neural parameters norm performance term not associated to any neural network
/// and sets the rest of the members to their default values.

NeuralParametersNorm::NeuralParametersNorm(void)
 : PerformanceTerm()
{
   set_default();
}


// NEURAL NETWORK CONSTRUCTOR

/// Neural network constructor.
/// It creates a neural parameters norm performance term associated to a neural network
/// and sets the rest of the members to their default values.
/// @param new_neural_network_pointer Pointer to a neural network object.

NeuralParametersNorm::NeuralParametersNorm(NeuralNetwork* new_neural_network_pointer)
: PerformanceTerm(new_neural_network_pointer)
{
   set_default();
}


// XML CONSTRUCTOR

/// XML constructor.
/// It creates a neural parameters norm performance term not associated to any neural network,
/// sets the default member values and then loads the members from a TinyXML document.
/// @param neural_parameters_norm_document TinyXML document with the neural parameters norm elements.

NeuralParametersNorm::NeuralParametersNorm(const tinyxml2::XMLDocument& neural_parameters_norm_document)
 : PerformanceTerm()
{
   set_default();

   from_XML(neural_parameters_norm_document);
}


// DESTRUCTOR

/// Destructor.
/// This destructor does not delete any pointer.

NeuralParametersNorm::~NeuralParametersNorm(void)
{
}


// METHODS

// const double& get_neural_parameters_norm_weight(void) const method

/// Returns the weight value for the neural parameters norm in the performance term expression.

const double& NeuralParametersNorm::get_neural_parameters_norm_weight(void) const
{
   return(neural_parameters_norm_weight);
}


// void set_neural_parameters_norm_weight(const double&) method

/// Sets a new weight value for the neural parameters norm in the performance term expression.

void NeuralParametersNorm::set_neural_parameters_norm_weight(const double& new_neural_parameters_norm_weight)
{
   neural_parameters_norm_weight = new_neural_parameters_norm_weight;
}


// void set_default(void) method

/// Sets the default values for the members of this object.

void NeuralParametersNorm::set_default(void)
{
   neural_parameters_norm_weight = 0.1;

   display = true;
}


// void check(void) const method

/// Checks that there is a neural network associated to this performance term, and that it has
/// a multilayer perceptron with at least one input and one output.
/// If any of these conditions does not hold, an exception is thrown.

void NeuralParametersNorm::check(void) const
{
   std::ostringstream buffer;

   // Neural network stuff

   if(!neural_network_pointer)
   {
      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void check(void) const method.\n"
             << "Pointer to neural network is NULL.\n";

      throw std::logic_error(buffer.str());
   }

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   if(!multilayer_perceptron_pointer)
   {
      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void check(void) const method.\n"
             << "Pointer to multilayer perceptron is NULL.\n";

      throw std::logic_error(buffer.str());
   }

   const size_t inputs_number = multilayer_perceptron_pointer->get_inputs_number();
   const size_t outputs_number = multilayer_perceptron_pointer->get_outputs_number();

   if(inputs_number == 0)
   {
      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void check(void) const method.\n"
             << "Number of inputs in multilayer perceptron object is zero.\n";

      throw std::logic_error(buffer.str());
   }

   if(outputs_number == 0)
   {
      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void check(void) const method.\n"
             << "Number of outputs in multilayer perceptron object is zero.\n";

      throw std::logic_error(buffer.str());
   }
}


// double calculate_performance(void) const method

/// Returns the performance of this term, which is the weighted norm of the parameters of the
/// multilayer perceptron contained in the associated neural network.

double NeuralParametersNorm::calculate_performance(void) const
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   check();

   #endif

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   const Vector<double> neural_parameters = multilayer_perceptron_pointer->arrange_parameters();

   const double neural_parameters_norm = neural_parameters.calculate_norm();

   return(neural_parameters_norm_weight*neural_parameters_norm);
}
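
/*
// Usage sketch: evaluating this regularization term for a given neural network.
// The three-argument NeuralNetwork architecture constructor below is an assumption made for
// illustration only; any neural network containing a multilayer perceptron would do.
//
// NeuralNetwork neural_network(1, 2, 1);
//
// NeuralParametersNorm neural_parameters_norm(&neural_network);
// neural_parameters_norm.set_neural_parameters_norm_weight(0.1);
//
// const double regularization = neural_parameters_norm.calculate_performance();
//
// For example, if the seven biases and synaptic weights of the 1-2-1 multilayer perceptron were
// (3, 4, 0, 0, 0, 0, 0), the parameters norm would be sqrt(3*3 + 4*4) = 5 and the value returned
// would be 0.1*5 = 0.5.
*/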


// Vector<double> calculate_gradient(void) const method

/// Returns the gradient of this term with respect to the neural parameters, which is the
/// gradient of the parameters norm scaled by the regularization weight.

Vector<double> NeuralParametersNorm::calculate_gradient(void) const
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   check();

   #endif

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   const Vector<double> neural_parameters = multilayer_perceptron_pointer->arrange_parameters();

   return(neural_parameters.calculate_norm_gradient()*neural_parameters_norm_weight);
}


// Matrix<double> calculate_Hessian(void) const method

/// Returns the Hessian of this term with respect to the neural parameters, which is the
/// Hessian of the parameters norm scaled by the regularization weight.

Matrix<double> NeuralParametersNorm::calculate_Hessian(void) const
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   check();

   #endif

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   const Vector<double> neural_parameters = multilayer_perceptron_pointer->arrange_parameters();

   return(neural_parameters.calculate_norm_Hessian()*neural_parameters_norm_weight);
}
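
// Note on the derivatives used above: for the Euclidean norm n(p) = ||p||, the gradient is
// p/||p|| and the Hessian is (I - p*p'/||p||^2)/||p||, which is what calculate_norm_gradient()
// and calculate_norm_Hessian() are expected to return; both results are then scaled by
// neural_parameters_norm_weight.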


// double calculate_performance(const Vector<double>&) const method

/// Returns the neural parameters norm value for a given vector of parameters.
/// It does not set that vector of parameters to the neural network.
/// @param parameters Vector of parameters for the neural network associated to this term.

double NeuralParametersNorm::calculate_performance(const Vector<double>& parameters) const
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   check();

   #endif

   if(neural_network_pointer->has_independent_parameters())
   {
      const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

      const size_t neural_parameters_number = multilayer_perceptron_pointer->count_parameters_number();

      Vector<double> neural_parameters(parameters);
      neural_parameters.resize(neural_parameters_number);

      const double neural_parameters_norm = neural_parameters.calculate_norm();

      return(neural_parameters_norm*neural_parameters_norm_weight);
   }
   else
   {
      const double neural_parameters_norm = parameters.calculate_norm();

      return(neural_parameters_norm*neural_parameters_norm_weight);
   }
}


// Vector<double> calculate_gradient(const Vector<double>&) const method

/// Returns the gradient of the neural parameters norm for a given vector of parameters.
/// @param parameters Vector of parameters for the neural network associated to this term.

Vector<double> NeuralParametersNorm::calculate_gradient(const Vector<double>& parameters) const
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   check();

   #endif

   if(neural_network_pointer->has_independent_parameters())
   {
      const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

      const size_t neural_parameters_number = multilayer_perceptron_pointer->count_parameters_number();

      Vector<double> neural_parameters(parameters);
      neural_parameters.resize(neural_parameters_number);

      return(neural_parameters.calculate_norm_gradient()*neural_parameters_norm_weight);
   }
   else
   {
      return(parameters.calculate_norm_gradient()*neural_parameters_norm_weight);
   }
}


// Matrix<double> calculate_Hessian(const Vector<double>&) const method

/// Returns the Hessian of the neural parameters norm for a given vector of parameters.
/// @param parameters Vector of parameters for the neural network associated to this term.

Matrix<double> NeuralParametersNorm::calculate_Hessian(const Vector<double>& parameters) const
{
   // Control sentence (if debug)

   #ifndef NDEBUG

   check();

   #endif

   if(neural_network_pointer->has_independent_parameters())
   {
      const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

      const size_t neural_parameters_number = multilayer_perceptron_pointer->count_parameters_number();

      Vector<double> neural_parameters(parameters);
      neural_parameters.resize(neural_parameters_number);

      return(neural_parameters.calculate_norm_Hessian()*neural_parameters_norm_weight);
   }
   else
   {
      return(parameters.calculate_norm_Hessian()*neural_parameters_norm_weight);
   }
}
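
// In the three overloads above that take an explicit parameters vector, the vector is assumed to
// hold the multilayer perceptron parameters first, followed by any independent parameters of the
// neural network; when independent parameters are present, only the leading
// count_parameters_number() entries enter the norm, which is why the copy of the vector is
// resized before the norm, its gradient or its Hessian is computed.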

/*
// double calculate_generalization_performance(void) const method

double NeuralParametersNorm::calculate_generalization_performance(void) const
{
   #ifndef NDEBUG

   check();

   #endif

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   const Vector<double> neural_parameters = multilayer_perceptron_pointer->arrange_parameters();

   return(neural_parameters.calculate_norm()*neural_parameters_norm_weight);
}
*/

// std::string write_performance_term_type(void) const method

/// Returns a string with the name of this performance term type, "NEURAL_PARAMETERS_NORM".

std::string NeuralParametersNorm::write_performance_term_type(void) const
{
   return("NEURAL_PARAMETERS_NORM");
}


// std::string write_information(void) const method

/// Returns a string with the current value of the neural parameters norm term.

std::string NeuralParametersNorm::write_information(void) const
{
   std::ostringstream buffer;

   buffer << "Neural parameters norm: " << calculate_performance() << "\n";

   return(buffer.str());
}


// tinyxml2::XMLDocument* to_XML(void) const method

/// Returns a representation of the neural parameters norm object, in XML format.

tinyxml2::XMLDocument* NeuralParametersNorm::to_XML(void) const
{
   std::ostringstream buffer;

   tinyxml2::XMLDocument* document = new tinyxml2::XMLDocument;

   // Neural parameters norm

   tinyxml2::XMLElement* neural_network_parameters_norm_element = document->NewElement("NeuralParametersNorm");

   document->InsertFirstChild(neural_network_parameters_norm_element);

   // Neural parameters norm weight
   {
      tinyxml2::XMLElement* weight_element = document->NewElement("NeuralParametersNormWeight");
      neural_network_parameters_norm_element->LinkEndChild(weight_element);

      buffer.str("");
      buffer << neural_parameters_norm_weight;

      tinyxml2::XMLText* weight_text = document->NewText(buffer.str().c_str());
      weight_element->LinkEndChild(weight_text);
   }

   // Display

   {
      tinyxml2::XMLElement* display_element = document->NewElement("Display");
      neural_network_parameters_norm_element->LinkEndChild(display_element);

      buffer.str("");
      buffer << display;

      tinyxml2::XMLText* display_text = document->NewText(buffer.str().c_str());
      display_element->LinkEndChild(display_text);
   }

   return(document);
}
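
/*
// For reference, the document built by to_XML() has the following shape (values illustrative):
//
// <NeuralParametersNorm>
//    <NeuralParametersNormWeight>0.1</NeuralParametersNormWeight>
//    <Display>1</Display>
// </NeuralParametersNorm>
*/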


// void from_XML(const tinyxml2::XMLDocument&) method

/// Loads the members of this object from a TinyXML document.
/// @param document TinyXML document with the neural parameters norm element.

void NeuralParametersNorm::from_XML(const tinyxml2::XMLDocument& document)
{
   const tinyxml2::XMLElement* root_element = document.FirstChildElement("NeuralParametersNorm");

   if(!root_element)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
             << "Neural parameters norm element is NULL.\n";

      throw std::logic_error(buffer.str());
   }

   // Neural parameters norm weight
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("NeuralParametersNormWeight");

      if(element)
      {
         try
         {
            const double new_neural_parameters_norm_weight = atof(element->GetText());

            set_neural_parameters_norm_weight(new_neural_parameters_norm_weight);
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }

   // Display
   {
      const tinyxml2::XMLElement* element = root_element->FirstChildElement("Display");

      if(element)
      {
         try
         {
            const std::string new_display_string = element->GetText();

            set_display(new_display_string != "0");
         }
         catch(const std::logic_error& e)
         {
            std::cout << e.what() << std::endl;
         }
      }
   }
}
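
/*
// Round-trip sketch: serializing and restoring this term through the XML methods above.
// The caller owns the document returned by to_XML() and is responsible for deleting it.
//
// tinyxml2::XMLDocument* document = neural_parameters_norm.to_XML();
//
// NeuralParametersNorm restored_term(*document);   // the XML constructor calls from_XML()
//
// delete document;
*/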

}


// OpenNN: Open Neural Networks Library.
// Copyright (c) 2005-2015 Roberto Lopez.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA