// OpenNN 2.2
// Open Neural Networks Library
// outputs_integrals.cpp

/****************************************************************************************************************/
/*                                                                                                              */
/*   OpenNN: Open Neural Networks Library                                                                       */
/*   www.artelnics.com/opennn                                                                                   */
/*                                                                                                              */
/*   O U T P U T S   I N T E G R A L S   C L A S S                                                              */
/*                                                                                                              */
/*   Roberto Lopez                                                                                              */
/*   Artelnics - Making intelligent use of data                                                                 */
/*                                                                                                              */
/****************************************************************************************************************/

// OpenNN includes

#include "outputs_integrals.h"

namespace OpenNN
{
21 // DEFAULT CONSTRUCTOR
22 
26 
28  : PerformanceTerm()
29 {
30  set_default();
31 }
32 
33 
34 // NEURAL NETWORK CONSTRUCTOR
35 
40 
42 : PerformanceTerm(new_neural_network_pointer)
43 {
44  set_default();
45 }
46 
47 
48 // XML CONSTRUCTOR
49 
54 
55 OutputsIntegrals::OutputsIntegrals(const tinyxml2::XMLDocument& outputs_integrals_document)
56  : PerformanceTerm(outputs_integrals_document)
57 {
58  set_default();
59 
60  from_XML(outputs_integrals_document);
61 }
62 
63 
64 // DESTRUCTOR
65 
67 
69 {
70 }
71 
72 
73 // const NumericalIntegration& get_numerical_integration(void) const method
74 
76 
78 {
79  return(numerical_integration);
80 }
81 
82 
83 // NumericalIntegration* get_numerical_integration_pointer(void) method
84 
86 
88 {
89  return(&numerical_integration);
90 }
91 
92 
93 // const Vector<double>& get_outputs_integrals_weights(void) const method
94 
96 
98 {
100 }
101 
102 
103 // const double& get_output_integral_weight(const size_t&) const method
104 
107 
108 const double& OutputsIntegrals::get_output_integral_weight(const size_t& i) const
109 {
110  return(outputs_integrals_weights[i]);
111 }
112 
113 
114 // void set_numerical_integration(const NumericalIntegration&) method
115 
118 
120 {
121  numerical_integration = new_numerical_integration;
122 }
123 
124 
125 // void set_outputs_integrals_weights(const Vector<double>&) method
126 
129 
130 void OutputsIntegrals::set_outputs_integrals_weights(const Vector<double>& new_outputs_integrals_weights)
131 {
132  outputs_integrals_weights = new_outputs_integrals_weights;
133 }
134 
135 
136 // void set_output_integral_weight(const size_t&, const double&) method
137 
141 
142 void OutputsIntegrals::set_output_integral_weight(const size_t& i, const double& new_output_integral_weight)
143 {
144  outputs_integrals_weights[i] = new_output_integral_weight;
145 }
146 
147 
148 // void set_default(void) method
149 
155 
157 {
158  size_t outputs_number = 0;
159 
161  {
163  {
164  const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();
165 
166  outputs_number = multilayer_perceptron_pointer->get_inputs_number();
167  }
168  }
169 
170  outputs_integrals_weights.set(outputs_number, 1.0);
171 
172  display = true;
173 }
174 
175 
176 // METHODS
177 
178 // void check(void) const method
179 
184 
185 void OutputsIntegrals::check(void) const
186 {
187  std::ostringstream buffer;
188 
189  // Neural network stuff
190 
192  {
193  buffer << "OpenNN Exception: OutputsIntegrals class.\n"
194  << "void check(void) const method.\n"
195  << "Pointer to neural network is NULL.\n";
196 
197  throw std::logic_error(buffer.str());
198  }
199 
200  const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();
201 
202  if(!multilayer_perceptron_pointer)
203  {
204  buffer << "OpenNN Exception: OutputsIntegrals class.\n"
205  << "void check(void) const method.\n"
206  << "Pointer to multilayer perceptron is NULL.\n";
207 
208  throw std::logic_error(buffer.str());
209  }
210 
211  const size_t inputs_number = multilayer_perceptron_pointer->get_inputs_number();
212  const size_t outputs_number = multilayer_perceptron_pointer->get_outputs_number();
213 
214  if(inputs_number != 1)
215  {
216  buffer << "OpenNN Exception: OutputsIntegrals class.\n"
217  << "void check(void) const method.\n"
218  << "Number of inputs in multilayer perceptron is not one.\n";
219 
220  throw std::logic_error(buffer.str());
221  }
222 
223  if(outputs_number == 0)
224  {
225  buffer << "OpenNN Exception: OutputsIntegrals class.\n"
226  << "void check(void) const method.\n"
227  << "Number of outputs in multilayer perceptron object is zero.\n";
228 
229  throw std::logic_error(buffer.str());
230  }
231 }
232 
233 
234 // double calculate_performance(void) const method
235 
238 
240 {
241  std::ostringstream buffer;
242 
243  buffer << "OpenNN Exception: OutputsIntegrals class.\n"
244  << "double calculate_performance(void) const method.\n"
245  << "This method is under development.\n";
246 
247  throw std::logic_error(buffer.str());
248 
249 /*
250  // Control sentence
251 
252  #ifndef NDEBUG
253 
254  check();
255 
256  #endif
257 
258  // Neural network stuff
259 
260  const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();
261 
262  const size_t outputs_number = multilayer_perceptron_pointer->get_outputs_number();
263 
264  // Outputs integrals
265 
266  double performance = 0;
267 
268  for(size_t i = 0; i < outputs_number; i++)
269  {
270  performance += 0.0;
271  }
272 
273  return(performance);
274 */
275 }
276 
277 
278 // double calculate_performance(const Vector<double>&) const method
279 
282 // @param parameters Vector of potential parameters for the neural network associated to the performance functional.
283 
285 {
286  std::ostringstream buffer;
287 
288  buffer << "OpenNN Exception: OutputsIntegrals class.\n"
289  << "double calculate_performance(const Vector<double>&) const method.\n"
290  << "This method is under development.\n";
291 
292  throw std::logic_error(buffer.str());
293 /*
294  // Control sentence (if debug)
295 
296  #ifndef NDEBUG
297 
298  check();
299 
300  #endif
301 
302  #ifndef NDEBUG
303 
304  const size_t size = parameters.size();
305 
306  const size_t parameters_number = neural_network_pointer->count_parameters_number();
307 
308  if(size != parameters_number)
309  {
310  std::ostringstream buffer;
311 
312  buffer << "OpenNN Exception: OutputsIntegrals class." << std::endl
313  << "double calculate_performance(const Vector<double>&) const method." << std::endl
314  << "Size (" << size << ") must be equal to number of parameters (" << parameters_number << ")." << std::endl;
315 
316  throw std::logic_error(buffer.str());
317  }
318 
319  #endif
320 
321  NeuralNetwork neural_network_copy(*neural_network_pointer);
322 
323  neural_network_copy.set_parameters(parameters);
324 
325  OutputsIntegrals sum_squared_error_copy(*this);
326 
327  sum_squared_error_copy.set_neural_network_pointer(&neural_network_copy);
328 
329  return(sum_squared_error_copy.calculate_performance());
330 */
331 }
332 
333 
334 // Vector<double> calculate_gradient(void) const method
335 
339 
341 {
342  // Neural network stuff
343 
344  #ifndef NDEBUG
345 
346  check();
347 
348  #endif
349 
350  Vector<double> gradient;
351 
352  return(gradient);
353 }
354 
355 
356 // Matrix<double> calculate_Hessian(void) const method
357 
361 
363 {
364  // Neural network stuff
365 
366  #ifndef NDEBUG
367 
368  check();
369 
370  #endif
371 
372  Matrix<double> Hessian;
373 
374  return(Hessian);
375 }
376 
377 
378 // std::string write_performance_term_type(void) const method
379 
381 
383 {
384  return("OUTPUTS_INTEGRALS");
385 }
386 
387 
388 // tinyxml2::XMLDocument* to_XML(void) method method
389 
391 
392 tinyxml2::XMLDocument* OutputsIntegrals::to_XML(void) const
393 {
394  std::ostringstream buffer;
395 
396  tinyxml2::XMLDocument* document = new tinyxml2::XMLDocument;
397 
398  // Nueral network outputs integrals
399 
400  tinyxml2::XMLElement* outputs_integrals_element = document->NewElement("OutputsIntegrals");
401 
402  document->InsertFirstChild(outputs_integrals_element);
403 
404  // Numerical differentiation
405 
407  {
408  tinyxml2::XMLElement* element = numerical_differentiation_pointer->to_XML()->FirstChildElement();
409  outputs_integrals_element->LinkEndChild(element);
410  }
411 
412  // Outputs integrals weights
413  {
414  tinyxml2::XMLElement* element = document->NewElement("OutputsIntegralsWeights");
415  outputs_integrals_element->LinkEndChild(element);
416 
417  buffer.str("");
418  buffer << outputs_integrals_weights;
419 
420  tinyxml2::XMLText* text = document->NewText(buffer.str().c_str());
421  element->LinkEndChild(text);
422  }
423 
424  // Display
425  {
426  tinyxml2::XMLElement* element = document->NewElement("Display");
427  outputs_integrals_element->LinkEndChild(element);
428 
429  buffer.str("");
430  buffer << display;
431 
432  tinyxml2::XMLText* text = document->NewText(buffer.str().c_str());
433  element->LinkEndChild(text);
434  }
435 
436  return(document);
437 }
438 
439 
440 // void from_XML(const tinyxml2::XMLDocument&) method
441 
444 
445 void OutputsIntegrals::from_XML(const tinyxml2::XMLDocument& document)
446 {
447  const tinyxml2::XMLElement* root_element = document.FirstChildElement("OutputsIntegrals");
448 
449  if(!root_element)
450  {
451  std::ostringstream buffer;
452 
453  buffer << "OpenNN Exception: OutputsIntegrals class.\n"
454  << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
455  << "Outputs integrals element is NULL.\n";
456 
457  throw std::logic_error(buffer.str());
458  }
459 
460  // Display
461  {
462  const tinyxml2::XMLElement* display_element = root_element->FirstChildElement("Display");
463 
464  if(display_element)
465  {
466  const std::string new_display_string = display_element->GetText();
467 
468  try
469  {
470  set_display(new_display_string != "0");
471  }
472  catch(const std::logic_error& e)
473  {
474  std::cout << e.what() << std::endl;
475  }
476  }
477  }
478 
479 }
}


// OpenNN: Open Neural Networks Library.
// Copyright (c) 2005-2015 Roberto Lopez.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
/*
Doxygen member-summary residue carried over from the documentation extraction
(not part of the original source; kept for reference, wrapped so the file compiles):

const Vector< double > & get_outputs_integrals_weights(void) const
Returns the weights for each integral of the neural network outputs.
size_t get_inputs_number(void) const
Returns the number of inputs to the multilayer perceptron.
void set(void)
Sets the size of a vector to zero.
Definition: vector.h:656
NumericalIntegration numerical_integration
Object for numerical integration of functions.
NumericalIntegration * get_numerical_integration_pointer(void)
Returns a pointer to the numerical integration object inside the outputs integrals object.
Vector< double > outputs_integrals_weights
Weight for each output integral.
Vector< double > calculate_gradient(void) const
size_t get_outputs_number(void) const
Returns the number of outputs neurons in the multilayer perceptron.
const NumericalIntegration & get_numerical_integration(void) const
Returns a constant reference to the numerical integration object inside the outputs integrals object.
Matrix< double > calculate_Hessian(void) const
void set_display(const bool &)
MultilayerPerceptron * get_multilayer_perceptron_pointer(void) const
Returns a pointer to the multilayer perceptron composing this neural network.
tinyxml2::XMLDocument * to_XML(void) const
Returns a representation of the outputs integrals object, in XML format.
NeuralNetwork * neural_network_pointer
Pointer to a neural network object.
NumericalDifferentiation * numerical_differentiation_pointer
Numerical differentiation object.
void set_outputs_integrals_weights(const Vector< double > &)
const double & get_output_integral_weight(const size_t &) const
bool display
Display messages to screen.
double calculate_performance(void) const
void set_numerical_integration(const NumericalIntegration &)
std::string write_performance_term_type(void) const
Returns a string with the name of the outputs integrals performance type, "OUTPUTS_INTEGRALS".
virtual ~OutputsIntegrals(void)
Destructor.
bool has_multilayer_perceptron(void) const
void set_output_integral_weight(const size_t &, const double &)
void from_XML(const tinyxml2::XMLDocument &)
*/