#ifndef __NEWTONMETHOD_H__
#define __NEWTONMETHOD_H__

#include "performance_functional.h"

#include "training_algorithm.h"
#include "training_rate_algorithm.h"

#include "../tinyxml2/tinyxml2.h"
tinyxml2::XMLDocument* to_XML(void) const;
void from_XML(const tinyxml2::XMLDocument&);
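A minimal usage sketch of the two XML methods above, assuming an already configured NewtonMethod object. The header name, the OpenNN namespace, the file name and the ownership of the returned document are assumptions, not documented behaviour.

#include "newton_method.h"            // assumed header for this class
#include "../tinyxml2/tinyxml2.h"

// Hypothetical round trip: write the Newton method settings to disk and read them back.
void save_and_restore_settings(OpenNN::NewtonMethod& newton_method)
{
   tinyxml2::XMLDocument* document = newton_method.to_XML();   // serialize the current settings
   document->SaveFile("newton_method.xml");                    // file name is illustrative
   delete document;                                            // assumed: the caller owns the document

   tinyxml2::XMLDocument loaded_document;
   loaded_document.LoadFile("newton_method.xml");
   newton_method.from_XML(loaded_document);                    // restore the settings
}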
void set_error_gradient_norm(const double &)
bool reserve_parameters_history
True if the parameters history matrix is to be reserved, false otherwise.
void set_reserve_parameters_norm_history(const bool &)
const bool & get_reserve_inverse_Hessian_history(void) const
Returns true if the inverse Hessian history vector of matrices is to be reserved, and false otherwise.
NewtonMethodResults * perform_training(void)
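A sketch of how perform_training() is typically driven, using only members listed in this reference. The namespace, the nesting of NewtonMethodResults inside NewtonMethod, the numeric settings and the ownership of the returned results object are assumptions.

#include <iostream>
#include "newton_method.h"            // assumed header for this class
#include "performance_functional.h"

using namespace OpenNN;               // namespace name is assumed

// Hypothetical training driver.
void train(NewtonMethod& newton_method, PerformanceFunctional& performance_functional)
{
   newton_method.set_performance_functional_pointer(&performance_functional);

   newton_method.set_maximum_iterations_number(1000);
   newton_method.set_performance_goal(1.0e-3);
   newton_method.set_display_period(10);

   // perform_training() returns the results structure documented further down.
   NewtonMethod::NewtonMethodResults* results = newton_method.perform_training();

   std::cout << results->to_string() << std::endl;   // summary of the final results

   delete results;                                   // assumed: the caller owns the results
}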
void set_minimum_parameters_increment_norm(const double &)
tinyxml2::XMLDocument * to_XML(void) const
const bool & get_reserve_elapsed_time_history(void) const
Returns true if the elapsed time history vector is to be reserved, and false otherwise.
void resize_training_history(const size_t &)
double final_training_rate
Final Newton method training rate.
void set_error_parameters_norm(const double &)
double final_generalization_performance
Final generalization performance.
double warning_parameters_norm
Value for the parameters norm at which a warning message is written to the screen.
void set_reserve_generalization_performance_history(const bool &)
double final_parameters_norm
Final neural network parameters norm.
void set_error_training_rate(const double &)
std::string to_string(void) const
Returns a string representation of the current Newton method results structure.
size_t iterations_number
Number of training iterations performed.
const bool & get_reserve_gradient_history(void) const
Returns true if the gradient history vector of vectors is to be reserved, and false otherwise.
size_t maximum_generalization_performance_decreases
Maximum number of generalization performance decreases allowed during the training process. It is used as a stopping criterion.
const bool & get_reserve_training_rate_history(void) const
Returns true if the training rate history vector is to be reserved, and false otherwise.
const double & get_error_training_rate(void) const
const double & get_error_parameters_norm(void) const
size_t maximum_iterations_number
Maximum number of training iterations to perform. It is used as a stopping criterion.
void set_display_period(const size_t &)
void set_maximum_iterations_number(const size_t &)
const bool & get_reserve_gradient_norm_history(void) const
Returns true if the gradient norm history vector is to be reserved, and false otherwise.
void set_reserve_training_direction_history(const bool &)
void set_reserve_inverse_Hessian_history(const bool &)
Matrix< std::string > to_string_matrix(void) const
const double & get_gradient_norm_goal(void) const
void set_warning_gradient_norm(const double &)
void set_reserve_elapsed_time_history(const bool &)
virtual ~NewtonMethodResults(void)
Destructor.
const bool & get_reserve_performance_history(void) const
Returns true if the performance history vector is to be reserved, and false otherwise.
Vector< Vector< double > > gradient_history
History of the performance function gradient over the training iterations.
Vector< double > calculate_gradient_descent_training_direction(const Vector< double > &) const
void set_reserve_performance_history(const bool &)
void from_XML(const tinyxml2::XMLDocument &)
NewtonMethodResults(NewtonMethod *new_Newton_method_pointer)
Newton method constructor.
const bool & get_reserve_generalization_performance_history(void) const
Returns true if the generalization performance history vector is to be reserved, and false otherwise.
double elapsed_time
Elapsed time of the training process.
double final_performance
Final performance function evaluation.
bool reserve_performance_history
True if the performance history vector is to be reserved, false otherwise.
const double & get_warning_gradient_norm(void) const
TrainingRateAlgorithm training_rate_algorithm
Training rate algorithm object used to perform the line minimization along the training direction.
const double & get_performance_goal(void) const
Vector< Vector< double > > training_direction_history
History of the Newton method training direction over the training iterations.
Vector< double > performance_history
History of the performance function evaluation over the training iterations.
const double & get_minimum_parameters_increment_norm(void) const
Returns the minimum norm of the parameters increment vector used as a stopping criterion during training.
Vector< double > elapsed_time_history
History of the elapsed time over the training iterations.
Vector< double > final_gradient
Final performance function gradient.
void set_reserve_gradient_norm_history(const bool &)
void set_maximum_time(const double &)
const double & get_error_gradient_norm(void) const
void set_performance_functional_pointer(PerformanceFunctional *)
const bool & get_reserve_parameters_norm_history(void) const
Returns true if the parameters norm history vector is to be reserved, and false otherwise.
NewtonMethod * Newton_method_pointer
Pointer to the Newton method object for which the training results are to be stored.
bool reserve_elapsed_time_history
True if the elapsed time history vector is to be reserved, false otherwise.
std::string write_training_algorithm_type(void) const
Returns a string with the type of training algorithm.
double performance_goal
Goal value for the performance. It is used as a stopping criterion.
double error_training_rate
Training rate at which the line minimization algorithm is assumed to be unable to bracket a minimum.
void set_default(void)
Sets the members of the training algorithm object to their default values.
Vector< double > gradient_norm_history
History of the gradient norm over the training iterations.
double error_parameters_norm
Value for the parameters norm at which the training process is assumed to fail.
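The warning and error thresholds above come in pairs of setters. A minimal configuration sketch follows; the header, namespace and numeric values are illustrative assumptions, and the library defaults are not documented here.

#include "newton_method.h"   // assumed header for this class
using namespace OpenNN;      // namespace name is assumed

// Hypothetical safeguard configuration using the setters listed in this reference.
void configure_safeguards(NewtonMethod& newton_method)
{
   // Norm values at which a warning message is written to the screen.
   newton_method.set_warning_parameters_norm(1.0e3);
   newton_method.set_warning_gradient_norm(1.0e3);
   newton_method.set_warning_training_rate(1.0e3);

   // Norm values at which the training process is assumed to fail.
   newton_method.set_error_parameters_norm(1.0e6);
   newton_method.set_error_gradient_norm(1.0e6);
   newton_method.set_error_training_rate(1.0e6);
}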
bool reserve_inverse_Hessian_history
True if the inverse Hessian history vector of matrices is to be reserved, false otherwise.
Vector< double > calculate_training_direction(const Vector< double > &, const Matrix< double > &) const
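calculate_training_direction() takes a gradient vector and a matrix argument. A self-contained sketch of the underlying Newton direction d = -H⁻¹g follows, written with plain std::vector instead of the library's Vector and Matrix types; whether the library passes the Hessian or its inverse is not stated here, so the sketch takes the inverse explicitly, and the function name is illustrative.

#include <vector>
#include <cstddef>

// Newton training direction d = -H^{-1} * g for a dense inverse Hessian.
std::vector<double> newton_training_direction(const std::vector< std::vector<double> >& inverse_Hessian,
                                              const std::vector<double>& gradient)
{
   const std::size_t n = gradient.size();

   std::vector<double> direction(n, 0.0);

   for(std::size_t i = 0; i < n; i++)
   {
      for(std::size_t j = 0; j < n; j++)
      {
         direction[i] -= inverse_Hessian[i][j]*gradient[j];
      }
   }

   return direction;   // the training rate algorithm then chooses a step size along this direction
}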
void set_reserve_parameters_history(const bool &)
bool reserve_gradient_norm_history
True if the gradient norm history vector is to be reserved, false otherwise.
const double & get_maximum_time(void) const
Returns the maximum training time.
void set_reserve_gradient_history(const bool &)
void set_performance_goal(const double &)
Vector< double > final_training_direction
Final Newton method training direction.
bool reserve_training_direction_history
True if the training direction history matrix is to be reserved, false otherwise.
Vector< double > training_rate_history
History of the Newton method training rate over the training iterations.
bool reserve_training_rate_history
True if the training rate history vector is to be reserved, false otherwise.
TrainingRateAlgorithm * get_training_rate_algorithm_pointer(void)
Returns a pointer to the training rate algorithm object inside the Newton method object.
void set_reserve_all_training_history(const bool &)
Sets whether or not the training history of all variables is to be reserved in memory.
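A short sketch of how the reserve flags interact with the results histories listed in this reference. The header, namespace, results nesting and ownership are assumptions, and the variable names are illustrative.

#include "newton_method.h"   // assumed header for this class
using namespace OpenNN;      // namespace name is assumed

// Hypothetical sketch: populate every history vector of the results structure.
void train_with_full_history(NewtonMethod& newton_method)
{
   newton_method.set_reserve_all_training_history(true);

   NewtonMethod::NewtonMethodResults* results = newton_method.perform_training();

   // With the flags set, the per-iteration records documented in this reference
   // (parameters_norm_history, performance_history, gradient_norm_history, ...)
   // are assumed to be filled in.
   const Vector<double>& performance_history = results->performance_history;
   const Vector<double>& gradient_norm_history = results->gradient_norm_history;

   delete results;   // assumed: the caller owns the results
}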
const double & get_warning_training_rate(void) const
double final_gradient_norm
Final gradient norm.
void set_reserve_training_rate_history(const bool &)
const bool & get_reserve_parameters_history(void) const
Returns true if the parameters history matrix is to be reserved, and false otherwise.
bool reserve_gradient_history
True if the gradient history matrix is to be reserved, false otherwise.
const size_t & get_maximum_generalization_performance_decreases(void) const
Returns the maximum number of generalization failures during the training process.
void set_gradient_norm_goal(const double &)
double warning_gradient_norm
Value for the gradient norm at which a warning message is written to the screen.
Vector< Vector< double > > parameters_history
History of the neural network parameters over the training iterations.
const size_t & get_maximum_iterations_number(void) const
Returns the maximum number of iterations for training.
void set_warning_training_rate(const double &)
virtual ~NewtonMethod(void)
Destructor.
void set_minimum_performance_increase(const double &)
void set_maximum_generalization_performance_decreases(const size_t &)
const TrainingRateAlgorithm & get_training_rate_algorithm(void) const
Returns a constant reference to the training rate algorithm object inside the Newton method object.
Matrix< std::string > write_final_results(const size_t &precision=3) const
Returns a string matrix with the final results from training, written with the given decimal precision.
NewtonMethodResults(void)
Default constructor.
void set_warning_parameters_norm(const double &)
Vector< double > final_parameters
Final neural network parameters vector.
double error_gradient_norm
Value for the gradient norm at which the training process is assumed to fail.
const bool & get_reserve_training_direction_history(void) const
Returns true if the training direction history matrix is to be reserved, and false otherwise.
Vector< double > parameters_norm_history
History of the parameters norm over the training iterations.
double gradient_norm_goal
Goal value for the norm of the objective function gradient. It is used as a stopping criterion.
bool reserve_generalization_performance_history
True if the generalization performance history vector is to be reserved, false otherwise.
Vector< Matrix< double > > inverse_Hessian_history
History of the inverse Hessian over the training iterations.
Vector< double > generalization_performance_history
History of the generalization performance over the training iterations.
double minimum_parameters_increment_norm
Norm of the parameters increment vector at which training stops.
double maximum_time
Maximum training time. It is used as a stopping criterion.
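The stopping criteria scattered through this reference can be set together. A configuration sketch with illustrative values follows, under the assumption that training stops as soon as any one criterion is met; the header, namespace and time unit are also assumptions.

#include "newton_method.h"   // assumed header for this class
using namespace OpenNN;      // namespace name is assumed

// Hypothetical stopping-criteria configuration using the setters listed in this reference.
void configure_stopping_criteria(NewtonMethod& newton_method)
{
   newton_method.set_minimum_parameters_increment_norm(1.0e-6);
   newton_method.set_minimum_performance_increase(1.0e-9);
   newton_method.set_performance_goal(1.0e-3);
   newton_method.set_gradient_norm_goal(1.0e-3);
   newton_method.set_maximum_generalization_performance_decreases(5);
   newton_method.set_maximum_iterations_number(1000);
   newton_method.set_maximum_time(3600.0);   // maximum training time, assumed to be in seconds
}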
double warning_training_rate
Training rate value at which a warning message is written to the screen.
const double & get_warning_parameters_norm(void) const
double minimum_performance_increase
Minimum performance improvement between two successive iterations. It is used as a stopping criterion.
const double & get_minimum_performance_increase(void) const
Returns the minimum performance improvement during training.
bool reserve_parameters_norm_history
True if the parameters norm history vector is to be reserved, false otherwise.