[libfann] 211/242: Changed save format to save all parameters
Christian Kastner
chrisk-guest at moszumanska.debian.org
Sat Oct 4 21:10:45 UTC 2014
This is an automated email from the git hooks/post-receive script.
chrisk-guest pushed a commit to tag Version2_0_0
in repository libfann.
commit 5058244ca431a0aa7cf35a9507c0f44732f80303
Author: Steffen Nissen <lukesky at diku.dk>
Date: Wed Nov 23 21:43:45 2005 +0000
Changed save format to save all parameters
---
.project | 20 ++++----
ChangeLog | 2 +
examples/Makefile | 3 ++
examples/cascade_train.c | 2 +
src/fann_io.c | 124 ++++++++++++++++++++++++++++++++++++++++++++---
src/include/fann_io.h | 10 +++-
6 files changed, 143 insertions(+), 18 deletions(-)
diff --git a/.project b/.project
index 94c2b67..30f3ded 100644
--- a/.project
+++ b/.project
@@ -17,11 +17,15 @@
<value>org.eclipse.cdt.core.MakeErrorParser;org.eclipse.cdt.core.GCCErrorParser;org.eclipse.cdt.core.GASErrorParser;org.eclipse.cdt.core.GLDErrorParser;org.eclipse.cdt.core.VCErrorParser;</value>
</dictionary>
<dictionary>
+ <key>org.eclipse.cdt.make.core.enableAutoBuild</key>
+ <value>true</value>
+ </dictionary>
+ <dictionary>
<key>org.eclipse.cdt.make.core.environment</key>
<value></value>
</dictionary>
<dictionary>
- <key>org.eclipse.cdt.make.core.enableAutoBuild</key>
+ <key>org.eclipse.cdt.make.core.enableFullBuild</key>
<value>true</value>
</dictionary>
<dictionary>
@@ -29,10 +33,6 @@
<value>all</value>
</dictionary>
<dictionary>
- <key>org.eclipse.cdt.make.core.enableFullBuild</key>
- <value>true</value>
- </dictionary>
- <dictionary>
<key>org.eclipse.cdt.make.core.enabledIncrementalBuild</key>
<value>true</value>
</dictionary>
@@ -41,14 +41,14 @@
<value>/fann/src</value>
</dictionary>
<dictionary>
- <key>org.eclipse.cdt.make.core.build.command</key>
- <value>make</value>
- </dictionary>
- <dictionary>
<key>org.eclipse.cdt.make.core.build.target.clean</key>
<value>clean</value>
</dictionary>
<dictionary>
+ <key>org.eclipse.cdt.make.core.build.command</key>
+ <value>make</value>
+ </dictionary>
+ <dictionary>
<key>org.eclipse.cdt.make.core.enableCleanBuild</key>
<value>true</value>
</dictionary>
@@ -66,7 +66,7 @@
</dictionary>
<dictionary>
<key>org.eclipse.cdt.make.core.build.target.auto</key>
- <value>floatfann.o</value>
+ <value>-f ../examples/Makefile quickcompiletest</value>
</dictionary>
<dictionary>
<key>org.eclipse.cdt.make.core.stopOnError</key>
diff --git a/ChangeLog b/ChangeLog
index 4bcba60..90cff4e 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -12,6 +12,8 @@ libfann (2.0.0) stable; urgency=low
* More activation functions
* Enums used instead of integers for several parameters.
* Possibility to set default logging facility
+ * All parameters are saved to the configuration file, not just the parameters needed to execute the ANN
+ * More readable configuration file
libfann (1.2.0) stable; urgency=low
* Fixes for better compatibility with different compilers
diff --git a/examples/Makefile b/examples/Makefile
index 05c4f80..cc15c01 100644
--- a/examples/Makefile
+++ b/examples/Makefile
@@ -64,6 +64,9 @@ compiletest:
gcc -O3 -ggdb -lm -DDEBUG -Wall -Wformat-security -Wfloat-equal -Wshadow -Wpointer-arith -Wcast-qual -Wsign-compare -pedantic -ansi -DFIXEDFANN -I../src/ -I../src/include/ ../src/fixedfann.c xor_test.c -o xor_test
g++ -O3 -ggdb -lm -DDEBUG -Wall -Wformat-security -Wfloat-equal -Wpointer-arith -Wcast-qual -Wsign-compare -pedantic -ansi -I../src/ -I../src/include/ ../src/floatfann.c xor_train.c -o xor_train
+quickcompiletest:
+ gcc -O -ggdb -lm -DDEBUG -Wall -Wformat-security -Wfloat-equal -Wshadow -Wpointer-arith -Wcast-qual -Wsign-compare -pedantic -ansi -I../src/ -I../src/include/ ../src/floatfann.c ../examples/xor_train.c -o ../examples/xor_train
+
debug: $(DEBUG_TARGETS)
%_debug: %.c Makefile ../src/*c ../src/include/*h
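The new quickcompiletest target is what the updated Eclipse .project file above wires into the auto-build step (the build arguments there are "-f ../examples/Makefile quickcompiletest", and the build location is /fann/src). Run from the src/ directory, the equivalent shell invocation would be:

    make -f ../examples/Makefile quickcompiletest

It builds only the float xor_train example at -O, so it fails fast on compile errors without running the full compiletest matrix.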
diff --git a/examples/cascade_train.c b/examples/cascade_train.c
index b575d4b..f126999 100644
--- a/examples/cascade_train.c
+++ b/examples/cascade_train.c
@@ -48,6 +48,7 @@ int main()
const float desired_error = (const float) 0.001;
unsigned int max_neurons = 40;
unsigned int neurons_between_reports = 1;
+ fann_type steepnesses[] = {0.1,0.2,0.4,0.5,0.6,0.7,0.8,0.9,1.0,1.1};
printf("Reading data.\n");
@@ -125,6 +126,7 @@ int main()
fann_set_cascade_candidate_limit(ann, 1000.0);
fann_set_cascade_max_out_epochs(ann, 150);
fann_set_cascade_max_cand_epochs(ann, 150);
+ fann_set_cascade_activation_steepnesses(ann, steepnesses, 10);
fann_set_cascade_num_candidate_groups(ann, 1);
/*fann_set_callback(ann, print_callback);*/
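A side note on the call added above: fann_set_cascade_activation_steepnesses takes the array and its element count, and the count passed must match the array length. A minimal sketch, assuming the FANN 2.0 cascade API used in this example:

    /* ten candidate steepnesses; the count is derived from the array itself */
    fann_type steepnesses[] = {0.1, 0.2, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1};
    fann_set_cascade_activation_steepnesses(ann, steepnesses,
            sizeof(steepnesses) / sizeof(steepnesses[0]));

Deriving the count from sizeof avoids the literal 10 falling out of sync with the array.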
diff --git a/src/fann_io.c b/src/fann_io.c
index 72dddbd..f678dcd 100644
--- a/src/fann_io.c
+++ b/src/fann_io.c
@@ -178,6 +178,59 @@ int fann_save_internal_fd(struct fann *ann, FILE * conf, const char *configurati
fprintf(conf, "learning_rate=%f\n", ann->learning_rate);
fprintf(conf, "connection_rate=%f\n", ann->connection_rate);
fprintf(conf, "shortcut_connections=%u\n", ann->shortcut_connections);
+
+ fprintf(conf, "learning_momentum=%f\n", ann->learning_momentum);
+ fprintf(conf, "training_algorithm=%u\n", ann->training_algorithm);
+ fprintf(conf, "train_error_function=%u\n", ann->train_error_function);
+ fprintf(conf, "train_stop_function=%u\n", ann->train_stop_function);
+ fprintf(conf, "cascade_output_change_fraction=%f\n", ann->cascade_output_change_fraction);
+ fprintf(conf, "quickprop_decay=%f\n", ann->quickprop_decay);
+ fprintf(conf, "quickprop_mu=%f\n", ann->quickprop_mu);
+ fprintf(conf, "rprop_increase_factor=%f\n", ann->rprop_increase_factor);
+ fprintf(conf, "rprop_decrease_factor=%f\n", ann->rprop_decrease_factor);
+ fprintf(conf, "rprop_delta_min=%f\n", ann->rprop_delta_min);
+ fprintf(conf, "rprop_delta_max=%f\n", ann->rprop_delta_max);
+ fprintf(conf, "rprop_delta_zero=%f\n", ann->rprop_delta_zero);
+ fprintf(conf, "cascade_output_stagnation_epochs=%u\n", ann->cascade_output_stagnation_epochs);
+ fprintf(conf, "cascade_candidate_change_fraction=%f\n", ann->cascade_candidate_change_fraction);
+ fprintf(conf, "cascade_candidate_stagnation_epochs=%u\n", ann->cascade_candidate_stagnation_epochs);
+ fprintf(conf, "cascade_max_out_epochs=%u\n", ann->cascade_max_out_epochs);
+ fprintf(conf, "cascade_max_cand_epochs=%u\n", ann->cascade_max_cand_epochs);
+ fprintf(conf, "cascade_num_candidate_groups=%u\n", ann->cascade_num_candidate_groups);
+
+#ifndef FIXEDFANN
+ if(save_as_fixed)
+ {
+ fprintf(conf, "bit_fail_limit=%u\n", (int) floor((ann->bit_fail_limit * fixed_multiplier) + 0.5));
+ fprintf(conf, "cascade_candidate_limit=%u\n", (int) floor((ann->cascade_candidate_limit * fixed_multiplier) + 0.5));
+ fprintf(conf, "cascade_weight_multiplier=%u\n", (int) floor((ann->cascade_weight_multiplier * fixed_multiplier) + 0.5));
+ }
+ else
+#endif
+ {
+ fprintf(conf, "bit_fail_limit="FANNPRINTF"\n", ann->bit_fail_limit);
+ fprintf(conf, "cascade_candidate_limit="FANNPRINTF"\n", ann->cascade_candidate_limit);
+ fprintf(conf, "cascade_weight_multiplier="FANNPRINTF"\n", ann->cascade_weight_multiplier);
+ }
+
+ fprintf(conf, "cascade_activation_functions_count=%u\n", ann->cascade_activation_functions_count);
+ fprintf(conf, "cascade_activation_functions=");
+ for(i = 0; i < ann->cascade_activation_functions_count; i++)
+ fprintf(conf, "%u ", ann->cascade_activation_functions[i]);
+ fprintf(conf, "\n");
+
+ fprintf(conf, "cascade_activation_steepnesses_count=%u\n", ann->cascade_activation_steepnesses_count);
+ fprintf(conf, "cascade_activation_steepnesses=");
+ for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
+ {
+#ifndef FIXEDFANN
+ if(save_as_fixed)
+ fprintf(conf, "%u ", (int) floor((ann->cascade_activation_steepnesses[i] * fixed_multiplier) + 0.5));
+ else
+#endif
+ fprintf(conf, FANNPRINTF" ", ann->cascade_activation_steepnesses[i]);
+ }
+ fprintf(conf, "\n");
fprintf(conf, "layer_sizes=");
for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
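With these additions, the saved configuration file becomes a sequence of self-describing key=value lines, one per parameter, in the order the fprintf calls above emit them. A hypothetical excerpt (the values are purely illustrative, not taken from a real network):

    learning_rate=0.700000
    connection_rate=1.000000
    shortcut_connections=0
    learning_momentum=0.000000
    training_algorithm=2
    ...
    cascade_activation_functions_count=10
    cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
    cascade_activation_steepnesses_count=4
    cascade_activation_steepnesses=0.250000 0.500000 0.750000 1.000000

This is what the ChangeLog means by a more readable configuration file: every parameter can be inspected and diffed as plain text.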
@@ -258,7 +311,7 @@ struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file
#define fann_scanf(type, name, val) \
{ \
- if(fscanf(conf, name"="type"\n", &val) != 1) \
+ if(fscanf(conf, name"="type"\n", val) != 1) \
{ \
fann_error(NULL, FANN_E_CANT_READ_CONFIG, name, configuration_file); \
fann_destroy(ann); \
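The macro change above removes the address-of operator from the macro body, so every call site now passes a pointer explicitly. That makes casts possible at the call site (for example, reading an enum through an unsigned int pointer, as done below for training_algorithm) and makes the expansion easier to follow. A sketch of what one call expands to, assuming the macro as patched:

    /* fann_scanf("%u", "num_layers", &num_layers) expands to roughly: */
    if(fscanf(conf, "num_layers" "=" "%u" "\n", &num_layers) != 1)
    {
        fann_error(NULL, FANN_E_CANT_READ_CONFIG, "num_layers", configuration_file);
        fann_destroy(ann);
        return NULL;
    }

Adjacent string literals are concatenated by the compiler, so the format string is simply "num_layers=%u\n".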
@@ -279,7 +332,7 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
struct fann_neuron *first_neuron, *neuron_it, *last_neuron, **connected_neurons;
fann_type *weights;
struct fann_layer *layer_it;
- struct fann *ann;
+ struct fann *ann = NULL;
char *read_version;
@@ -315,11 +368,11 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
free(read_version);
#ifdef FIXEDFANN
- fann_scanf("%u", "decimal_point", decimal_point);
+ fann_scanf("%u", "decimal_point", &decimal_point);
multiplier = 1 << decimal_point;
#endif
- fann_scanf("%u", "num_layers", num_layers);
+ fann_scanf("%u", "num_layers", &num_layers);
ann = fann_allocate_structure(num_layers);
if(ann == NULL)
@@ -327,9 +380,66 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
return NULL;
}
- fann_scanf("%f", "learning_rate", ann->learning_rate);
- fann_scanf("%f", "connection_rate", ann->connection_rate);
- fann_scanf("%u", "shortcut_connections", ann->shortcut_connections);
+ fann_scanf("%f", "learning_rate", &ann->learning_rate);
+ fann_scanf("%f", "connection_rate", &ann->connection_rate);
+ fann_scanf("%u", "shortcut_connections", &ann->shortcut_connections);
+ fann_scanf("%f", "learning_momentum", &ann->learning_momentum);
+ fann_scanf("%u", "training_algorithm", (unsigned int *)&ann->training_algorithm);
+ fann_scanf("%u", "train_error_function", (unsigned int *)&ann->train_error_function);
+ fann_scanf("%u", "train_stop_function", (unsigned int *)&ann->train_stop_function);
+ fann_scanf("%f", "cascade_output_change_fraction", &ann->cascade_output_change_fraction);
+ fann_scanf("%f", "quickprop_decay", &ann->quickprop_decay);
+ fann_scanf("%f", "quickprop_mu", &ann->quickprop_mu);
+ fann_scanf("%f", "rprop_increase_factor", &ann->rprop_increase_factor);
+ fann_scanf("%f", "rprop_decrease_factor", &ann->rprop_decrease_factor);
+ fann_scanf("%f", "rprop_delta_min", &ann->rprop_delta_min);
+ fann_scanf("%f", "rprop_delta_max", &ann->rprop_delta_max);
+ fann_scanf("%f", "rprop_delta_zero", &ann->rprop_delta_zero);
+ fann_scanf("%u", "cascade_output_stagnation_epochs", &ann->cascade_output_stagnation_epochs);
+ fann_scanf("%f", "cascade_candidate_change_fraction", &ann->cascade_candidate_change_fraction);
+ fann_scanf("%u", "cascade_candidate_stagnation_epochs", &ann->cascade_candidate_stagnation_epochs);
+ fann_scanf("%u", "cascade_max_out_epochs", &ann->cascade_max_out_epochs);
+ fann_scanf("%u", "cascade_max_cand_epochs", &ann->cascade_max_cand_epochs);
+ fann_scanf("%u", "cascade_num_candidate_groups", &ann->cascade_num_candidate_groups);
+
+ fann_scanf(FANNSCANF, "bit_fail_limit", &ann->bit_fail_limit);
+ fann_scanf(FANNSCANF, "cascade_candidate_limit", &ann->cascade_candidate_limit);
+ fann_scanf(FANNSCANF, "cascade_weight_multiplier", &ann->cascade_weight_multiplier);
+
+
+ fann_scanf("%u", "cascade_activation_functions_count", &ann->cascade_activation_functions_count);
+
+ /* reallocate mem */
+ ann->cascade_activation_functions =
+ (enum fann_activationfunc_enum *)realloc(ann->cascade_activation_functions,
+ ann->cascade_activation_functions_count * sizeof(enum fann_activationfunc_enum));
+ if(ann->cascade_activation_functions == NULL)
+ {
+ fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
+ fann_destroy(ann);
+ return NULL;
+ }
+
+ fscanf(conf, "cascade_activation_functions=");
+ for(i = 0; i < ann->cascade_activation_functions_count; i++)
+ fscanf(conf, "%u ", (unsigned int *)&ann->cascade_activation_functions[i]);
+
+ fann_scanf("%u", "cascade_activation_steepnesses_count", &ann->cascade_activation_steepnesses_count);
+
+ /* reallocate mem */
+ ann->cascade_activation_steepnesses =
+ (fann_type *)realloc(ann->cascade_activation_steepnesses,
+ ann->cascade_activation_steepnesses_count * sizeof(fann_type));
+ if(ann->cascade_activation_steepnesses == NULL)
+ {
+ fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
+ fann_destroy(ann);
+ return NULL;
+ }
+
+ fscanf(conf, "cascade_activation_steepnesses=");
+ for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
+ fscanf(conf, FANNSCANF" ", &ann->cascade_activation_steepnesses[i]);
#ifdef FIXEDFANN
ann->decimal_point = decimal_point;
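One detail worth noting in the loader above: realloc(NULL, n) behaves like malloc(n), so the reallocation pattern works whether or not fann_allocate_structure already set up the cascade arrays. The usual caveat with assigning realloc's result back to the same pointer is that the old block leaks on failure; here that is harmless because the error path destroys the whole ANN anyway. A hedged sketch of the leak-free variant, with hypothetical names:

    /* grow 'steepnesses' to 'count' elements without losing it on failure */
    fann_type *tmp = (fann_type *) realloc(steepnesses, count * sizeof(fann_type));
    if(tmp == NULL)
    {
        /* 'steepnesses' is still valid here and can be freed or kept */
    }
    else
    {
        steepnesses = tmp;
    }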
diff --git a/src/include/fann_io.h b/src/include/fann_io.h
index 4d96847..0220b51 100644
--- a/src/include/fann_io.h
+++ b/src/include/fann_io.h
@@ -29,7 +29,7 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* Function: fann_create_from_file
- Constructs a backpropagation neural network from a configuration file.
+ Constructs a backpropagation neural network from a configuration file, which has been saved by <fann_save>.
See also:
<fann_save>, <fann_save_to_fixed>
@@ -43,6 +43,14 @@ FANN_EXTERNAL struct fann *FANN_API fann_create_from_file(const char *configurat
Save the entire network to a configuration file.
+ The configuration file contains all information about the neural network, enabling
+ <fann_create_from_file> to create an exact copy of the network and all of the
+ parameters associated with it.
+
+ The two parameters set by <fann_set_callback> and <fann_set_error_log> are *NOT* saved
+ to the file, because they cannot safely be ported to a different location. Temporary
+ parameters generated during training, such as the value returned by <fann_get_MSE>, are
+ likewise not saved.
+
See also:
<fann_create_from_file>, <fann_save_to_fixed>
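The documented round trip is straightforward. A minimal sketch, assuming the FANN 2.0 float API; the file name "saved.net" and the layer sizes are arbitrary choices for illustration:

    #include "fann.h"

    int main()
    {
        struct fann *ann;
        struct fann *copy;

        /* build a small 2-3-1 network, save it, then reload an exact copy */
        ann = fann_create_standard(3, 2, 3, 1);
        if(ann == NULL)
            return 1;
        fann_save(ann, "saved.net");
        fann_destroy(ann);

        copy = fann_create_from_file("saved.net");
        if(copy == NULL)
            return 1;
        /* 'copy' now carries all saved training parameters as well */
        fann_destroy(copy);
        return 0;
    }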