[libfann] 181/242: more work on reordering activation functions
Christian Kastner
chrisk-guest at moszumanska.debian.org
Sat Oct 4 21:10:41 UTC 2014
This is an automated email from the git hooks/post-receive script.
chrisk-guest pushed a commit to tag Version2_0_0
in repository libfann.
commit e6d17e85653288c4663af65ff001237638efcba1
Author: Steffen Nissen <lukesky at diku.dk>
Date: Tue May 31 22:55:43 2005 +0000
more work on reordering activation functions
---
examples/cascade_train.c | 53 ++++++++++++++++++++++++++++++++++++++++---
src/fann.c | 8 ++++++-
src/fann_cascade.c | 2 +-
src/fann_train.c | 46 +++++++++++++++++++++++++++++++++++--
src/include/fann_activation.h | 50 ++++++++++++++++++++++++++++++++++++++++
src/include/fann_internal.h | 4 +++-
6 files changed, 155 insertions(+), 8 deletions(-)
diff --git a/examples/cascade_train.c b/examples/cascade_train.c
index 5be2657..ea3d5b4 100644
--- a/examples/cascade_train.c
+++ b/examples/cascade_train.c
@@ -31,6 +31,8 @@ int main()
unsigned int neurons_between_reports = 1;
struct fann *ann;
struct fann_train_data *train_data, *test_data;
+ int i;
+ fann_type number, steepness, v1, v2;
printf("Reading data.\n");
@@ -79,10 +81,55 @@ int main()
ann = fann_create_shortcut(learning_rate, 2, train_data->num_input, train_data->num_output);
fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);
- fann_set_activation_steepness_hidden(ann, 0.5);
- fann_set_activation_steepness_output(ann, 1);
- fann_set_activation_function_hidden(ann, FANN_GAUSSIAN_SYMMETRIC);
+ fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
fann_set_activation_function_output(ann, FANN_LINEAR);
+ fann_set_activation_steepness_hidden(ann, 1);
+ fann_set_activation_steepness_output(ann, 1);
+
+ /*
+ for(i = 0; i < 6; i++){
+ printf("%.20e, ", ann->activation_values_hidden[i]);
+ }
+ for(i = 0; i < 6; i++){
+ printf("%.20e, ", ann->activation_results_hidden[i]);
+ }
+ printf("\n");
+
+ for(i = 0; i < 100000; i++)
+ {
+ number = fann_rand(-10.0,10.0);
+ steepness = fann_rand(0.0,2.0);
+ fann_set_activation_steepness_hidden(ann, steepness);
+ fann_set_activation_steepness_output(ann, steepness);
+ v1 = fann_stepwise(
+ ann->activation_values_hidden[0],
+ ann->activation_values_hidden[1],
+ ann->activation_values_hidden[2],
+ ann->activation_values_hidden[3],
+ ann->activation_values_hidden[4],
+ ann->activation_values_hidden[5],
+ ann->activation_results_hidden[0],
+ ann->activation_results_hidden[1],
+ ann->activation_results_hidden[2],
+ ann->activation_results_hidden[3],
+ ann->activation_results_hidden[4],
+ ann->activation_results_hidden[5],
+ -1, 1, number);
+ v1 = fann_activation_new(ann, ann->activation_function_hidden, ann->activation_steepness_hidden, number);
+ number = number*steepness;
+ v2 = fann_stepwise(-2.64665246009826660156e+00, -1.47221946716308593750e+00, -5.49306154251098632812e-01, 5.49306154251098632812e-01, 1.47221934795379638672e+00, 2.64665293693542480469e+00, 4.99999988824129104614e-03, 5.00000007450580596924e-02, 2.50000000000000000000e-01, 7.50000000000000000000e-01, 9.49999988079071044922e-01, 9.95000004768371582031e-01, 0, 1, number);
+ v2 = fann_stepwise(-2.64665293693542480469e+00, -1.47221934795379638672e+00, -5.49306154251098632812e-01, 5.49306154251098632812e-01, 1.47221934795379638672e+00, 2.64665293693542480469e+00, -9.90000009536743164062e-01, -8.99999976158142089844e-01, -5.00000000000000000000e-01, 5.00000000000000000000e-01, 8.99999976158142089844e-01, 9.90000009536743164062e-01, -1, 1, number);
+ if((int)floor(v1*10000.0+0.5) != (int)floor(v2*10000.0+0.5))
+ {
+ printf("steepness = %f, number = %f, v1 = %f, v2 = %f", steepness, number, v1, v2);
+ printf(" **********************");
+ printf("\n");
+ }
+ }
+
+ exit(0);
+ */
+
fann_set_train_error_function(ann, FANN_ERRORFUNC_LINEAR);
fann_set_rprop_increase_factor(ann, 1.2);
fann_set_rprop_decrease_factor(ann, 0.5);
diff --git a/src/fann.c b/src/fann.c
index ea2b78a..6afaa7e 100644
--- a/src/fann.c
+++ b/src/fann.c
@@ -622,7 +622,10 @@ FANN_EXTERNAL fann_type * FANN_API fann_run(struct fann *ann, fann_type *input)
}
neuron_it->sum = neuron_sum;
-
+
+ neuron_sum = fann_mult(steepness, neuron_sum);
+ fann_activation_switch(ann, activation_function, neuron_sum, neuron_it->value);
+ /*
switch(activation_function){
#ifdef FIXEDFANN
case FANN_SIGMOID:
@@ -674,6 +677,9 @@ FANN_EXTERNAL fann_type * FANN_API fann_run(struct fann *ann, fann_type *input)
default:
fann_error((struct fann_error *)ann, FANN_E_CANT_USE_ACTIVATION);
}
+ */
+ /*if((int)(neuron_it->value*1000.0) != (int)(fann_activation_new(ann, activation_function, steepness, neuron_sum)*1000.0))
+ printf("Wrong activation calculated %f != %f\n", neuron_it->value, fann_activation_new(ann, activation_function, steepness, neuron_sum));*/
}
}
diff --git a/src/fann_cascade.c b/src/fann_cascade.c
index 435db7a..4397433 100644
--- a/src/fann_cascade.c
+++ b/src/fann_cascade.c
@@ -574,7 +574,7 @@ void fann_update_candidate_slopes(struct fann *ann)
*/
/* unrolled loop end */
- activation = fann_activation(ann, 0, cand_sum);
+ activation = fann_activation(ann, ann->activation_function_hidden, ann->activation_steepness_hidden, cand_sum);
/* printf("%f = sigmoid(%f);\n", activation, cand_sum);*/
cand_it->sum = cand_sum;
diff --git a/src/fann_train.c b/src/fann_train.c
index 8ee931f..5d3cae2 100644
--- a/src/fann_train.c
+++ b/src/fann_train.c
@@ -62,7 +62,7 @@ fann_type fann_activation_derived(unsigned int activation_function,
Calculates the activation of a value, given an activation function
and a steepness
*/
-fann_type fann_activation(struct fann *ann, unsigned int is_output_layer,
+fann_type fann_activation_old(struct fann *ann, unsigned int is_output_layer,
fann_type value)
{
/* values used for the stepwise linear sigmoid function */
@@ -140,6 +140,47 @@ fann_type fann_activation(struct fann *ann, unsigned int is_output_layer,
}
}
+/* INTERNAL FUNCTION
+ Calculates the activation of a value, given an activation function
+ and a steepness
+*/
+fann_type fann_activation(struct fann *ann, unsigned int activation_function, fann_type steepness,
+ fann_type value)
+{
+ value = fann_mult(steepness, value);
+ fann_activation_switch(ann, activation_function, value, value);
+ return value;
+ /*
+ switch(activation_function){
+ case FANN_LINEAR:
+ return value;
+ case FANN_SIGMOID:
+ return (fann_type)fann_sigmoid_real(value);
+ case FANN_SIGMOID_SYMMETRIC:
+ return (fann_type)fann_sigmoid_symmetric_real(value);
+ case FANN_SIGMOID_SYMMETRIC_STEPWISE:
+ return (fann_type)fann_stepwise(-2.64665293693542480469e+00, -1.47221934795379638672e+00, -5.49306154251098632812e-01, 5.49306154251098632812e-01, 1.47221934795379638672e+00, 2.64665293693542480469e+00, -9.90000009536743164062e-01, -8.99999976158142089844e-01, -5.00000000000000000000e-01, 5.00000000000000000000e-01, 8.99999976158142089844e-01, 9.90000009536743164062e-01, -1, 1, value);
+ case FANN_SIGMOID_STEPWISE:
+ return (fann_type)fann_stepwise(-2.64665246009826660156e+00, -1.47221946716308593750e+00, -5.49306154251098632812e-01, 5.49306154251098632812e-01, 1.47221934795379638672e+00, 2.64665293693542480469e+00, 4.99999988824129104614e-03, 5.00000007450580596924e-02, 2.50000000000000000000e-01, 7.50000000000000000000e-01, 9.49999988079071044922e-01, 9.95000004768371582031e-01, 0, 1, value);
+ case FANN_THRESHOLD:
+ return (fann_type)((value < 0) ? 0 : 1);
+ case FANN_THRESHOLD_SYMMETRIC:
+ return (fann_type)((value < 0) ? -1 : 1);
+ case FANN_GAUSSIAN:
+ return (fann_type)fann_gaussian_real(value);
+ case FANN_GAUSSIAN_SYMMETRIC:
+ return (fann_type)fann_gaussian_symmetric_real(value);
+ case FANN_ELLIOT:
+ return (fann_type)fann_elliot_real(value);
+ case FANN_ELLIOT_SYMMETRIC:
+ return (fann_type)fann_elliot_symmetric_real(value);
+ default:
+ fann_error((struct fann_error *)ann, FANN_E_CANT_USE_ACTIVATION);
+ return 0;
+ }
+ */
+}
+
/* Trains the network with the backpropagation algorithm.
*/
FANN_EXTERNAL void FANN_API fann_train(struct fann *ann, fann_type *input, fann_type *desired_output)
@@ -366,7 +407,8 @@ void fann_backpropagate_MSE(struct fann *ann)
for(neuron_it = (layer_it-1)->first_neuron;
neuron_it != last_neuron; neuron_it++){
neuron_value = neuron_it->value;
- *error_prev_layer *= fann_activation(ann, 0, neuron_value);
+ /* *error_prev_layer *= fann_activation(ann, 0, neuron_value); */
+ *error_prev_layer *= fann_activation(ann, ann->activation_function_hidden, activation_steepness_hidden, neuron_value);
}
/*
diff --git a/src/include/fann_activation.h b/src/include/fann_activation.h
index c6d04f8..efbc65b 100644
--- a/src/include/fann_activation.h
+++ b/src/include/fann_activation.h
@@ -139,32 +139,82 @@ static char const * const FANN_ACTIVATION_NAMES[] = {
#define fann_linear_func(v1, r1, v2, r2, sum) ((((r2-r1) * (sum-v1))/(v2-v1)) + r1)
#define fann_stepwise(v1, v2, v3, v4, v5, v6, r1, r2, r3, r4, r5, r6, min, max, sum) (sum < v5 ? (sum < v3 ? (sum < v2 ? (sum < v1 ? min : fann_linear_func(v1, r1, v2, r2, sum)) : fann_linear_func(v2, r2, v3, r3, sum)) : (sum < v4 ? fann_linear_func(v3, r3, v4, r4, sum) : fann_linear_func(v4, r4, v5, r5, sum))) : (sum < v6 ? fann_linear_func(v5, r5, v6, r6, sum) : max))
+
+
/* FANN_LINEAR */
#define fann_linear(steepness, sum) fann_mult(steepness, sum)
#define fann_linear_derive(steepness, value) (steepness)
/* FANN_SIGMOID */
#define fann_sigmoid(steepness, sum) (1.0f/(1.0f + exp(-2.0f * steepness * sum)))
+#define fann_sigmoid_real(sum) (1.0f/(1.0f + exp(-2.0f * sum)))
#define fann_sigmoid_derive(steepness, value) (2.0f * steepness * value * (1.0f - value))
/* FANN_SIGMOID_SYMMETRIC */
#define fann_sigmoid_symmetric(steepness, sum) (2.0f/(1.0f + exp(-2.0f * steepness * sum)) - 1.0f)
+#define fann_sigmoid_symmetric_real(sum) (2.0f/(1.0f + exp(-2.0f * sum)) - 1.0f)
#define fann_sigmoid_symmetric_derive(steepness, value) steepness * (1.0f - (value*value))
/* FANN_GAUSSIAN */
#define fann_gaussian(steepness, sum) (exp(-sum * steepness * sum * steepness))
+#define fann_gaussian_real(sum) (exp(-sum * sum))
#define fann_gaussian_derive(steepness, value, sum) (-2.0f * sum * value * steepness)
/* FANN_GAUSSIAN_SYMMETRIC */
#define fann_gaussian_symmetric(steepness, sum) ((exp(-sum * steepness * sum * steepness)*2.0)-1.0)
+#define fann_gaussian_symmetric_real(sum) ((exp(-sum * sum)*2.0)-1.0)
#define fann_gaussian_symmetric_derive(steepness, value, sum) (-4.0f * sum * value * steepness)
/* FANN_ELLIOT */
#define fann_elliot(steepness, sum) (((sum * steepness) / 2.0f) / (1.0f + abs(sum * steepness)) + 0.5f)
+#define fann_elliot_real(sum) (((sum) / 2.0f) / (1.0f + abs(sum)) + 0.5f)
#define fann_elliot_derive(steepness, value, sum) (steepness * 1.0f / (2.0f * (1.0f + abs(sum)) * (1.0f + abs(sum))))
/* FANN_ELLIOT_SYMMETRIC */
#define fann_elliot_symmetric(steepness, sum) ((sum * steepness) / (1.0f + abs(sum * steepness)))
+#define fann_elliot_symmetric_real(sum) ((sum) / (1.0f + abs(sum)))
#define fann_elliot_symmetric_derive(steepness, value, sum) (steepness * 1.0f / ((1.0f + abs(sum)) * (1.0f + abs(sum))))
+#define fann_activation_switch(ann, activation_function, value, result) \
+switch(activation_function) \
+{ \
+ case FANN_LINEAR: \
+ result = value; \
+ break; \
+ case FANN_SIGMOID: \
+ result = (fann_type)fann_sigmoid_real(value); \
+ break; \
+ case FANN_SIGMOID_SYMMETRIC: \
+ result = (fann_type)fann_sigmoid_symmetric_real(value); \
+ break; \
+ case FANN_SIGMOID_SYMMETRIC_STEPWISE: \
+ result = (fann_type)fann_stepwise(-2.64665293693542480469e+00, -1.47221934795379638672e+00, -5.49306154251098632812e-01, 5.49306154251098632812e-01, 1.47221934795379638672e+00, 2.64665293693542480469e+00, -9.90000009536743164062e-01, -8.99999976158142089844e-01, -5.00000000000000000000e-01, 5.00000000000000000000e-01, 8.99999976158142089844e-01, 9.90000009536743164062e-01, -1, 1, value); \
+ break; \
+ case FANN_SIGMOID_STEPWISE: \
+ result = (fann_type)fann_stepwise(-2.64665246009826660156e+00, -1.47221946716308593750e+00, -5.49306154251098632812e-01, 5.49306154251098632812e-01, 1.47221934795379638672e+00, 2.64665293693542480469e+00, 4.99999988824129104614e-03, 5.00000007450580596924e-02, 2.50000000000000000000e-01, 7.50000000000000000000e-01, 9.49999988079071044922e-01, 9.95000004768371582031e-01, 0, 1, value); \
+ break; \
+ case FANN_THRESHOLD: \
+ result = (fann_type)((value < 0) ? 0 : 1); \
+ break; \
+ case FANN_THRESHOLD_SYMMETRIC: \
+ result = (fann_type)((value < 0) ? -1 : 1); \
+ break; \
+ case FANN_GAUSSIAN: \
+ result = (fann_type)fann_gaussian_real(value); \
+ break; \
+ case FANN_GAUSSIAN_SYMMETRIC: \
+ result = (fann_type)fann_gaussian_symmetric_real(value); \
+ break; \
+ case FANN_ELLIOT: \
+ result = (fann_type)fann_elliot_real(value); \
+ break; \
+ case FANN_ELLIOT_SYMMETRIC: \
+ result = (fann_type)fann_elliot_symmetric_real(value); \
+ break; \
+ default: \
+ fann_error((struct fann_error *)ann, FANN_E_CANT_USE_ACTIVATION); \
+ result = 0; \
+ break; \
+}
+
#endif
diff --git a/src/include/fann_internal.h b/src/include/fann_internal.h
index 63ce38c..e6f1ff8 100644
--- a/src/include/fann_internal.h
+++ b/src/include/fann_internal.h
@@ -68,7 +68,9 @@ void fann_update_weights_irpropm(struct fann *ann, unsigned int first_weight, un
void fann_clear_train_arrays(struct fann *ann);
-fann_type fann_activation(struct fann *ann, unsigned int is_output_layer,
+fann_type fann_activation_old(struct fann *ann, unsigned int is_output_layer,
+ fann_type value);
+fann_type fann_activation(struct fann *ann, unsigned int activation_function, fann_type steepness,
fann_type value);
fann_type fann_activation_derived(unsigned int activation_function,
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/libfann.git
More information about the debian-science-commits
mailing list