[libfann] 171/242: just a commit during development of cascade
Christian Kastner
chrisk-guest@moszumanska.debian.org
Sat Oct 4 21:10:39 UTC 2014
This is an automated email from the git hooks/post-receive script.
chrisk-guest pushed a commit to tag Version2_0_0
in repository libfann.
commit b59fe20c553e35cdb99154c9d80829a750c3322a
Author: Steffen Nissen <lukesky@diku.dk>
Date: Tue Jan 4 07:18:18 2005 +0000
just a commit during development of cascade
---
benchmarks/Makefile | 5 +-
benchmarks/benchmark.sh | 7 +-
benchmarks/datasets/two-spiral.test | 1156 +++++++++++-----------------------
benchmarks/datasets/two-spiral.train | 580 +++++++++++------
benchmarks/gnuplot | 39 ++
benchmarks/quality.cc | 100 ++-
examples/cascade_train.c | 60 +-
examples/xor_train.c | 1 +
ltmain.sh | 6 +-
src/Makefile.am | 2 +-
src/Makefile.in | 7 +-
src/fann.c | 87 ++-
src/fann_cascade.c | 251 ++++++--
src/fann_train.c | 31 +-
src/fann_train_data.c | 8 +-
src/include/fann_activation.h | 15 +-
src/include/fann_data.h | 18 +
src/include/fann_internal.h | 4 +-
18 files changed, 1278 insertions(+), 1099 deletions(-)
diff --git a/benchmarks/Makefile b/benchmarks/Makefile
index 061e6e4..7d54c0a 100644
--- a/benchmarks/Makefile
+++ b/benchmarks/Makefile
@@ -37,4 +37,7 @@ parity: parity.c
$(GCC) $(CFLAGS) $(LFLAGS) $< -o $@
clean:
- rm -rf -- $(TARGETS) *.net *.out *fixed_test* *fixed_train* *~ *ps
+ rm -rf -- $(TARGETS) *.net *fixed_test* *fixed_train* *~
+
+distclean: clean
+ rm -rf *.ps *.out
diff --git a/benchmarks/benchmark.sh b/benchmarks/benchmark.sh
index ccef77f..d3d6162 100755
--- a/benchmarks/benchmark.sh
+++ b/benchmarks/benchmark.sh
@@ -2,7 +2,7 @@
date;
-max_seconds_training=200;
+max_seconds_training=300;
secs_between_reports=0.01;
function benchmark_algorithm() {
@@ -12,6 +12,7 @@ function benchmark_algorithm() {
function benchmark_problem() {
#rm -f *_fixed.net
+ algo="fann_cascade"; benchmark_algorithm;
algo="fann_rprop"; benchmark_algorithm;
#./quality_fixed $prob.$algo.train.out_fixed_train $prob.$algo.train.out_fixed_test $prob.$algo.fixed_train.out $prob.$algo.fixed_test.out *_fixed.net
algo="fann_rprop_stepwise"; benchmark_algorithm;
@@ -29,6 +30,10 @@ function benchmark_problem() {
#comment out some of the lines below if some of the problems should not be benchmarked
+#prob="two-spiral"; n1=20; n2=0; sec_train=20;
+#benchmark_problem;
+#exit;
+
prob="building"; n1=16; n2=0; sec_train=$max_seconds_training;
benchmark_problem;
diff --git a/benchmarks/datasets/two-spiral.test b/benchmarks/datasets/two-spiral.test
index e9fd7ac..225cf79 100755
--- a/benchmarks/datasets/two-spiral.test
+++ b/benchmarks/datasets/two-spiral.test
@@ -1,771 +1,385 @@
-770 2 1
- 6.50000 0.00000 1
--6.50000 -0.00000 0
- 6.47656 0.31817 1
--6.47656 -0.31817 0
- 6.43760 0.63405 1
--6.43760 -0.63405 0
- 6.38328 0.94687 1
--6.38328 -0.94687 0
- 6.31380 1.25590 1
--6.31380 -1.25590 0
- 6.22942 1.56039 1
--6.22942 -1.56039 0
- 6.13040 1.85964 1
--6.13040 -1.85964 0
- 6.01705 2.15294 1
--6.01705 -2.15294 0
- 5.88973 2.43961 1
--5.88973 -2.43961 0
- 5.74880 2.71899 1
--5.74880 -2.71899 0
- 5.59468 2.99043 1
--5.59468 -2.99043 0
- 5.42781 3.25331 1
--5.42781 -3.25331 0
- 5.24865 3.50704 1
--5.24865 -3.50704 0
- 5.05769 3.75105 1
--5.05769 -3.75105 0
- 4.85547 3.98479 1
--4.85547 -3.98479 0
- 4.64251 4.20774 1
--4.64251 -4.20774 0
- 4.41941 4.41943 1
--4.41941 -4.41943 0
- 4.18674 4.61938 1
--4.18674 -4.61938 0
- 3.94512 4.80717 1
--3.94512 -4.80717 0
- 3.69519 4.98240 1
--3.69519 -4.98240 0
- 3.43758 5.14473 1
--3.43758 -5.14473 0
- 3.17297 5.29380 1
--3.17297 -5.29380 0
- 2.90202 5.42934 1
--2.90202 -5.42934 0
- 2.62544 5.55107 1
--2.62544 -5.55107 0
- 2.34392 5.65877 1
--2.34392 -5.65877 0
- 2.05817 5.75225 1
--2.05817 -5.75225 0
- 1.76890 5.83136 1
--1.76890 -5.83136 0
- 1.47685 5.89598 1
--1.47685 -5.89598 0
- 1.18272 5.94601 1
--1.18272 -5.94601 0
- 0.88724 5.98143 1
--0.88724 -5.98143 0
- 0.59115 6.00221 1
--0.59115 -6.00221 0
- 0.29515 6.00838 1
--0.29515 -6.00838 0
--0.00002 6.00000 1
- 0.00002 -6.00000 0
--0.29366 5.97717 1
- 0.29366 -5.97717 0
--0.58506 5.94001 1
- 0.58506 -5.94001 0
--0.87353 5.88869 1
- 0.87353 -5.88869 0
--1.15837 5.82341 1
- 1.15837 -5.82341 0
--1.43892 5.74440 1
- 1.43892 -5.74440 0
--1.71452 5.65192 1
- 1.71452 -5.65192 0
--1.98452 5.54627 1
- 1.98452 -5.54627 0
--2.24829 5.42778 1
- 2.24829 -5.42778 0
--2.50523 5.29680 1
- 2.50523 -5.29680 0
--2.75475 5.15371 1
- 2.75475 -5.15371 0
--2.99628 4.99893 1
- 2.99628 -4.99893 0
--3.22928 4.83290 1
- 3.22928 -4.83290 0
--3.45322 4.65608 1
- 3.45322 -4.65608 0
--3.66761 4.46895 1
- 3.66761 -4.46895 0
--3.87198 4.27203 1
- 3.87198 -4.27203 0
--4.06589 4.06584 1
- 4.06589 -4.06584 0
--4.24891 3.85095 1
- 4.24891 -3.85095 0
--4.42067 3.62791 1
- 4.42067 -3.62791 0
--4.58081 3.39732 1
- 4.58081 -3.39732 0
--4.72900 3.15978 1
- 4.72900 -3.15978 0
--4.86495 2.91590 1
- 4.86495 -2.91590 0
--4.98838 2.66631 1
- 4.98838 -2.66631 0
--5.09908 2.41165 1
- 5.09908 -2.41165 0
--5.19684 2.15256 1
- 5.19684 -2.15256 0
--5.28149 1.88971 1
- 5.28149 -1.88971 0
--5.35290 1.62374 1
- 5.35290 -1.62374 0
--5.41096 1.35534 1
- 5.41096 -1.35534 0
--5.45563 1.08515 1
- 5.45563 -1.08515 0
--5.48684 0.81386 1
- 5.48684 -0.81386 0
--5.50462 0.54212 1
- 5.50462 -0.54212 0
--5.50898 0.27060 1
- 5.50898 -0.27060 0
--5.50000 -0.00004 1
- 5.50000 0.00004 0
--5.47777 -0.26915 1
- 5.47777 0.26915 0
--5.44241 -0.53607 1
- 5.44241 0.53607 0
--5.39410 -0.80018 1
- 5.39410 0.80018 0
--5.33301 -1.06085 1
- 5.33301 1.06085 0
--5.25938 -1.31745 1
- 5.25938 1.31745 0
--5.17345 -1.56939 1
- 5.17345 1.56939 0
--5.07550 -1.81609 1
- 5.07550 1.81609 0
--4.96584 -2.05696 1
- 4.96584 2.05696 0
--4.84480 -2.29147 1
- 4.84480 2.29147 0
--4.71275 -2.51907 1
- 4.71275 2.51907 0
--4.57006 -2.73924 1
- 4.57006 2.73924 0
--4.41716 -2.95151 1
- 4.41716 2.95151 0
--4.25446 -3.15538 1
- 4.25446 3.15538 0
--4.08243 -3.35043 1
- 4.08243 3.35043 0
--3.90154 -3.53621 1
- 3.90154 3.53621 0
--3.71228 -3.71234 1
- 3.71228 3.71234 0
--3.51516 -3.87845 1
- 3.51516 3.87845 0
--3.31070 -4.03418 1
- 3.31070 4.03418 0
--3.09946 -4.17922 1
- 3.09946 4.17922 0
--2.88198 -4.31328 1
- 2.88198 4.31328 0
--2.65883 -4.43609 1
- 2.65883 4.43609 0
--2.43059 -4.54743 1
- 2.43059 4.54743 0
--2.19785 -4.64709 1
- 2.19785 4.64709 0
--1.96120 -4.73490 1
- 1.96120 4.73490 0
--1.72125 -4.81072 1
- 1.72125 4.81072 0
--1.47859 -4.87443 1
- 1.47859 4.87443 0
--1.23383 -4.92595 1
- 1.23383 4.92595 0
--0.98759 -4.96524 1
- 0.98759 4.96524 0
--0.74048 -4.99226 1
- 0.74048 4.99226 0
--0.49309 -5.00703 1
- 0.49309 5.00703 0
--0.24605 -5.00959 1
- 0.24605 5.00959 0
- 0.00006 -5.00000 1
--0.00006 5.00000 0
- 0.24463 -4.97837 1
--0.24463 4.97837 0
- 0.48708 -4.94482 1
--0.48708 4.94482 0
- 0.72683 -4.89951 1
--0.72683 4.89951 0
- 0.96331 -4.84262 1
--0.96331 4.84262 0
- 1.19597 -4.77436 1
--1.19597 4.77436 0
- 1.42426 -4.69497 1
--1.42426 4.69497 0
- 1.64766 -4.60472 1
--1.64766 4.60472 0
- 1.86564 -4.50389 1
--1.86564 4.50389 0
- 2.07770 -4.39280 1
--2.07770 4.39280 0
- 2.28338 -4.27178 1
--2.28338 4.27178 0
- 2.48220 -4.14119 1
--2.48220 4.14119 0
- 2.67373 -4.00141 1
--2.67373 4.00141 0
- 2.85754 -3.85285 1
--2.85754 3.85285 0
- 3.03324 -3.69592 1
--3.03324 3.69592 0
- 3.20044 -3.53105 1
--3.20044 3.53105 0
- 3.35880 -3.35871 1
--3.35880 3.35871 0
- 3.50798 -3.17937 1
--3.50798 3.17937 0
- 3.64768 -2.99350 1
--3.64768 2.99350 0
- 3.77762 -2.80160 1
--3.77762 2.80160 0
- 3.89755 -2.60418 1
--3.89755 2.60418 0
- 4.00723 -2.40177 1
--4.00723 2.40177 0
- 4.10648 -2.19489 1
--4.10648 2.19489 0
- 4.19510 -1.98407 1
--4.19510 1.98407 0
- 4.27297 -1.76985 1
--4.27297 1.76985 0
- 4.33995 -1.55279 1
--4.33995 1.55279 0
- 4.39596 -1.33343 1
--4.39596 1.33343 0
- 4.44094 -1.11233 1
--4.44094 1.11233 0
- 4.47485 -0.89004 1
--4.47485 0.89004 0
- 4.49767 -0.66710 1
--4.49767 0.66710 0
- 4.50944 -0.44407 1
--4.50944 0.44407 0
- 4.51019 -0.22151 1
--4.51019 0.22151 0
- 4.50000 0.00007 1
--4.50000 -0.00007 0
- 4.47897 0.22010 1
--4.47897 -0.22010 0
- 4.44723 0.43808 1
--4.44723 -0.43808 0
- 4.40492 0.65348 1
--4.40492 -0.65348 0
- 4.35222 0.86578 1
--4.35222 -0.86578 0
- 4.28934 1.07449 1
--4.28934 -1.07449 0
- 4.21650 1.27913 1
--4.21650 -1.27913 0
- 4.13394 1.47922 1
--4.13394 -1.47922 0
- 4.04195 1.67430 1
--4.04195 -1.67430 0
- 3.94080 1.86393 1
--3.94080 -1.86393 0
- 3.83081 2.04769 1
--3.83081 -2.04769 0
- 3.71232 2.22516 1
--3.71232 -2.22516 0
- 3.58567 2.39595 1
--3.58567 -2.39595 0
- 3.45124 2.55970 1
--3.45124 -2.55970 0
- 3.30941 2.71605 1
--3.30941 -2.71605 0
- 3.16057 2.86467 1
--3.16057 -2.86467 0
- 3.00515 3.00525 1
--3.00515 -3.00525 0
- 2.84358 3.13751 1
--2.84358 -3.13751 0
- 2.67629 3.26118 1
--2.67629 -3.26118 0
- 2.50374 3.37602 1
--2.50374 -3.37602 0
- 2.32639 3.48182 1
--2.32639 -3.48182 0
- 2.14471 3.57837 1
--2.14471 -3.57837 0
- 1.95918 3.66552 1
--1.95918 -3.66552 0
- 1.77028 3.74311 1
--1.77028 -3.74311 0
- 1.57850 3.81103 1
--1.57850 -3.81103 0
- 1.38434 3.86918 1
--1.38434 -3.86918 0
- 1.18828 3.91750 1
--1.18828 -3.91750 0
- 0.99083 3.95593 1
--0.99083 -3.95593 0
- 0.79248 3.98445 1
--0.79248 -3.98445 0
- 0.59373 4.00308 1
--0.59373 -4.00308 0
- 0.39506 4.01185 1
--0.39506 -4.01185 0
- 0.19696 4.01079 1
--0.19696 -4.01079 0
--0.00007 4.00000 1
- 0.00007 -4.00000 0
--0.19558 3.97957 1
- 0.19558 -3.97957 0
--0.38908 3.94963 1
- 0.38908 -3.94963 0
--0.58012 3.91033 1
- 0.58012 -3.91033 0
--0.76824 3.86183 1
- 0.76824 -3.86183 0
--0.95301 3.80432 1
- 0.95301 -3.80432 0
--1.13400 3.73803 1
- 1.13400 -3.73803 0
--1.31078 3.66317 1
- 1.31078 -3.66317 0
--1.48297 3.58000 1
- 1.48297 -3.58000 0
--1.65016 3.48880 1
- 1.65016 -3.48880 0
--1.81200 3.38985 1
- 1.81200 -3.38985 0
--1.96811 3.28345 1
- 1.96811 -3.28345 0
--2.11817 3.16994 1
- 2.11817 -3.16994 0
--2.26186 3.04963 1
- 2.26186 -3.04963 0
--2.39886 2.92290 1
- 2.39886 -2.92290 0
--2.52890 2.79009 1
- 2.52890 -2.79009 0
--2.65170 2.65160 1
- 2.65170 -2.65160 0
--2.76704 2.50780 1
- 2.76704 -2.50780 0
--2.87468 2.35909 1
- 2.87468 -2.35909 0
--2.97442 2.20589 1
- 2.97442 -2.20589 0
--3.06609 2.04860 1
- 3.06609 -2.04860 0
--3.14951 1.88766 1
- 3.14951 -1.88766 0
--3.22456 1.72348 1
- 3.22456 -1.72348 0
--3.29112 1.55650 1
- 3.29112 -1.55650 0
--3.34909 1.38716 1
- 3.34909 -1.38716 0
--3.39841 1.21589 1
- 3.39841 -1.21589 0
--3.43903 1.04314 1
- 3.43903 -1.04314 0
--3.47091 0.86934 1
- 3.47091 -0.86934 0
--3.49406 0.69493 1
- 3.49406 -0.69493 0
--3.50850 0.52036 1
- 3.50850 -0.52036 0
--3.51425 0.34605 1
- 3.51425 -0.34605 0
--3.51139 0.17243 1
- 3.51139 -0.17243 0
--3.50000 -0.00008 1
- 3.50000 0.00008 0
--3.48017 -0.17105 1
- 3.48017 0.17105 0
--3.45204 -0.34007 1
- 3.45204 0.34007 0
--3.41574 -0.50676 1
- 3.41574 0.50676 0
--3.37143 -0.67070 1
- 3.37143 0.67070 0
--3.31931 -0.83152 1
- 3.31931 0.83152 0
--3.25956 -0.98886 1
- 3.25956 0.98886 0
--3.19240 -1.14234 1
- 3.19240 1.14234 0
--3.11806 -1.29163 1
- 3.11806 1.29163 0
--3.03681 -1.43639 1
- 3.03681 1.43639 0
--2.94889 -1.57630 1
- 2.94889 1.57630 0
--2.85459 -1.71106 1
- 2.85459 1.71106 0
--2.75420 -1.84039 1
- 2.75420 1.84039 0
--2.64803 -1.96401 1
- 2.64803 1.96401 0
--2.53639 -2.08166 1
- 2.53639 2.08166 0
--2.41962 -2.19312 1
- 2.41962 2.19312 0
--2.29804 -2.29815 1
- 2.29804 2.29815 0
--2.17202 -2.39657 1
- 2.17202 2.39657 0
--2.04189 -2.48818 1
- 2.04189 2.48818 0
--1.90804 -2.57282 1
- 1.90804 2.57282 0
--1.77082 -2.65035 1
- 1.77082 2.65035 0
--1.63060 -2.72065 1
- 1.63060 2.72065 0
--1.48778 -2.78360 1
- 1.48778 2.78360 0
--1.34272 -2.83912 1
- 1.34272 2.83912 0
--1.19581 -2.88715 1
- 1.19581 2.88715 0
--1.04744 -2.92764 1
- 1.04744 2.92764 0
--0.89799 -2.96056 1
- 0.89799 2.96056 0
--0.74785 -2.98590 1
- 0.74785 2.98590 0
--0.59739 -3.00367 1
- 0.59739 3.00367 0
--0.44699 -3.01391 1
- 0.44699 3.01391 0
--0.29704 -3.01666 1
- 0.29704 3.01666 0
--0.14789 -3.01200 1
- 0.14789 3.01200 0
- 0.00008 -3.00000 1
--0.00008 3.00000 0
- 0.14651 -2.98078 1
--0.14651 2.98078 0
- 0.29107 -2.95445 1
--0.29107 2.95445 0
- 0.43339 -2.92115 1
--0.43339 2.92115 0
- 0.57315 -2.88104 1
--0.57315 2.88104 0
- 0.71003 -2.83429 1
--0.71003 2.83429 0
- 0.84371 -2.78109 1
--0.84371 2.78109 0
- 0.97389 -2.72162 1
--0.97389 2.72162 0
- 1.10029 -2.65612 1
--1.10029 2.65612 0
- 1.22261 -2.58481 1
--1.22261 2.58481 0
- 1.34060 -2.50793 1
--1.34060 2.50793 0
- 1.45401 -2.42572 1
--1.45401 2.42572 0
- 1.56260 -2.33847 1
--1.56260 2.33847 0
- 1.66616 -2.24643 1
--1.66616 2.24643 0
- 1.76447 -2.14989 1
--1.76447 2.14989 0
- 1.85734 -2.04914 1
--1.85734 2.04914 0
- 1.94460 -1.94449 1
--1.94460 1.94449 0
- 2.02609 -1.83624 1
--2.02609 1.83624 0
- 2.10167 -1.72470 1
--2.10167 1.72470 0
- 2.17122 -1.61019 1
--2.17122 1.61019 0
- 2.23462 -1.49303 1
--2.23462 1.49303 0
- 2.29178 -1.37355 1
--2.29178 1.37355 0
- 2.34264 -1.25208 1
--2.34264 1.25208 0
- 2.38713 -1.12894 1
--2.38713 1.12894 0
- 2.42521 -1.00447 1
--2.42521 1.00447 0
- 2.45687 -0.87900 1
--2.45687 0.87900 0
- 2.48209 -0.75285 1
--2.48209 0.75285 0
- 2.50088 -0.62636 1
--2.50088 0.62636 0
- 2.51328 -0.49985 1
--2.51328 0.49985 0
- 2.51932 -0.37363 1
--2.51932 0.37363 0
- 2.51907 -0.24803 1
--2.51907 0.24803 0
- 2.51260 -0.12336 1
--2.51260 0.12336 0
- 2.50000 0.00007 1
--2.50000 -0.00007 0
- 2.48138 0.12198 1
--2.48138 -0.12198 0
- 2.45686 0.24205 1
--2.45686 -0.24205 0
- 2.42656 0.36002 1
--2.42656 -0.36002 0
- 2.39065 0.47560 1
--2.39065 -0.47560 0
- 2.34928 0.58854 1
--2.34928 -0.58854 0
- 2.30262 0.69857 1
--2.30262 -0.69857 0
- 2.25085 0.80545 1
--2.25085 -0.80545 0
- 2.19419 0.90894 1
--2.19419 -0.90894 0
- 2.13282 1.00883 1
--2.13282 -1.00883 0
- 2.06697 1.10490 1
--2.06697 -1.10490 0
- 1.99686 1.19696 1
--1.99686 -1.19696 0
- 1.92273 1.28482 1
--1.92273 -1.28482 0
- 1.84483 1.36830 1
--1.84483 -1.36830 0
- 1.76339 1.44726 1
--1.76339 -1.44726 0
- 1.67867 1.52155 1
--1.67867 -1.52155 0
- 1.59094 1.59104 1
--1.59094 -1.59104 0
- 1.50046 1.65561 1
--1.50046 -1.65561 0
- 1.40751 1.71516 1
--1.40751 -1.71516 0
- 1.31234 1.76961 1
--1.31234 -1.76961 0
- 1.21525 1.81888 1
--1.21525 -1.81888 0
- 1.11651 1.86291 1
--1.11651 -1.86291 0
- 1.01639 1.90168 1
--1.01639 -1.90168 0
- 0.91517 1.93513 1
--0.91517 -1.93513 0
- 0.81314 1.96327 1
--0.81314 -1.96327 0
- 0.71056 1.98609 1
--0.71056 -1.98609 0
- 0.60772 2.00361 1
--0.60772 -2.00361 0
- 0.50488 2.01586 1
--0.50488 -2.01586 0
- 0.40231 2.02288 1
--0.40231 -2.02288 0
- 0.30027 2.02473 1
--0.30027 -2.02473 0
- 0.19903 2.02148 1
--0.19903 -2.02148 0
- 0.09884 2.01320 1
--0.09884 -2.01320 0
--0.00007 2.00000 1
- 0.00007 -2.00000 0
--0.09743 1.98198 1
- 0.09743 -1.98198 0
--0.19304 1.95926 1
- 0.19304 -1.95926 0
--0.28665 1.93198 1
- 0.28665 -1.93198 0
--0.37805 1.90026 1
- 0.37805 -1.90026 0
--0.46704 1.86426 1
- 0.46704 -1.86426 0
--0.55342 1.82415 1
- 0.55342 -1.82415 0
--0.63699 1.78009 1
- 0.63699 -1.78009 0
--0.71759 1.73225 1
- 0.71759 -1.73225 0
--0.79504 1.68083 1
- 0.79504 -1.68083 0
--0.86919 1.62601 1
- 0.86919 -1.62601 0
--0.93990 1.56800 1
- 0.93990 -1.56800 0
--1.00702 1.50700 1
- 1.00702 -1.50700 0
--1.07045 1.44323 1
- 1.07045 -1.44323 0
--1.13006 1.37689 1
- 1.13006 -1.37689 0
--1.18577 1.30820 1
- 1.18577 -1.30820 0
--1.23748 1.23739 1
- 1.23748 -1.23739 0
--1.28513 1.16469 1
- 1.28513 -1.16469 0
--1.32865 1.09032 1
- 1.32865 -1.09032 0
--1.36800 1.01450 1
- 1.36800 -1.01450 0
--1.40314 0.93748 1
- 1.40314 -0.93748 0
--1.43405 0.85946 1
- 1.43405 -0.85946 0
--1.46071 0.78070 1
- 1.46071 -0.78070 0
--1.48313 0.70140 1
- 1.48313 -0.70140 0
--1.50133 0.62181 1
- 1.50133 -0.62181 0
--1.51532 0.54213 1
- 1.51532 -0.54213 0
--1.52514 0.46259 1
- 1.52514 -0.46259 0
--1.53084 0.38340 1
- 1.53084 -0.38340 0
--1.53249 0.30477 1
- 1.53249 -0.30477 0
--1.53014 0.22692 1
- 1.53014 -0.22692 0
--1.52388 0.15003 1
- 1.52388 -0.15003 0
--1.51380 0.07431 1
- 1.51380 -0.07431 0
--1.50000 -0.00006 1
- 1.50000 0.00006 0
--1.48258 -0.07289 1
- 1.48258 0.07289 0
--1.46167 -0.14402 1
- 1.46167 0.14402 0
--1.43739 -0.21327 1
- 1.43739 0.21327 0
--1.40987 -0.28049 1
- 1.40987 0.28049 0
--1.37925 -0.34554 1
- 1.37925 0.34554 0
--1.34568 -0.40826 1
- 1.34568 0.40826 0
--1.30932 -0.46854 1
- 1.30932 0.46854 0
--1.27031 -0.52624 1
- 1.27031 0.52624 0
--1.22884 -0.58125 1
- 1.22884 0.58125 0
--1.18506 -0.63348 1
- 1.18506 0.63348 0
--1.13914 -0.68284 1
- 1.13914 0.68284 0
--1.09128 -0.72923 1
- 1.09128 0.72923 0
--1.04163 -0.77259 1
- 1.04163 0.77259 0
--0.99039 -0.81285 1
- 0.99039 0.81285 0
--0.93773 -0.84998 1
- 0.93773 0.84998 0
--0.88385 -0.88392 1
- 0.88385 0.88392 0
--0.82892 -0.91464 1
- 0.82892 0.91464 0
--0.77313 -0.94214 1
- 0.77313 0.94214 0
--0.71666 -0.96639 1
- 0.71666 0.96639 0
--0.65970 -0.98740 1
- 0.65970 0.98740 0
--0.60242 -1.00517 1
- 0.60242 1.00517 0
--0.54501 -1.01974 1
- 0.54501 1.01974 0
--0.48764 -1.03113 1
- 0.48764 1.03113 0
--0.43048 -1.03938 1
- 0.43048 1.03938 0
--0.37370 -1.04454 1
- 0.37370 1.04454 0
--0.31746 -1.04667 1
- 0.31746 1.04667 0
--0.26192 -1.04583 1
- 0.26192 1.04583 0
--0.20724 -1.04209 1
- 0.20724 1.04209 0
--0.15357 -1.03555 1
- 0.15357 1.03555 0
--0.10104 -1.02629 1
- 0.10104 1.02629 0
--0.04979 -1.01440 1
- 0.04979 1.01440 0
- 0.00004 -1.00000 1
--0.00004 1.00000 0
- 0.04834 -0.98319 1
--0.04834 0.98319 0
- 0.09499 -0.96408 1
--0.09499 0.96408 0
- 0.13989 -0.94280 1
--0.13989 0.94280 0
- 0.18293 -0.91948 1
--0.18293 0.91948 0
- 0.22403 -0.89424 1
--0.22403 0.89424 0
- 0.26311 -0.86722 1
--0.26311 0.86722 0
- 0.30008 -0.83855 1
--0.30008 0.83855 0
- 0.33488 -0.80838 1
--0.33488 0.80838 0
- 0.36746 -0.77685 1
--0.36746 0.77685 0
- 0.39777 -0.74410 1
--0.39777 0.74410 0
- 0.42577 -0.71029 1
--0.42577 0.71029 0
- 0.45143 -0.67555 1
--0.45143 0.67555 0
- 0.47472 -0.64004 1
--0.47472 0.64004 0
- 0.49565 -0.60389 1
--0.49565 0.60389 0
- 0.51419 -0.56727 1
--0.51419 0.56727 0
- 0.53035 -0.53031 1
--0.53035 0.53031 0
- 0.54416 -0.49315 1
--0.54416 0.49315 0
- 0.55562 -0.45595 1
--0.55562 0.45595 0
- 0.56477 -0.41883 1
--0.56477 0.41883 0
- 0.57165 -0.38193 1
--0.57165 0.38193 0
- 0.57630 -0.34539 1
--0.57630 0.34539 0
- 0.57877 -0.30933 1
--0.57877 0.30933 0
- 0.57913 -0.27388 1
--0.57913 0.27388 0
- 0.57744 -0.23915 1
--0.57744 0.23915 0
- 0.57376 -0.20527 1
--0.57376 0.20527 0
- 0.56819 -0.17233 1
--0.56819 0.17233 0
- 0.56081 -0.14045 1
--0.56081 0.14045 0
- 0.55170 -0.10971 1
--0.55170 0.10971 0
- 0.54096 -0.08022 1
--0.54096 0.08022 0
- 0.52869 -0.05205 1
--0.52869 0.05205 0
- 0.51501 -0.02528 1
--0.51501 0.02528 0
- 0.50000 0.00002 1
--0.50000 -0.00002 0
+192 2 1
+-2.594750 2.698700
+0
+-1.445560 2.258190
+1
+-1.318680 -1.892050
+0
+-3.835440 -1.501160
+0
+1.039800 0.185680
+0
+-0.088230 -4.492880
+0
+-0.795940 0.348150
+1
+3.839910 -0.842500
+1
+-0.225520 0.576190
+1
+-1.837090 -1.176000
+0
+-3.943050 2.748160
+1
+-4.460690 1.951150
+1
+1.973010 0.772220
+1
+1.318930 -3.369830
+1
+1.318680 1.892050
+1
+-0.515480 0.536130
+1
+-0.449380 2.516440
+1
+3.544500 -1.550400
+1
+-4.363740 -2.793410
+1
+3.605840 5.173660
+1
+-3.992980 0.078410
+0
+3.122650 -2.176370
+1
+-1.208570 1.256990
+0
+0.795940 -0.348150
+0
+-1.481850 1.032790
+0
+0.661450 -0.461000
+0
+-0.896560 -0.862030
+1
+-2.047870 5.232260
+0
+0.625180 -3.500870
+1
+0.029330 1.493460
+0
+-4.766660 -1.865630
+1
+-1.350020 -3.086410
+1
+2.594750 -2.698700
+1
+2.523840 -3.942630
+0
+2.863150 -0.628190
+0
+-4.977510 -0.888870
+1
+-2.024230 -0.361480
+0
+-1.890470 -2.712450
+1
+2.024230 0.361480
+1
+1.901660 -1.977840
+0
+-1.617420 -1.555110
+0
+-2.523840 3.942630
+1
+1.445560 -2.258190
+0
+4.816680 -1.056810
+0
+-0.909610 0.199570
+1
+-4.763460 3.319940
+0
+-0.107860 -5.492690
+1
+-3.521520 -2.254270
+0
+2.679300 1.715140
+0
+-0.273580 1.532010
+0
+-0.906430 1.415980
+0
+0.993560 -0.019510
+0
+0.949270 2.170220
+1
+3.521520 2.254270
+1
+-1.993370 0.039140
+0
+3.008650 0.537280
+0
+-0.589990 1.507400
+0
+-3.122650 2.176370
+0
+-0.097790 0.547590
+1
+1.041800 0.407750
+0
+-3.839910 0.842500
+0
+-1.378280 -6.281830
+0
+0.906430 -1.415980
+1
+3.287830 -3.419550
+0
+-1.318930 3.369830
+0
+0.735350 3.351530
+0
+0.127500 6.492500
+1
+-1.039800 -0.185680
+1
+-0.949270 -2.170220
+0
+3.779980 3.634380
+0
+3.835440 1.501160
+1
+3.993080 0.713080
+1
+0.088230 4.492880
+1
+3.980920 -4.140400
+1
+-0.735350 -3.351530
+1
+1.163970 5.305060
+0
+5.376880 -2.351900
+1
+0.048960 2.493270
+1
+-5.376880 2.351900
+0
+-3.008650 -0.537280
+1
+-5.793440 1.271120
+0
+2.462260 3.532850
+1
+-1.163970 -5.305060
+1
+-2.863150 0.628190
+1
+-3.993080 -0.713080
+0
+1.984700 -3.100410
+1
+-1.750770 -4.002600
+0
+-1.901660 1.977840
+1
+0.589990 -1.507400
+1
+-5.992590 0.117680
+0
+-0.625180 3.500870
+0
+-0.521040 -2.374760
+0
+-2.679300 -1.715140
+1
+4.500830 4.327460
+1
+-3.034050 -4.353250
+1
+0.225520 -0.576190
+0
+-1.683400 4.301050
+1
+-0.306730 -1.398000
+1
+-2.302250 1.604580
+1
+-2.993170 0.058780
+1
+-3.779980 -3.634380
+1
+-0.048960 -2.493270
+0
+2.904230 1.136690
+0
+-1.984700 3.100410
+0
+2.628310 -1.149650
+0
+-0.661450 0.461000
+1
+-0.954460 2.438620
+1
+-0.949660 -4.328290
+0
+4.766660 1.865630
+0
+0.306730 1.398000
+0
+0.949660 4.328290
+1
+5.992590 -0.117680
+1
+1.712130 -0.748900
+1
+1.886380 -0.413880
+1
+-2.628310 1.149650
+1
+-1.973010 -0.772220
+0
+0.909610 -0.199570
+0
+-3.605840 -5.173660
+0
+0.068590 3.493080
+0
+0.994870 0.636860
+0
+-3.062980 4.784850
+0
+-2.904230 -1.136690
+1
+1.378280 6.281830
+1
+2.302250 -1.604580
+0
+-5.961930 -1.064670
+0
+-0.367290 0.573760
+1
+0.746900 1.071650
+0
+-5.697870 -2.230100
+0
+0.515480 -0.536130
+0
+1.890470 2.712450
+0
+-0.993560 0.019510
+1
+5.961930 1.064670
+1
+0.449380 -2.516440
+0
+-0.994870 -0.636860
+1
+2.552270 5.834970
+1
+0.896560 0.862030
+0
+5.205960 3.332550
+1
+-1.712130 0.748900
+0
+-0.029330 -1.493460
+1
+-2.338270 -2.248200
+1
+1.750770 4.002600
+1
+1.350020 3.086410
+0
+3.062980 -4.784850
+1
+0.548520 1.254030
+0
+0.107860 5.492690
+0
+1.208570 -1.256990
+1
+3.943050 -2.748160
+0
+4.763460 -3.319940
+1
+-1.886380 0.413880
+0
+-0.746900 -1.071650
+1
+2.993170 -0.058780
+0
+-0.800970 4.485290
+1
+1.837090 1.176000
+1
+0.367290 -0.573760
+0
+3.992980 -0.078410
+1
+5.793440 -1.271120
+1
+1.993370 -0.039140
+1
+2.338270 2.248200
+0
+1.481850 -1.032790
+1
+4.977510 0.888870
+0
+4.363740 2.793410
+0
+-5.205960 -3.332550
+0
+-3.287830 3.419550
+1
+2.047870 -5.232260
+1
+-4.500830 -4.327460
+0
+3.034050 4.353250
+0
+0.800970 -4.485290
+0
+-1.041800 -0.407750
+1
+0.273580 -1.532010
+1
+3.059120 2.941290
+1
+-0.976770 5.469720
+0
+5.697870 2.230100
+1
+0.954460 -2.438620
+0
+4.460690 -1.951150
+0
+1.617420 1.555110
+1
+-3.544500 1.550400
+0
+-2.462260 -3.532850
+0
+-2.552270 -5.834970
+0
+1.683400 -4.301050
+0
+0.521040 2.374760
+1
+-4.816680 1.056810
+1
+-0.127500 -6.492500
+0
+-3.059120 -2.941290
+0
+-3.980920 4.140400
+0
+0.976770 -5.469720
+1
+2.151520 4.918780
+0
+-4.992790 0.098050
+1
+-0.548520 -1.254030
+1
+0.097790 -0.547590
+0
+-0.068590 -3.493080
+1
+4.992790 -0.098050
+0
+-2.151520 -4.918780
+1
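
An aside on the file layout: FANN training files begin with a header line of the form num_pairs num_inputs num_outputs (the new "192 2 1" above), followed by each pair's input values and output values; whitespace is flexible, which is why the old one-line-per-pair layout and the new two-line layout both parse. A minimal loader sketch using calls that already appear elsewhere in this commit (the include and the relative path are assumptions):

    #include <stdio.h>
    #include "floatfann.h"

    int main(void)
    {
        unsigned int i;
        struct fann_train_data *data =
            fann_read_train_from_file("benchmarks/datasets/two-spiral.test");
        if(data == NULL){
            return 1;
        }
        /* header "192 2 1": 192 pairs, 2 inputs, 1 output per pair */
        for(i = 0; i < data->num_data; i++){
            printf("(%f, %f) -> %f\n", data->input[i][0],
                   data->input[i][1], data->output[i][0]);
        }
        fann_destroy_train(data);
        return 0;
    }
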
diff --git a/benchmarks/datasets/two-spiral.train b/benchmarks/datasets/two-spiral.train
index b9326b8..f162ef7 100755
--- a/benchmarks/datasets/two-spiral.train
+++ b/benchmarks/datasets/two-spiral.train
@@ -1,195 +1,385 @@
-194 2 1
- 6.50000 0.00000 1
--6.50000 -0.00000 0
- 6.31380 1.25590 1
--6.31380 -1.25590 0
- 5.88973 2.43961 1
--5.88973 -2.43961 0
- 5.24865 3.50704 1
--5.24865 -3.50704 0
- 4.41941 4.41943 1
--4.41941 -4.41943 0
- 3.43758 5.14473 1
--3.43758 -5.14473 0
- 2.34392 5.65877 1
--2.34392 -5.65877 0
- 1.18272 5.94601 1
--1.18272 -5.94601 0
--0.00002 6.00000 1
- 0.00002 -6.00000 0
--1.15837 5.82341 1
- 1.15837 -5.82341 0
--2.24829 5.42778 1
- 2.24829 -5.42778 0
--3.22928 4.83290 1
- 3.22928 -4.83290 0
--4.06589 4.06584 1
- 4.06589 -4.06584 0
--4.72900 3.15978 1
- 4.72900 -3.15978 0
--5.19684 2.15256 1
- 5.19684 -2.15256 0
--5.45563 1.08515 1
- 5.45563 -1.08515 0
--5.50000 -0.00004 1
- 5.50000 0.00004 0
--5.33301 -1.06085 1
- 5.33301 1.06085 0
--4.96584 -2.05696 1
- 4.96584 2.05696 0
--4.41716 -2.95151 1
- 4.41716 2.95151 0
--3.71228 -3.71234 1
- 3.71228 3.71234 0
--2.88198 -4.31328 1
- 2.88198 4.31328 0
--1.96120 -4.73490 1
- 1.96120 4.73490 0
--0.98759 -4.96524 1
- 0.98759 4.96524 0
- 0.00006 -5.00000 1
--0.00006 5.00000 0
- 0.96331 -4.84262 1
--0.96331 4.84262 0
- 1.86564 -4.50389 1
--1.86564 4.50389 0
- 2.67373 -4.00141 1
--2.67373 4.00141 0
- 3.35880 -3.35871 1
--3.35880 3.35871 0
- 3.89755 -2.60418 1
--3.89755 2.60418 0
- 4.27297 -1.76985 1
--4.27297 1.76985 0
- 4.47485 -0.89004 1
--4.47485 0.89004 0
- 4.50000 0.00007 1
--4.50000 -0.00007 0
- 4.35222 0.86578 1
--4.35222 -0.86578 0
- 4.04195 1.67430 1
--4.04195 -1.67430 0
- 3.58567 2.39595 1
--3.58567 -2.39595 0
- 3.00515 3.00525 1
--3.00515 -3.00525 0
- 2.32639 3.48182 1
--2.32639 -3.48182 0
- 1.57850 3.81103 1
--1.57850 -3.81103 0
- 0.79248 3.98445 1
--0.79248 -3.98445 0
--0.00007 4.00000 1
- 0.00007 -4.00000 0
--0.76824 3.86183 1
- 0.76824 -3.86183 0
--1.48297 3.58000 1
- 1.48297 -3.58000 0
--2.11817 3.16994 1
- 2.11817 -3.16994 0
--2.65170 2.65160 1
- 2.65170 -2.65160 0
--3.06609 2.04860 1
- 3.06609 -2.04860 0
--3.34909 1.38716 1
- 3.34909 -1.38716 0
--3.49406 0.69493 1
- 3.49406 -0.69493 0
--3.50000 -0.00008 1
- 3.50000 0.00008 0
--3.37143 -0.67070 1
- 3.37143 0.67070 0
--3.11806 -1.29163 1
- 3.11806 1.29163 0
--2.75420 -1.84039 1
- 2.75420 1.84039 0
--2.29804 -2.29815 1
- 2.29804 2.29815 0
--1.77082 -2.65035 1
- 1.77082 2.65035 0
--1.19581 -2.88715 1
- 1.19581 2.88715 0
--0.59739 -3.00367 1
- 0.59739 3.00367 0
- 0.00008 -3.00000 1
--0.00008 3.00000 0
- 0.57315 -2.88104 1
--0.57315 2.88104 0
- 1.10029 -2.65612 1
--1.10029 2.65612 0
- 1.56260 -2.33847 1
--1.56260 2.33847 0
- 1.94460 -1.94449 1
--1.94460 1.94449 0
- 2.23462 -1.49303 1
--2.23462 1.49303 0
- 2.42521 -1.00447 1
--2.42521 1.00447 0
- 2.51328 -0.49985 1
--2.51328 0.49985 0
- 2.50000 0.00007 1
--2.50000 -0.00007 0
- 2.39065 0.47560 1
--2.39065 -0.47560 0
- 2.19419 0.90894 1
--2.19419 -0.90894 0
- 1.92273 1.28482 1
--1.92273 -1.28482 0
- 1.59094 1.59104 1
--1.59094 -1.59104 0
- 1.21525 1.81888 1
--1.21525 -1.81888 0
- 0.81314 1.96327 1
--0.81314 -1.96327 0
- 0.40231 2.02288 1
--0.40231 -2.02288 0
--0.00007 2.00000 1
- 0.00007 -2.00000 0
--0.37805 1.90026 1
- 0.37805 -1.90026 0
--0.71759 1.73225 1
- 0.71759 -1.73225 0
--1.00702 1.50700 1
- 1.00702 -1.50700 0
--1.23748 1.23739 1
- 1.23748 -1.23739 0
--1.40314 0.93748 1
- 1.40314 -0.93748 0
--1.50133 0.62181 1
- 1.50133 -0.62181 0
--1.53249 0.30477 1
- 1.53249 -0.30477 0
--1.50000 -0.00006 1
- 1.50000 0.00006 0
--1.40987 -0.28049 1
- 1.40987 0.28049 0
--1.27031 -0.52624 1
- 1.27031 0.52624 0
--1.09128 -0.72923 1
- 1.09128 0.72923 0
--0.88385 -0.88392 1
- 0.88385 0.88392 0
--0.65970 -0.98740 1
- 0.65970 0.98740 0
--0.43048 -1.03938 1
- 0.43048 1.03938 0
--0.20724 -1.04209 1
- 0.20724 1.04209 0
- 0.00004 -1.00000 1
--0.00004 1.00000 0
- 0.18293 -0.91948 1
--0.18293 0.91948 0
- 0.33488 -0.80838 1
--0.33488 0.80838 0
- 0.45143 -0.67555 1
--0.45143 0.67555 0
- 0.53035 -0.53031 1
--0.53035 0.53031 0
- 0.57165 -0.38193 1
--0.57165 0.38193 0
- 0.57744 -0.23915 1
--0.57744 0.23915 0
- 0.55170 -0.10971 1
--0.55170 0.10971 0
- 0.50000 0.00002 1
--0.50000 -0.00002 0
+192 2 1
+-2.651650 2.651650
+0
+-1.493100 2.234570
+1
+-1.284760 -1.922770
+0
+-3.811000 -1.578570
+0
+1.042080 0.207280
+0
+0 -4.500000
+0
+-0.808390 0.334850
+1
+3.861840 -0.768170
+1
+-0.239180 0.577420
+1
+-1.818840 -1.215310
+0
+-4.001450 2.673680
+1
+-4.503910 1.865580
+1
+1.963240 0.813200
+1
+1.387230 -3.349060
+1
+1.284760 1.922770
+1
+-0.530330 0.530330
+1
+-0.499920 2.513260
+1
+3.580030 -1.482900
+1
+-4.313250 -2.882020
+1
+3.507040 5.248650
+1
+-4 0
+0
+3.169980 -2.118110
+1
+-1.237440 1.237440
+0
+0.808390 -0.334850
+0
+-1.507040 1.006970
+0
+0.675570 -0.451400
+0
+-0.883880 -0.883880
+1
+-2.152590 5.196820
+0
+0.695010 -3.494050
+1
+0 1.500000
+0
+-4.734880 -1.961250
+1
+-1.291560 -3.118090
+1
+2.651650 -2.651650
+1
+2.604240 -3.897510
+0
+2.881060 -0.573080
+0
+-4.965230 -0.987650
+1
+-2.022870 -0.402370
+0
+-1.840330 -2.754240
+1
+2.022870 0.402370
+1
+1.944540 -1.944540
+0
+-1.590990 -1.590990
+0
+-2.604240 3.897510
+1
+1.493100 -2.234570
+0
+4.842630 -0.963260
+0
+-0.919490 0.182900
+1
+-4.832920 3.229250
+0
+0 -5.500000
+1
+-3.481780 -2.326450
+0
+2.650310 1.770880
+0
+-0.304830 1.532480
+0
+-0.937520 1.403100
+0
+1 0
+0
+0.908870 2.194210
+1
+3.481780 2.326450
+1
+-2 0
+0
+3.003650 0.597460
+0
+-0.621860 1.501300
+0
+-3.169980 2.118110
+0
+-0.109740 0.551690
+1
+1.039360 0.430520
+0
+-3.861840 0.768170
+0
+-1.255890 -6.313810
+0
+0.937520 -1.403100
+1
+3.358760 -3.358760
+0
+-1.387230 3.349060
+0
+0.670620 3.371450
+0
+0 6.500000
+1
+-1.042080 -0.207280
+1
+-0.908870 -2.194210
+0
+3.712310 3.712310
+0
+3.811000 1.578570
+1
+3.984440 0.792560
+1
+0 4.500000
+1
+4.065860 -4.065860
+1
+-0.670620 -3.371450
+1
+1.060800 5.333020
+0
+5.427790 -2.248270
+1
+0 2.500000
+1
+-5.427790 2.248270
+0
+-3.003650 -0.597460
+1
+-5.823410 1.158350
+0
+2.395900 3.585710
+1
+-1.060800 -5.333020
+1
+-2.881060 0.573080
+1
+-3.984440 -0.792560
+0
+2.048660 -3.066040
+1
+-1.674240 -4.041970
+0
+-1.944540 1.944540
+1
+0.621860 -1.501300
+1
+-6 0
+0
+-0.695010 3.494050
+0
+-0.475530 -2.390660
+0
+-2.650310 -1.770880
+1
+4.419420 4.419420
+1
+-2.951470 -4.417180
+1
+0.239180 -0.577420
+0
+-1.769910 4.272940
+1
+-0.280440 -1.409880
+1
+-2.338510 1.562540
+1
+-3 0
+1
+-3.712310 -3.712310
+1
+0 -2.500000
+0
+2.887120 1.195890
+0
+-2.048660 3.066040
+0
+2.656150 -1.100210
+0
+-0.675570 0.451400
+1
+-1.004540 2.425180
+1
+-0.865710 -4.352230
+0
+4.734880 1.961250
+0
+0.280440 1.409880
+0
+0.865710 4.352230
+1
+6 0
+1
+1.732270 -0.717530
+1
+1.900270 -0.377990
+1
+-2.656150 1.100210
+1
+-1.963240 -0.813200
+0
+0.919490 -0.182900
+0
+-3.507040 -5.248650
+0
+0 3.500000
+0
+0.987370 0.659740
+0
+-3.159810 4.728980
+0
+-2.887120 -1.195890
+1
+1.255890 6.313810
+1
+2.338510 -1.562540
+0
+-5.946010 -1.182740
+0
+-0.381950 0.571640
+1
+0.729190 1.091300
+0
+-5.658760 -2.343940
+0
+0.530330 -0.530330
+0
+1.840330 2.754240
+0
+-1 0
+1
+5.946010 1.182740
+1
+0.499920 -2.513260
+0
+-0.987370 -0.659740
+1
+2.439610 5.889730
+1
+0.883880 0.883880
+0
+5.144720 3.437590
+1
+-1.732270 0.717530
+0
+0 -1.500000
+1
+-2.298100 -2.298100
+1
+1.674240 4.041970
+1
+1.291560 3.118090
+0
+3.159810 -4.728980
+1
+0.526190 1.270330
+0
+0 5.500000
+0
+1.237440 -1.237440
+1
+4.001450 -2.673680
+0
+4.832920 -3.229250
+1
+-1.900270 0.377990
+0
+-0.729190 -1.091300
+1
+3 0
+0
+-0.890100 4.474830
+1
+1.818840 1.215310
+1
+0.381950 -0.571640
+0
+4 0
+1
+5.823410 -1.158350
+1
+2 0
+1
+2.298100 2.298100
+0
+1.507040 -1.006970
+1
+4.965230 0.987650
+0
+4.313250 2.882020
+0
+-5.144720 -3.437590
+0
+-3.358760 3.358760
+1
+2.152590 -5.196820
+1
+-4.419420 -4.419420
+0
+2.951470 4.417180
+0
+0.890100 -4.474830
+0
+-1.039360 -0.430520
+1
+0.304830 -1.532480
+1
+3.005200 3.005200
+1
+-1.085190 5.455620
+0
+5.658760 2.343940
+1
+1.004540 -2.425180
+0
+4.503910 -1.865580
+0
+1.590990 1.590990
+1
+-3.580030 1.482900
+0
+-2.395900 -3.585710
+0
+-2.439610 -5.889730
+0
+1.769910 -4.272940
+0
+0.475530 2.390660
+1
+-4.842630 0.963260
+1
+0 -6.500000
+0
+-3.005200 -3.005200
+0
+-4.065860 4.065860
+0
+1.085190 -5.455620
+1
+2.056920 4.965850
+0
+-5 0
+1
+-0.526190 -1.270330
+1
+0.109740 -0.551690
+0
+0 -3.500000
+1
+5 0
+0
+-2.056920 -4.965850
+1
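
For orientation, the 194-point set removed above matches the classic CMU two-spirals benchmark: point i of the first spiral sits at radius 6.5*(104-i)/104 and angle i*pi/16, with the second spiral mirrored through the origin (the removed 770-point test set is the same construction with a four-times finer angle step). A sketch that reproduces the removed training file, assuming that standard formulation:

    #include <math.h>
    #include <stdio.h>

    int main(void)
    {
        int i;
        printf("194 2 1\n");
        for(i = 0; i <= 96; i++){
            double angle = i * M_PI / 16.0;
            double r = 6.5 * (104 - i) / 104.0;
            double x = r * cos(angle);
            double y = r * sin(angle);
            printf("%.5f %.5f 1\n", x, y);    /* first spiral, class 1  */
            printf("%.5f %.5f 0\n", -x, -y);  /* mirrored spiral, class 0 */
        }
        return 0;
    }

For example, i = 1 gives r = 6.5*103/104 and angle pi/16, i.e. the pair (6.31380, 1.25590) seen near the top of the removed data.
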
diff --git a/benchmarks/gnuplot b/benchmarks/gnuplot
index 9c2612a..5d1f1d0 100644
--- a/benchmarks/gnuplot
+++ b/benchmarks/gnuplot
@@ -25,6 +25,7 @@ set logscale
set title "building"
plot "building.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"building.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "building.fann_cascade.test.out" title "fann cascade test" with lines 1, \
"building.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
"building.fann_batch.test.out" title "fann batch test" with lines 4, \
"building.fann_incremental.test.out" title "fann incremental test" with lines 5, \
@@ -32,6 +33,7 @@ plot "building.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"building.jneural.test.out" title "jneural incremental test" with lines 9, \
"building.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"building.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "building.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"building.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"building.fann_batch.train.out" title "fann batch train" with lines 4, \
"building.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -39,11 +41,33 @@ plot "building.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"building.jneural.train.out" title "jneural incremental train" with lines 9
set nologscale
+set logscale
+#set output "diabetes.ps"
+set title "diabetes"
+plot "diabetes.fann_rprop.test.out" title "fann rprop test" with lines -1, \
+ "diabetes.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "diabetes.fann_cascade.test.out" title "fann cascade test" with lines 1, \
+ "diabetes.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
+ "diabetes.fann_batch.test.out" title "fann batch test" with lines 4, \
+ "diabetes.fann_incremental.test.out" title "fann incremental test" with lines 5, \
+ "diabetes.lwnn.test.out" title "lwnn incremental test" with lines 8, \
+ "diabetes.jneural.test.out" title "jneural incremental test" with lines 9, \
+ "diabetes.fann_rprop.train.out" title "fann rprop train" with lines -1, \
+ "diabetes.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "diabetes.fann_cascade.train.out" title "fann cascade train" with lines 1, \
+ "diabetes.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
+ "diabetes.fann_batch.train.out" title "fann batch train" with lines 4, \
+ "diabetes.fann_incremental.train.out" title "fann incremental train" with lines 5, \
+ "diabetes.lwnn.train.out" title "lwnn incremental train" with lines 8, \
+ "diabetes.jneural.train.out" title "jneural incremental train" with lines 9
+set nologscale
+
set logscale x
#set output "gene.ps"
set title "gene"
plot "gene.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"gene.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "gene.fann_cascade.test.out" title "fann cascade test" with lines 1, \
"gene.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
"gene.fann_batch.test.out" title "fann batch test" with lines 4, \
"gene.fann_incremental.test.out" title "fann incremental test" with lines 5, \
@@ -51,6 +75,7 @@ plot "gene.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"gene.jneural.test.out" title "jneural incremental test" with lines 9, \
"gene.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"gene.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "gene.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"gene.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"gene.fann_batch.train.out" title "fann batch train" with lines 4, \
"gene.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -63,6 +88,7 @@ set logscale x
set title "mushroom"
plot "mushroom.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"mushroom.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "mushroom.fann_cascade.test.out" title "fann cascade test" with lines 1, \
"mushroom.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
"mushroom.fann_batch.test.out" title "fann batch test" with lines 4, \
"mushroom.fann_incremental.test.out" title "fann incremental test" with lines 5, \
@@ -70,6 +96,7 @@ plot "mushroom.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"mushroom.jneural.test.out" title "jneural incremental test" with lines 9, \
"mushroom.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"mushroom.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "mushroom.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"mushroom.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"mushroom.fann_batch.train.out" title "fann batch train" with lines 4, \
"mushroom.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -82,6 +109,7 @@ set logscale x
set title "parity8"
plot "parity8.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"parity8.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "parity8.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"parity8.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"parity8.fann_batch.train.out" title "fann batch train" with lines 4, \
"parity8.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -94,6 +122,7 @@ set logscale x
set title "parity13"
plot "parity13.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"parity13.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "parity13.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"parity13.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"parity13.fann_batch.train.out" title "fann batch train" with lines 4, \
"parity13.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -106,6 +135,7 @@ set logscale
set title "pumadyn-32fm"
plot "pumadyn-32fm.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"pumadyn-32fm.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "pumadyn-32fm.fann_cascade.test.out" title "fann cascade test" with lines 1, \
"pumadyn-32fm.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
"pumadyn-32fm.fann_batch.test.out" title "fann batch test" with lines 4, \
"pumadyn-32fm.fann_incremental.test.out" title "fann incremental test" with lines 5, \
@@ -113,6 +143,7 @@ plot "pumadyn-32fm.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"pumadyn-32fm.jneural.test.out" title "jneural incremental test" with lines 9, \
"pumadyn-32fm.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"pumadyn-32fm.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "pumadyn-32fm.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"pumadyn-32fm.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"pumadyn-32fm.fann_batch.train.out" title "fann batch train" with lines 4, \
"pumadyn-32fm.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -125,6 +156,7 @@ set logscale x
set title "robot"
plot "robot.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"robot.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "robot.fann_cascade.test.out" title "fann cascade test" with lines 1, \
"robot.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
"robot.fann_batch.test.out" title "fann batch test" with lines 4, \
"robot.fann_incremental.test.out" title "fann incremental test" with lines 5, \
@@ -132,6 +164,7 @@ plot "robot.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"robot.jneural.test.out" title "jneural incremental test" with lines 9, \
"robot.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"robot.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "robot.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"robot.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"robot.fann_batch.train.out" title "fann batch train" with lines 4, \
"robot.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -143,6 +176,7 @@ set logscale
set title "soybean"
plot "soybean.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"soybean.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "soybean.fann_cascade.test.out" title "fann cascade test" with lines 1, \
"soybean.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
"soybean.fann_batch.test.out" title "fann batch test" with lines 4, \
"soybean.fann_incremental.test.out" title "fann incremental test" with lines 5, \
@@ -150,6 +184,7 @@ plot "soybean.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"soybean.jneural.test.out" title "jneural incremental test" with lines 9, \
"soybean.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"soybean.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "soybean.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"soybean.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"soybean.fann_batch.train.out" title "fann batch train" with lines 4, \
"soybean.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -162,6 +197,7 @@ set logscale y
set title "thyroid"
plot "thyroid.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"thyroid.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "thyroid.fann_cascade.test.out" title "fann cascade test" with lines 1, \
"thyroid.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
"thyroid.fann_batch.test.out" title "fann batch test" with lines 4, \
"thyroid.fann_incremental.test.out" title "fann incremental test" with lines 5, \
@@ -169,6 +205,7 @@ plot "thyroid.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"thyroid.jneural.test.out" title "jneural incremental test" with lines 9, \
"thyroid.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"thyroid.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "thyroid.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"thyroid.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"thyroid.fann_batch.train.out" title "fann batch train" with lines 4, \
"thyroid.fann_incremental.train.out" title "fann incremental train" with lines 5, \
@@ -180,6 +217,7 @@ plot "thyroid.fann_rprop.test.out" title "fann rprop test" with lines -1, \
set title "two-spiral"
plot "two-spiral.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"two-spiral.fann_rprop_stepwise.test.out" title "fann rprop (stepwise) test" with lines 2, \
+ "two-spiral.fann_cascade.test.out" title "fann cascade test" with lines 1, \
"two-spiral.fann_quickprop.test.out" title "fann quickprop test" with lines 3, \
"two-spiral.fann_batch.test.out" title "fann batch test" with lines 4, \
"two-spiral.fann_incremental.test.out" title "fann incremental test" with lines 5, \
@@ -187,6 +225,7 @@ plot "two-spiral.fann_rprop.test.out" title "fann rprop test" with lines -1, \
"two-spiral.jneural.test.out" title "jneural incremental test" with lines 9, \
"two-spiral.fann_rprop.train.out" title "fann rprop train" with lines -1, \
"two-spiral.fann_rprop_stepwise.train.out" title "fann rprop (stepwise) train" with lines 2, \
+ "two-spiral.fann_cascade.train.out" title "fann cascade train" with lines 1, \
"two-spiral.fann_quickprop.train.out" title "fann quickprop train" with lines 3, \
"two-spiral.fann_batch.train.out" title "fann batch train" with lines 4, \
"two-spiral.fann_incremental.train.out" title "fann incremental train" with lines 5, \
diff --git a/benchmarks/quality.cc b/benchmarks/quality.cc
index 4b5fc72..0e433af 100644
--- a/benchmarks/quality.cc
+++ b/benchmarks/quality.cc
@@ -38,20 +38,28 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#include "floatfann.h"
unsigned int num_errors = 0;
+unsigned int num_bit_fail = 0;
double error_value = 0;
void clear_error()
{
num_errors = 0;
error_value = 0;
+ num_bit_fail = 0;
}
void update_error(fann_type *output, fann_type *desired_output, unsigned int num_output)
{
unsigned int i = 0;
+ double error_val = 0;
/* calculate the error */
for(i = 0; i < num_output; i++){
- error_value += (desired_output[i] - output[i]) * (desired_output[i] - output[i]);
+ error_val = (desired_output[i] - output[i]) * (desired_output[i] - output[i]);
+ error_value += error_val;
+
+ if(error_val >= 0.25){
+ num_bit_fail++;
+ }
}
num_errors++;
}
@@ -72,7 +80,7 @@ void quality_benchmark_jneural(
unsigned int seconds_of_training, double seconds_between_reports)
{
float train_error, test_error;
- unsigned int i;
+ unsigned int i, train_bit_fail, test_bit_fail;
unsigned int epochs = 0;
double elapsed = 0;
double total_elapsed = 0;
@@ -111,6 +119,7 @@ void quality_benchmark_jneural(
update_error(output, train_data->output[i], train_data->num_output);
}
train_error = mean_error();
+ train_bit_fail = num_bit_fail;
clear_error();
for(i = 0; i != test_data->num_data; i++){
@@ -119,10 +128,11 @@ void quality_benchmark_jneural(
update_error(output, test_data->output[i], test_data->num_output);
}
test_error = mean_error();
+ test_bit_fail = num_bit_fail;
fprintf(train_out, "%f %.20e %d\n", total_elapsed, train_error, epochs);
fprintf(test_out, "%f %.20e %d\n", total_elapsed, test_error, epochs);
- fprintf(stderr, "secs: %8.2f, train: %8.6f, test: %8.6f, epochs: %5d\r", total_elapsed, train_error, test_error, epochs);
+ fprintf(stderr, "secs: %8.2f, train: %8.6f (%4d), test: %8.6f (%4d), epochs: %5d\r", total_elapsed, train_error, train_bit_fail, test_error, test_bit_fail, epochs);
}
fprintf(stdout, "\nepochs: %d, epochs/sec: %f\n", epochs, epochs/total_elapsed);
@@ -131,7 +141,7 @@ void quality_benchmark_jneural(
}
#endif
-void quality_benchmark_fann(bool stepwise, unsigned int training_algorithm,
+void quality_benchmark_fann(bool stepwise, int training_algorithm,
char *filename,
struct fann_train_data *train_data,
struct fann_train_data *test_data,
@@ -141,7 +151,7 @@ void quality_benchmark_fann(bool stepwise, unsigned int training_algorithm,
unsigned int seconds_of_training, double seconds_between_reports)
{
float train_error, test_error;
- unsigned int i, decimal_point, j;
+ unsigned int i, decimal_point, j, train_bit_fail, test_bit_fail;
unsigned int epochs = 0;
double elapsed = 0;
double total_elapsed = 0;
@@ -193,6 +203,7 @@ void quality_benchmark_fann(bool stepwise, unsigned int training_algorithm,
update_error(output, train_data->output[i], train_data->num_output);
}
train_error = mean_error();
+ train_bit_fail = num_bit_fail;
clear_error();
for(i = 0; i != test_data->num_data; i++){
@@ -200,10 +211,11 @@ void quality_benchmark_fann(bool stepwise, unsigned int training_algorithm,
update_error(output, test_data->output[i], test_data->num_output);
}
test_error = mean_error();
+ test_bit_fail = num_bit_fail;
fprintf(train_out, "%f %.20e %d\n", total_elapsed, train_error, epochs);
fprintf(test_out, "%f %.20e %d\n", total_elapsed, test_error, epochs);
- fprintf(stderr, "secs: %8.2f, train: %8.6f, test: %8.6f, epochs: %5d\r", total_elapsed, train_error, test_error, epochs);
+ fprintf(stderr, "secs: %8.2f, train: %8.6f (%4d), test: %8.6f (%4d), epochs: %5d\r", total_elapsed, train_error, train_bit_fail, test_error, test_bit_fail, epochs);
/* Save the data as fixed point, to allow for drawing of
a fixed point graph */
@@ -225,6 +237,71 @@ void quality_benchmark_fann(bool stepwise, unsigned int training_algorithm,
fann_destroy(ann);
}
+void quality_benchmark_cascade(
+ struct fann_train_data *train_data,
+ struct fann_train_data *test_data,
+ FILE *train_out, FILE *test_out,
+ unsigned int num_input, unsigned int num_output,
+ unsigned int seconds_of_training, double seconds_between_reports)
+{
+ float train_error = 0;
+ float test_error = 0;
+ unsigned int i, train_bit_fail, test_bit_fail;
+ unsigned int epochs = 0;
+ double elapsed = 0;
+ double total_elapsed = 0;
+ fann_type *output;
+ struct fann *ann;
+
+ ann = fann_create_shortcut(0.7, 2, num_input, num_output);
+ fann_set_activation_steepness_hidden(ann, 1);
+ fann_set_activation_steepness_output(ann, 1);
+ fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
+ fann_set_activation_function_output(ann, FANN_SIGMOID);
+ calibrate_timer();
+
+ while(total_elapsed < (double)seconds_of_training){
+ /* train */
+ elapsed = 0;
+ start_timer();
+ while(elapsed < (double)seconds_between_reports){
+ fann_cascadetrain_on_data_callback(ann, train_data, 0, NULL, 150, 150, 1, 0);
+
+ elapsed = time_elapsed();
+ epochs++;
+ }
+ stop_timer();
+ total_elapsed += getSecs();
+
+ /* make report */
+
+ clear_error();
+ for(i = 0; i != train_data->num_data; i++){
+ output = fann_run(ann, train_data->input[i]);
+ update_error(output, train_data->output[i], train_data->num_output);
+ }
+ train_error = mean_error();
+ train_bit_fail = num_bit_fail;
+
+ clear_error();
+ for(i = 0; i != test_data->num_data; i++){
+ output = fann_run(ann, test_data->input[i]);
+ update_error(output, test_data->output[i], test_data->num_output);
+ }
+ test_error = mean_error();
+ test_bit_fail = num_bit_fail;
+
+
+ fprintf(train_out, "%f %.20e %d\n", total_elapsed, train_error, epochs);
+ fprintf(test_out, "%f %.20e %d\n", total_elapsed, test_error, epochs);
+ fprintf(stderr, "secs: %8.2f, train: %8.6f (%4d), test: %8.6f (%4d), epochs: %5d\r", total_elapsed, train_error, train_bit_fail, test_error, test_bit_fail, epochs);
+ }
+
+ fprintf(stdout, "\nepochs: %d, epochs/sec: %f\n", epochs, epochs/total_elapsed);
+
+ fann_destroy(ann);
+}
+
#ifdef LWNN
void quality_benchmark_lwnn(
struct fann_train_data *train_data,
@@ -236,7 +313,7 @@ void quality_benchmark_lwnn(
{
float train_error = 0;
float test_error = 0;
- unsigned int i;
+ unsigned int i, train_bit_fail, test_bit_fail;
unsigned int epochs = 0;
double elapsed = 0;
double total_elapsed = 0;
@@ -285,6 +362,7 @@ void quality_benchmark_lwnn(
update_error(output, train_data->output[i], train_data->num_output);
}
train_error = mean_error();
+ train_bit_fail = num_bit_fail;
clear_error();
for(i = 0; i != test_data->num_data; i++){
@@ -292,11 +370,12 @@ void quality_benchmark_lwnn(
update_error(output, test_data->output[i], test_data->num_output);
}
test_error = mean_error();
+ test_bit_fail = num_bit_fail;
fprintf(train_out, "%f %.20e %d\n", total_elapsed, train_error, epochs);
fprintf(test_out, "%f %.20e %d\n", total_elapsed, test_error, epochs);
- fprintf(stderr, "secs: %8.2f, train: %8.6f, test: %8.6f, epochs: %5d\r", total_elapsed, train_error, test_error, epochs);
+ fprintf(stderr, "secs: %8.2f, train: %8.6f (%4d), test: %8.6f (%4d), epochs: %5d\r", total_elapsed, train_error, train_bit_fail, test_error, test_bit_fail, epochs);
}
fprintf(stdout, "\nepochs: %d, epochs/sec: %f\n", epochs, epochs/total_elapsed);
@@ -391,6 +470,11 @@ int main(int argc, char* argv[])
train_data->num_input, num_neurons_hidden1,
num_neurons_hidden2, train_data->num_output,
seconds_of_training, seconds_between_reports);
+ }else if(strcmp(argv[1], "fann_cascade") == 0){
+ quality_benchmark_cascade(train_data, test_data,
+ train_out, test_out,
+ train_data->num_input, train_data->num_output,
+ seconds_of_training, seconds_between_reports);
#ifdef LWNN
}else if(strcmp(argv[1], "lwnn") == 0){
quality_benchmark_lwnn(train_data, test_data,
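
The num_bit_fail counter introduced in quality.cc above counts output neurons whose squared error reaches 0.25 — that is, outputs at least 0.5 away from the desired value, which for 0/1 targets means the output lands on the wrong side of the midpoint. A small standalone check of that threshold, mirroring the logic in update_error():

    #include <stdio.h>

    int main(void)
    {
        double desired = 0.0;
        double actual = 0.8;  /* clearly on the wrong side of 0.5 */
        double error_val = (desired - actual) * (desired - actual);

        /* 0.64 >= 0.25, so this output counts as one bit fail;
           an output of 0.4 (squared error 0.16) would not */
        printf("error %f -> %s\n", error_val,
               error_val >= 0.25 ? "bit fail" : "ok");
        return 0;
    }
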
diff --git a/examples/cascade_train.c b/examples/cascade_train.c
index 1ef9b8d..b11d415 100644
--- a/examples/cascade_train.c
+++ b/examples/cascade_train.c
@@ -25,63 +25,57 @@ int main()
{
const float learning_rate = (const float)0.7;
const float desired_error = (const float)0.001;
- unsigned int max_out_epochs = 10000;
- unsigned int max_cand_epochs = 10000;
- unsigned int max_neurons = 50;
+ unsigned int max_out_epochs = 500;
+ unsigned int max_cand_epochs = 500;
+ unsigned int max_neurons = 20;
unsigned int neurons_between_reports = 1;
- unsigned int i = 0;
- fann_type *calc_out;
struct fann *ann;
struct fann_train_data *train_data, *test_data;
printf("Reading data.\n");
- train_data = fann_read_train_from_file("../benchmarks/datasets/building.train");
- test_data = fann_read_train_from_file("../benchmarks/datasets/building.test");
+ train_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral2.train");
+ test_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral2.test");
+
+ /* this is in range -1 to 1 */
+ /*
+ train_data = fann_read_train_from_file("../benchmarks/datasets/parity4.train");
+ test_data = fann_read_train_from_file("../benchmarks/datasets/parity4.test");
+ */
+
+
+ train_data = fann_read_train_from_file("xor.data");
+ test_data = fann_read_train_from_file("xor.data");
+
printf("Creating network.\n");
ann = fann_create_shortcut(learning_rate, 2, train_data->num_input, train_data->num_output);
-
- fann_set_activation_function_hidden(ann, FANN_SIGMOID);
- fann_set_activation_function_output(ann, FANN_SIGMOID);
- /*fann_print_connections(ann);*/
+ fann_set_training_algorithm(ann, FANN_TRAIN_QUICKPROP);
+ fann_set_activation_steepness_hidden(ann, 1);
+ fann_set_activation_steepness_output(ann, 1);
+ fann_set_activation_function_hidden(ann, FANN_SIGMOID_SYMMETRIC);
+ fann_set_activation_function_output(ann, FANN_SIGMOID_SYMMETRIC);
+
fann_print_parameters(ann);
+ /*fann_print_connections(ann);*/
printf("Training network.\n");
- /*fann_train_on_data(ann, train_data, 300, 1, desired_error);*/
- printf("\nTrain error: %f, Test error: %f\n\n", fann_test_data(ann, train_data), fann_test_data(ann, test_data));
-
fann_cascadetrain_on_data_callback(ann, train_data, desired_error, NULL, max_out_epochs, max_cand_epochs, max_neurons, neurons_between_reports);
- printf("\nTrain error: %f, Test error: %f\n\n", fann_test_data(ann, train_data), fann_test_data(ann, test_data));
+ /*fann_train_on_data(ann, train_data, 300, 1, desired_error);*/
+ /*printf("\nTrain error: %f, Test error: %f\n\n", fann_test_data(ann, train_data), fann_test_data(ann, test_data));*/
fann_print_connections(ann);
/*fann_print_parameters(ann);*/
- /*
- printf("\nTesting network.\n");
-
- for(i = 0; i < test_data->num_data; i++){
- calc_out = fann_run(ann, test_data->input[i]);
- printf("XOR test (%f,%f) -> %f, should be %f, difference=%f\n",
- test_data->input[i][0], test_data->input[i][1], *calc_out, test_data->output[i][0], fann_abs(*calc_out - test_data->output[i][0]));
- }
- */
-
+ printf("\nTrain error: %f, Test error: %f\n\n", fann_test_data(ann, train_data), fann_test_data(ann, test_data));
+
printf("Saving network.\n");
fann_save(ann, "xor_float.net");
-
- /*fann_randomize_weights(ann, -0.1, 0.1);
- fann_train_on_data(ann, train_data, max_out_epochs, 1, desired_error);
-
- printf("\nTrain error: %f, Test error: %f\n\n", fann_test_data(ann, train_data), fann_test_data(ann, test_data));
-
- fann_print_connections(ann);
- fann_print_parameters(ann);*/
printf("Cleaning up.\n");
fann_destroy_train(train_data);
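
As called here and in quality_benchmark_cascade above, the development-era cascade entry point takes the network, the training data, a target error, an optional callback, the per-phase epoch limits, a hidden-neuron budget, and a reporting interval. A minimal sketch wired together from the calls in this example (the parameter readings are inferred from the example's variable names, so treat them as assumptions):

    #include "floatfann.h"

    int main(void)
    {
        struct fann_train_data *data =
            fann_read_train_from_file("xor.data");
        /* shortcut net: learning rate 0.7, 2 layers (inputs + outputs) */
        struct fann *ann = fann_create_shortcut(0.7, 2,
                data->num_input, data->num_output);

        /* grow up to 20 hidden neurons; 500 output-training and
           500 candidate-training epochs per neuron; report every neuron */
        fann_cascadetrain_on_data_callback(ann, data,
                0.001f, /* desired_error */
                NULL,   /* no callback */
                500,    /* max_out_epochs */
                500,    /* max_cand_epochs */
                20,     /* max_neurons */
                1);     /* neurons_between_reports */

        fann_save(ann, "cascade.net");
        fann_destroy(ann);
        fann_destroy_train(data);
        return 0;
    }
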
diff --git a/examples/xor_train.c b/examples/xor_train.c
index 3878e6c..e720c00 100644
--- a/examples/xor_train.c
+++ b/examples/xor_train.c
@@ -66,6 +66,7 @@ int main()
fann_init_weights(ann, data);
fann_set_training_algorithm(ann, FANN_TRAIN_QUICKPROP);
+
fann_train_on_data(ann, data, max_iterations, iterations_between_reports, desired_error);
/*fann_train_on_data_callback(ann, data, max_iterations, iterations_between_reports, desired_error, print_callback);*/
diff --git a/ltmain.sh b/ltmain.sh
index ea9adba..274587e 100644
--- a/ltmain.sh
+++ b/ltmain.sh
@@ -44,7 +44,7 @@ EXIT_FAILURE=1
PROGRAM=ltmain.sh
PACKAGE=libtool
VERSION=1.5.6
-TIMESTAMP=" (1.1220.2.95 2004/04/11 05:50:42) Debian$Rev: 215 $"
+TIMESTAMP=" (1.1220.2.95 2004/04/11 05:50:42) Debian$Rev: 220 $"
# Check that we have a working $echo.
@@ -1867,10 +1867,6 @@ EOF
$echo "$modename: warning: \`-l' is ignored for archives/objects" 1>&2
continue
fi
- if test "$pass" = conv; then
- deplibs="$deplib $deplibs"
- continue
- fi
name=`$echo "X$deplib" | $Xsed -e 's/^-l//'`
for searchdir in $newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path; do
for search_ext in .la $std_shrext .so .a; do
diff --git a/src/Makefile.am b/src/Makefile.am
index 86a79aa..ab7dc28 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -8,4 +8,4 @@ AM_CFLAGS = -D_REENTRANT
libfloatfann_la_SOURCES = floatfann.c
libdoublefann_la_SOURCES = doublefann.c
libfixedfann_la_SOURCES = fixedfann.c
-libfann_la_SOURCES = fann.c fann_io.c fann_train.c fann_train_data.c fann_options.c fann_error.c
+libfann_la_SOURCES = fann.c fann_io.c fann_train.c fann_train_data.c fann_options.c fann_error.c fann_cascade.c
diff --git a/src/Makefile.in b/src/Makefile.in
index 6be8292..8ae8dde 100644
--- a/src/Makefile.in
+++ b/src/Makefile.in
@@ -139,7 +139,7 @@ AM_CFLAGS = -D_REENTRANT
libfloatfann_la_SOURCES = floatfann.c
libdoublefann_la_SOURCES = doublefann.c
libfixedfann_la_SOURCES = fixedfann.c
-libfann_la_SOURCES = fann.c fann_io.c fann_train.c fann_train_data.c fann_options.c fann_error.c
+libfann_la_SOURCES = fann.c fann_io.c fann_train.c fann_train_data.c fann_options.c fann_error.c fann_cascade.c
subdir = src
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs
@@ -154,7 +154,8 @@ libdoublefann_la_OBJECTS = $(am_libdoublefann_la_OBJECTS)
libfann_la_LDFLAGS =
libfann_la_LIBADD =
am_libfann_la_OBJECTS = fann.lo fann_io.lo fann_train.lo \
- fann_train_data.lo fann_options.lo fann_error.lo
+ fann_train_data.lo fann_options.lo fann_error.lo \
+ fann_cascade.lo
libfann_la_OBJECTS = $(am_libfann_la_OBJECTS)
libfixedfann_la_LDFLAGS =
libfixedfann_la_LIBADD =
@@ -169,6 +170,7 @@ DEFAULT_INCLUDES = -I. -I$(srcdir) -I$(top_builddir)/src/include
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
@AMDEP_TRUE@DEP_FILES = ./$(DEPDIR)/doublefann.Plo ./$(DEPDIR)/fann.Plo \
+@AMDEP_TRUE@ ./$(DEPDIR)/fann_cascade.Plo \
@AMDEP_TRUE@ ./$(DEPDIR)/fann_error.Plo ./$(DEPDIR)/fann_io.Plo \
@AMDEP_TRUE@ ./$(DEPDIR)/fann_options.Plo \
@AMDEP_TRUE@ ./$(DEPDIR)/fann_train.Plo \
@@ -247,6 +249,7 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/doublefann.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fann.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fann_cascade.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fann_error.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fann_io.Plo@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fann_options.Plo@am__quote@
diff --git a/src/fann.c b/src/fann.c
index 7058564..305c642 100644
--- a/src/fann.c
+++ b/src/fann.c
@@ -442,7 +442,7 @@ FANN_EXTERNAL fann_type * FANN_API fann_run(struct fann *ann, fann_type *input)
{
struct fann_neuron *neuron_it, *last_neuron, *neurons, **neuron_pointers;
unsigned int activation_function, i, num_connections, num_input, num_output;
- fann_type neuron_value, *output;
+ fann_type neuron_sum, *output;
fann_type *weights;
struct fann_layer *layer_it, *last_layer;
@@ -558,7 +558,7 @@ FANN_EXTERNAL fann_type * FANN_API fann_run(struct fann *ann, fann_type *input)
continue;
}
- neuron_value = 0;
+ neuron_sum = 0;
num_connections = neuron_it->last_con - neuron_it->first_con;
weights = ann->weights + neuron_it->first_con;
@@ -568,43 +568,52 @@ FANN_EXTERNAL fann_type * FANN_API fann_run(struct fann *ann, fann_type *input)
} else {
neurons = (layer_it-1)->first_neuron;
}
+
+ /* unrolled loop start */
i = num_connections & 3; /* same as modulo 4 */
switch(i) {
case 3:
- neuron_value += fann_mult(weights[2], neurons[2].value);
+ neuron_sum += fann_mult(weights[2], neurons[2].value);
case 2:
- neuron_value += fann_mult(weights[1], neurons[1].value);
+ neuron_sum += fann_mult(weights[1], neurons[1].value);
case 1:
- neuron_value += fann_mult(weights[0], neurons[0].value);
+ neuron_sum += fann_mult(weights[0], neurons[0].value);
case 0:
break;
}
for(;i != num_connections; i += 4){
- neuron_value +=
+ neuron_sum +=
fann_mult(weights[i], neurons[i].value) +
fann_mult(weights[i+1], neurons[i+1].value) +
fann_mult(weights[i+2], neurons[i+2].value) +
fann_mult(weights[i+3], neurons[i+3].value);
}
+
+ /*
+ for(i = 0;i != num_connections; i++){
+ neuron_sum += fann_mult(weights[i], neurons[i].value);
+ }
+ */
+ /* unrolled loop end */
} else {
neuron_pointers = ann->connections + neuron_it->first_con;
i = num_connections & 3; /* same as modulo 4 */
switch(i) {
case 3:
- neuron_value += fann_mult(weights[2], neuron_pointers[2]->value);
+ neuron_sum += fann_mult(weights[2], neuron_pointers[2]->value);
case 2:
- neuron_value += fann_mult(weights[1], neuron_pointers[1]->value);
+ neuron_sum += fann_mult(weights[1], neuron_pointers[1]->value);
case 1:
- neuron_value += fann_mult(weights[0], neuron_pointers[0]->value);
+ neuron_sum += fann_mult(weights[0], neuron_pointers[0]->value);
case 0:
break;
}
for(;i != num_connections; i += 4){
- neuron_value +=
+ neuron_sum +=
fann_mult(weights[i], neuron_pointers[i]->value) +
fann_mult(weights[i+1], neuron_pointers[i+1]->value) +
fann_mult(weights[i+2], neuron_pointers[i+2]->value) +
@@ -612,41 +621,43 @@ FANN_EXTERNAL fann_type * FANN_API fann_run(struct fann *ann, fann_type *input)
}
}
+ neuron_it->sum = neuron_sum;
+
switch(activation_function){
#ifdef FIXEDFANN
case FANN_SIGMOID:
case FANN_SIGMOID_STEPWISE:
- neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6, rh1, rh2, rh3, rh4, rh5, rh6, 0, multiplier, neuron_value);
+ neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6, rh1, rh2, rh3, rh4, rh5, rh6, 0, multiplier, neuron_sum);
break;
case FANN_SIGMOID_SYMMETRIC:
case FANN_SIGMOID_SYMMETRIC_STEPWISE:
- neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6, rh1, rh2, rh3, rh4, rh5, rh6, -multiplier, multiplier, neuron_value);
+ neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6, rh1, rh2, rh3, rh4, rh5, rh6, -multiplier, multiplier, neuron_sum);
break;
#else
case FANN_LINEAR:
- neuron_it->value = (fann_type)fann_linear(steepness, neuron_value);
+ neuron_it->value = (fann_type)fann_linear(steepness, neuron_sum);
break;
case FANN_SIGMOID:
- neuron_it->value = (fann_type)fann_sigmoid(steepness, neuron_value);
+ neuron_it->value = (fann_type)fann_sigmoid(steepness, neuron_sum);
break;
case FANN_SIGMOID_SYMMETRIC:
- neuron_it->value = (fann_type)fann_sigmoid_symmetric(steepness, neuron_value);
+ neuron_it->value = (fann_type)fann_sigmoid_symmetric(steepness, neuron_sum);
break;
case FANN_SIGMOID_STEPWISE:
- neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6, rh1, rh2, rh3, rh4, rh5, rh6, 0, 1, neuron_value);
+ neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6, rh1, rh2, rh3, rh4, rh5, rh6, 0, 1, neuron_sum);
break;
case FANN_SIGMOID_SYMMETRIC_STEPWISE:
- neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6, rh1, rh2, rh3, rh4, rh5, rh6, -1, 1, neuron_value);
+ neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6, rh1, rh2, rh3, rh4, rh5, rh6, -1, 1, neuron_sum);
break;
#endif
case FANN_THRESHOLD:
- neuron_it->value = (fann_type)((neuron_value < 0) ? 0 : 1);
+ neuron_it->value = (fann_type)((neuron_sum < 0) ? 0 : 1);
break;
case FANN_THRESHOLD_SYMMETRIC:
- neuron_it->value = (fann_type)((neuron_value < 0) ? -1 : 1);
+ neuron_it->value = (fann_type)((neuron_sum < 0) ? -1 : 1);
break;
default:
fann_error((struct fann_error *)ann, FANN_E_CANT_USE_ACTIVATION);
@@ -690,13 +701,18 @@ FANN_EXTERNAL void FANN_API fann_randomize_weights(struct fann *ann, fann_type m
for(;weights != last_weight; weights++){
*weights = (fann_type)(fann_rand(min_weight, max_weight));
}
+
+ if(ann->prev_train_slopes != NULL){
+ fann_clear_train_arrays(ann);
+ }
}
FANN_EXTERNAL void FANN_API fann_print_connections(struct fann *ann)
{
struct fann_layer *layer_it;
struct fann_neuron *neuron_it;
- unsigned int i, value;
+ unsigned int i;
+ int value;
char *neurons;
unsigned int num_neurons = fann_get_total_neurons(ann) - fann_get_num_output(ann);
neurons = (char *)malloc(num_neurons+1);
@@ -718,14 +734,23 @@ FANN_EXTERNAL void FANN_API fann_print_connections(struct fann *ann)
memset(neurons, (int)'.', num_neurons);
for(i = neuron_it->first_con; i < neuron_it->last_con; i++){
+ if(ann->weights[i] < 0){
#ifdef FIXEDFANN
- value = (unsigned int)(fann_abs(ann->weights[i]/(double)ann->multiplier)+0.5);
+ value = (int)((ann->weights[i]/(double)ann->multiplier)-0.5);
#else
- value = (unsigned int)(fann_abs(ann->weights[i])+0.5);
+ value = (int)((ann->weights[i])-0.5);
#endif
-
- if(value > 25) value = 25;
- neurons[ann->connections[i] - ann->first_layer->first_neuron] = 'a' + value;
+ if(value < -25) value = -25;
+ neurons[ann->connections[i] - ann->first_layer->first_neuron] = 'a' - value;
+ }else{
+#ifdef FIXEDFANN
+ value = (int)((ann->weights[i]/(double)ann->multiplier)+0.5);
+#else
+ value = (int)((ann->weights[i])+0.5);
+#endif
+ if(value > 25) value = 25;
+ neurons[ann->connections[i] - ann->first_layer->first_neuron] = 'A' + value;
+ }
}
printf("L %3d / N %4d %s\n", layer_it - ann->first_layer,
neuron_it - ann->first_layer->first_neuron, neurons);
@@ -790,6 +815,10 @@ FANN_EXTERNAL void FANN_API fann_init_weights(struct fann *ann, struct fann_trai
}
}
}
+
+ if(ann->prev_train_slopes != NULL){
+ fann_clear_train_arrays(ann);
+ }
}
/* INTERNAL FUNCTION
@@ -828,13 +857,16 @@ struct fann * fann_allocate_structure(float learning_rate, unsigned int num_laye
ann->training_algorithm = FANN_TRAIN_RPROP;
ann->num_MSE = 0;
ann->MSE_value = 0;
+ ann->num_bit_fail = 0;
ann->shortcut_connections = 0;
ann->train_error_function = FANN_ERRORFUNC_TANH;
/* variables used for cascade correlation (reasonable defaults) */
- ann->cascade_change_fraction = 0.001;
- ann->cascade_stagnation_epochs = 64;
+ ann->cascade_change_fraction = 0.01;
+ ann->cascade_stagnation_epochs = 8;
ann->cascade_num_candidates = 8;
+ ann->cascade_weight_multiplier = 1.0;
+ ann->cascade_candidate_limit = 1000.0;
ann->cascade_candidate_scores = NULL;
/* Variables for use with Quickprop training (reasonable defaults) */
@@ -846,6 +878,7 @@ struct fann * fann_allocate_structure(float learning_rate, unsigned int num_laye
ann->rprop_decrease_factor = 0.5;
ann->rprop_delta_min = 0.0;
ann->rprop_delta_max = 50.0;
+ ann->rprop_delta_zero = 0.5;
fann_init_error_data((struct fann_error *)ann);
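
(The fann_run() hunks above rename neuron_value to neuron_sum and store the
raw weighted sum on the neuron before the activation function is applied.
The modulo-4 loop unrolling they mark up can be read in isolation as the
standalone sketch below; the names are illustrative, not library API.)

    #include <stddef.h>

    /* Weighted sum with the same modulo-4 unrolling as fann_run():
       handle the num_connections % 4 leftover terms first, then add
       the remaining terms four at a time. */
    static double weighted_sum(const double *weights, const double *values,
                               size_t num_connections)
    {
        double sum = 0.0;
        size_t i = num_connections & 3; /* same as modulo 4 */

        switch(i) {
            case 3: sum += weights[2] * values[2]; /* fall through */
            case 2: sum += weights[1] * values[1]; /* fall through */
            case 1: sum += weights[0] * values[0]; /* fall through */
            case 0: break;
        }
        for(; i != num_connections; i += 4) {
            sum += weights[i]   * values[i]
                 + weights[i+1] * values[i+1]
                 + weights[i+2] * values[i+2]
                 + weights[i+3] * values[i+3];
        }
        return sum;
    }
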
diff --git a/src/fann_cascade.c b/src/fann_cascade.c
index 1551d59..0d355f9 100644
--- a/src/fann_cascade.c
+++ b/src/fann_cascade.c
@@ -40,6 +40,85 @@ int fann_initialize_candidates(struct fann *ann);
void fann_set_shortcut_connections(struct fann *ann);
+void fann_update_weights_special_quickprop(struct fann *ann, unsigned int num_data, unsigned int first_weight, unsigned int past_end)
+{
+ fann_type *train_slopes = ann->train_slopes;
+ fann_type *weights = ann->weights;
+ fann_type *prev_steps = ann->prev_steps;
+ fann_type *prev_train_slopes = ann->prev_train_slopes;
+
+ fann_type w, prev_step, slope, prev_slope, next_step;
+
+ float epsilon = ann->learning_rate/num_data;
+ float decay = ann->quickprop_decay; /*-0.0001;*/
+ float mu = ann->quickprop_mu; /*1.75;*/
+ float shrink_factor = (float)(mu / (1.0 + mu));
+
+ unsigned int i = first_weight;
+
+ for(;i != past_end; i++){
+ w = weights[i];
+ prev_step = prev_steps[i];
+ slope = train_slopes[i] + decay * w;
+ prev_slope = prev_train_slopes[i];
+ next_step = 0.0;
+
+ /* The step must always be in direction opposite to the slope. */
+ if(prev_step > 0.001) {
+ /* If last step was positive... */
+ if(slope < 0.0) {
+ /* Add in linear term if current slope is still positive.*/
+ next_step -= epsilon * slope;
+ }
+
+ /* If current slope is close to or larger than prev slope... */
+ if(slope <= (shrink_factor * prev_slope)) {
+ next_step += mu * prev_step; /* Take maximum size positive step. */
+ } else {
+ next_step += prev_step * slope / (prev_slope - slope); /* Else, use quadratic estimate. */
+ }
+ } else if(prev_step < -0.001){
+ /* If last step was negative... */
+ if(slope > 0.0){
+ /* Add in linear term if current slope is still negative.*/
+ next_step -= epsilon * slope;
+ }
+
+ /* If current slope is close to or more negative than prev slope... */
+ if(slope >= (shrink_factor * prev_slope)){
+ next_step += mu * prev_step; /* Take maximum size negative step. */
+ } else {
+ next_step += prev_step * slope / (prev_slope - slope); /* Else, use quadratic estimate. */
+ }
+ } else {
+ /* Last step was zero, so use only linear term. */
+ next_step -= epsilon * slope;
+ }
+
+ if(next_step > 100 || next_step < -100){
+ printf("special[%d] weight=%f, slope=%f, next_step=%f, prev_step=%f\n", i, weights[i], slope, next_step, prev_step);
+ }
+
+ /* update global data arrays */
+ prev_steps[i] = next_step;
+ weights[i] = w + next_step;
+ prev_train_slopes[i] = slope;
+ train_slopes[i] = 0.0;
+ }
+}
+
+void fann_print_connections_raw(struct fann *ann)
+{
+ int i;
+ for(i = 0; i < ann->total_connections_allocated; i++){
+ if(i == ann->total_connections){
+ printf("* ");
+ }
+ printf("%f ", ann->weights[i]);
+ }
+ printf("\n\n");
+}
+
/* Cascade training directly on the training data.
The connected_neurons pointers are not valid during training,
but they will be again after training.
@@ -55,11 +134,25 @@ void fann_cascadetrain_on_data_callback(struct fann *ann, struct fann_train_data
}
for(i = 1; i <= max_neurons; i++){
- /* train output neurons */
+ /* DEBUG TODO: print out connections */
+ fann_set_shortcut_connections(ann); /* update connections before printout */
+#ifdef CASCADE_DEBUG
+ fann_print_connections(ann);
+ fann_print_connections_raw(ann);
+#endif
+
+ /* train output neurons */
+#ifdef CASCADE_DEBUG
+ printf("training outputs\n");
+#endif
total_epochs += fann_train_outputs(ann, data, desired_error, max_out_epochs);
error = fann_get_MSE(ann);
+#ifdef CASCADE_DEBUG
+ printf("\n");
+#endif
+
/* print current error */
if(neurons_between_reports &&
(i % neurons_between_reports == 0
@@ -67,13 +160,20 @@ void fann_cascadetrain_on_data_callback(struct fann *ann, struct fann_train_data
|| i == 1
|| error < desired_error)){
if (callback == NULL) {
- printf("Neurons %6d. Current error: %.6f. Epochs %6d\n", i, error, total_epochs);
+ printf("Neurons %6d. Current error: %.6f. Epochs %6d. Bit fail %d.\n", i, error, total_epochs, ann->num_bit_fail);
} else if((*callback)(i, error) == -1){
/* you can break the training by returning -1 */
break;
}
}
+ /* DEBUG TODO: print out connections */
+ fann_set_shortcut_connections(ann); /* update connections before printout */
+#ifdef CASCADE_DEBUG
+ fann_print_connections(ann);
+ fann_print_connections_raw(ann);
+#endif
+
if(error < desired_error){
break;
}
@@ -84,9 +184,22 @@ void fann_cascadetrain_on_data_callback(struct fann *ann, struct fann_train_data
}
/* train new candidates */
+#ifdef CASCADE_DEBUG
+ printf("training candidates\n");
+#endif
total_epochs += fann_train_candidates(ann, data, max_cand_epochs);
+ /* DEBUG TODO: print out connections */
+ fann_set_shortcut_connections(ann); /* update connections before printout */
+#ifdef CASCADE_DEBUG
+ fann_print_connections(ann);
+ fann_print_connections_raw(ann);
+#endif
+
/* this installs the best candidate */
+#ifdef CASCADE_DEBUG
+ printf("install candidate\n");
+#endif
fann_install_candidate(ann);
}
@@ -108,10 +221,11 @@ int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float des
{
float error, initial_error, error_improvement;
float target_improvement = 0.0;
- float backslide_improvement = 0.0;
+ float backslide_improvement = -1.0e20;
unsigned int i;
unsigned int stagnation = max_epochs;
+ /* TODO should perhaps not clear all arrays */
fann_clear_train_arrays(ann);
/* run an initial epoch to set the initial error */
@@ -124,6 +238,8 @@ int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float des
for(i = 1; i < max_epochs; i++){
error = fann_train_outputs_epoch(ann, data);
+ /*printf("Epoch %6d. Current error: %.6f. Bit fail %d.\n", i, error, ann->num_bit_fail);*/
+
if(error < desired_error){
#ifdef CASCADE_DEBUG
printf("Error %f < %f\n", error, desired_error);
@@ -141,8 +257,8 @@ int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float des
{
/*printf("error_improvement=%f, target_improvement=%f, backslide_improvement=%f, stagnation=%d\n", error_improvement, target_improvement, backslide_improvement, stagnation);*/
- target_improvement = error_improvement * (ann->cascade_change_fraction + 1);
- backslide_improvement = error_improvement * (ann->cascade_change_fraction - 1);
+ target_improvement = error_improvement * (1.0 + ann->cascade_change_fraction);
+ backslide_improvement = error_improvement * (1.0 - ann->cascade_change_fraction);
stagnation = i + ann->cascade_stagnation_epochs;
}
@@ -169,6 +285,7 @@ float fann_train_outputs_epoch(struct fann *ann, struct fann_train_data *data)
/* TODO this should actually use the algorithm selected by
ann->training_algorithm
*/
+ /*fann_update_weights_irpropm(ann, (ann->last_layer-1)->first_neuron->first_con, ann->total_connections);*/
fann_update_weights_quickprop(ann, data->num_data, (ann->last_layer-1)->first_neuron->first_con, ann->total_connections);
return fann_get_MSE(ann);
@@ -179,6 +296,10 @@ int fann_reallocate_connections(struct fann *ann, unsigned int total_connections
/* The connections are allocated, but the pointers inside are
only moved at the end of the cascade training session.
*/
+
+#ifdef CASCADE_DEBUG
+ printf("realloc from %d to %d\n", ann->total_connections_allocated, total_connections);
+#endif
ann->connections = (struct fann_neuron **)realloc(ann->connections, total_connections * sizeof(struct fann_neuron *));
if(ann->connections == NULL){
fann_error((struct fann_error *)ann, FANN_E_CANT_ALLOCATE_MEM);
@@ -273,6 +394,7 @@ int fann_initialize_candidates(struct fann *ann)
unsigned int first_candidate_neuron = ann->total_neurons + 1;
unsigned int connection_it, i;
struct fann_neuron *neurons;
+ fann_type initial_slope;
/* First make sure that there is enough room, and if not then allocate a
bit more so that we do not need to allocate more room each time.
@@ -315,6 +437,7 @@ int fann_initialize_candidates(struct fann *ann)
the last layer before the output layer, and in a new layer.
*/
neurons[i].value = 0;
+ neurons[i].sum = 0;
neurons[i].first_con = connection_it;
connection_it += candidate_connections_in;
neurons[i].last_con = connection_it;
@@ -329,11 +452,16 @@ int fann_initialize_candidates(struct fann *ann)
#ifdef CASCADE_DEBUG
printf("random cand weight [%d ... %d]\n", first_candidate_connection, num_connections-1);
#endif
+ if(ann->training_algorithm == FANN_TRAIN_RPROP){
+ initial_slope = ann->rprop_delta_zero;
+ }else{
+ initial_slope = 0.0;
+ }
for(i = first_candidate_connection; i < num_connections; i++){
ann->weights[i] = fann_random_weight();
ann->train_slopes[i] = 0;
ann->prev_steps[i] = 0;
- ann->prev_train_slopes[i] = 0;
+ ann->prev_train_slopes[i] = initial_slope;
}
return 0;
@@ -343,7 +471,7 @@ int fann_train_candidates(struct fann *ann, struct fann_train_data *data, unsign
{
float best_cand_score;
float target_cand_score = 0.0;
- float backslide_cand_score = 0.0;
+ float backslide_cand_score = -1.0e20;
unsigned int i;
unsigned int stagnation = max_epochs;
@@ -358,6 +486,11 @@ int fann_train_candidates(struct fann *ann, struct fann_train_data *data, unsign
for(i = 0; i < max_epochs; i++){
best_cand_score = fann_train_candidates_epoch(ann, data);
+ if(best_cand_score/ann->MSE_value > ann->cascade_candidate_limit){
+ printf("above candidate limit %f/%f > %f", best_cand_score, ann->MSE_value, ann->cascade_candidate_limit);
+ return i+1;
+ }
+
if ((best_cand_score > target_cand_score) ||
(best_cand_score < backslide_cand_score))
{
@@ -365,7 +498,7 @@ int fann_train_candidates(struct fann *ann, struct fann_train_data *data, unsign
printf("best_cand_score=%f, target_cand_score=%f, backslide_cand_score=%f, stagnation=%d\n", best_cand_score, target_cand_score, backslide_cand_score, stagnation);
#endif
- target_cand_score = best_cand_score * (ann->cascade_change_fraction + 1.0);
+ target_cand_score = best_cand_score * (1.0 + ann->cascade_change_fraction);
backslide_cand_score = best_cand_score * (1.0 - ann->cascade_change_fraction);
stagnation = i + ann->cascade_stagnation_epochs;
}
@@ -388,17 +521,17 @@ void fann_update_candidate_slopes(struct fann *ann)
struct fann_neuron *cand_it;
unsigned int i, j, num_connections;
unsigned int num_output = ann->num_output;
- fann_type cand_value, activation, derived, error_value, diff, cand_score;
- fann_type *weights, *out_weights, *cand_slopes;
+ fann_type cand_sum, activation, derived, error_value, diff, cand_score;
+ fann_type *weights, *cand_out_weights, *cand_slopes, *cand_out_slopes;
fann_type *output_train_errors = ann->train_errors + (ann->total_neurons - ann->num_output);
for(cand_it = first_cand; cand_it < last_cand; cand_it++){
- cand_score = 0.0;
+ cand_score = ann->cascade_candidate_scores[cand_it - first_cand];
error_value = 0.0;
/* code more or less stolen from fann_run to do a fast forward pass
*/
- cand_value = 0.0;
+ cand_sum = 0.0;
num_connections = cand_it->last_con - cand_it->first_con;
weights = ann->weights + cand_it->first_con;
@@ -406,43 +539,57 @@ void fann_update_candidate_slopes(struct fann *ann)
i = num_connections & 3; /* same as modulo 4 */
switch(i) {
case 3:
- cand_value += weights[2] * neurons[2].value;
+ cand_sum += weights[2] * neurons[2].value;
case 2:
- cand_value += weights[1] * neurons[1].value;
+ cand_sum += weights[1] * neurons[1].value;
case 1:
- cand_value += weights[0] * neurons[0].value;
+ cand_sum += weights[0] * neurons[0].value;
case 0:
break;
}
for(;i != num_connections; i += 4){
- cand_value +=
+ cand_sum +=
weights[i] * neurons[i].value +
weights[i+1] * neurons[i+1].value +
weights[i+2] * neurons[i+2].value +
weights[i+3] * neurons[i+3].value;
}
+ /*
+ for(i = 0; i < num_connections; i++){
+ cand_sum += weights[i] * neurons[i].value;
+ }
+ */
/* unrolled loop end */
- activation = fann_activation(ann, 0, cand_value);
+ activation = fann_activation(ann, 0, cand_sum);
+ /* printf("%f = sigmoid(%f);\n", activation, cand_sum);*/
+
+ cand_it->sum = cand_sum;
+ cand_it->value = activation;
+
derived = fann_activation_derived(ann->activation_function_hidden,
ann->activation_steepness_hidden, activation);
/* The output weights are located right after the input weights in
the weight array.
*/
- out_weights = weights + num_connections;
+ cand_out_weights = weights + num_connections;
+ cand_out_slopes = ann->train_slopes + cand_it->first_con + num_connections;
for(j = 0; j < num_output; j++){
- diff = (activation * out_weights[j]) - output_train_errors[j];
- /*printf("%f = (%f * %f) - %f;\n", diff, activation, out_weights[j], output_train_errors[j]);*/
- cand_score += (diff * diff);
- error_value += diff * out_weights[j];
+ diff = (activation * cand_out_weights[j]) - output_train_errors[j];
+ printf("diff = %f = (%f * %f) - %f;\n", diff, activation, cand_out_weights[j], output_train_errors[j]);
+ cand_out_slopes[j] += diff * activation;
+ printf("cand_out_slopes[%d] <= %f += %f * %f;\n", j, cand_out_slopes[j], diff, activation);
+ error_value += diff * cand_out_weights[j];
+ cand_score -= (diff * diff);
+ printf("cand_score[%d][%d] = %f -= (%f * %f)\n", cand_it - first_cand, j, cand_score, diff, diff);
}
ann->cascade_candidate_scores[cand_it - first_cand] = cand_score;
error_value *= derived;
-
+
cand_slopes = ann->train_slopes + cand_it->first_con;
for(i = 0; i < num_connections; i++){
cand_slopes[i] += error_value * neurons[i].value;
@@ -455,44 +602,71 @@ void fann_update_candidate_weights(struct fann *ann, unsigned int num_data)
struct fann_neuron *first_cand = (ann->last_layer-1)->last_neuron + 1; /* there is an empty neuron between the actual neurons and the candidate neuron */
struct fann_neuron *last_cand = first_cand + ann->cascade_num_candidates-1;
- fann_update_weights_quickprop(ann, num_data, first_cand->first_con, last_cand->last_con+ann->num_output);
+ fann_update_weights_special_quickprop(ann, num_data, first_cand->first_con, last_cand->last_con+ann->num_output);
}
float fann_train_candidates_epoch(struct fann *ann, struct fann_train_data *data)
{
- unsigned int i;
- fann_type best_score = ann->cascade_candidate_scores[0];
- unsigned int best_candidate = 0;
+ unsigned int i, j;
+ unsigned int best_candidate;
+ fann_type best_score;
unsigned int num_cand = ann->cascade_num_candidates;
- float MSE = fann_get_MSE(ann);
+ fann_type *output_train_errors = ann->train_errors + (ann->total_neurons - ann->num_output);
for(i = 0; i < num_cand; i++){
- ann->cascade_candidate_scores[i] = (fann_type)MSE;
+ ann->cascade_candidate_scores[i] = ann->MSE_value;
}
-
- fann_reset_MSE(ann);
+ /*printf("start score: %f\n", ann->MSE_value);*/
for(i = 0; i < data->num_data; i++){
fann_run(ann, data->input[i]);
- fann_compute_MSE(ann, data->output[i]);
+
+ for(j = 0; j < ann->num_output; j++){
+ /* TODO only debug, but the error is in the opposite direction, this might be useful info */
+ /*if(output_train_errors[j] != (ann->output[j] - data->output[i][j])){
+ printf("difference in calculated error at %f != %f; %f = %f - %f;\n", output_train_errors[j], (ann->output[j] - data->output[i][j]), output_train_errors[j], ann->output[j], data->output[i][j]);
+ }*/
+
+ /*
+ output_train_errors[j] = (data->output[i][j] - ann->output[j])/2;
+ output_train_errors[j] = ann->output[j] - data->output[i][j];
+ */
+
+ output_train_errors[j] = (ann->output[j] - data->output[i][j]);
+
+ if(ann->activation_function_output == FANN_SIGMOID_SYMMETRIC ||
+ ann->activation_function_output == FANN_SIGMOID_SYMMETRIC_STEPWISE){
+ output_train_errors[j] /= 2.0;
+ }
+
+ if ( output_train_errors[j] < -.9999999 )
+ output_train_errors[j] = -17.0;
+ else if ( output_train_errors[j] > .9999999 )
+ output_train_errors[j] = 17.0;
+ else
+ output_train_errors[j] = (fann_type)log ( (1.0+output_train_errors[j]) / (1.0-output_train_errors[j]) );
+
+ }
+
fann_update_candidate_slopes(ann);
}
fann_update_candidate_weights(ann, data->num_data);
/* find the best candidate score */
+ best_candidate = 0;
+ best_score = ann->cascade_candidate_scores[best_candidate];
for(i = 1; i < num_cand; i++){
if(ann->cascade_candidate_scores[i] > best_score){
best_candidate = i;
- best_score = ann->cascade_candidate_scores[i];
+ best_score = ann->cascade_candidate_scores[best_candidate];
}
}
ann->cascade_best_candidate = ann->total_neurons + best_candidate + 1;
- /*printf("Best candidate: %d(%d) with score %f\n", ann->cascade_best_candidate, best_candidate, best_score);*/
+ /*printf("Best candidate: %d(%d) with score %f, real score: %f\n", ann->cascade_best_candidate, best_candidate, ann->MSE_value-best_score, best_score);*/
return best_score;
-
}
/* add a layer at the position pointed to by *layer */
@@ -596,7 +770,7 @@ void fann_add_candidate_neuron(struct fann *ann, struct fann_layer *layer)
/* the output weights for the candidates are located after the input weights */
candidate_output_weight = candidate->last_con;
- /* move the actual neurons and the indexes to the connection arrays */
+ /* move the actual output neurons and the indexes to the connection arrays */
for(neuron_it = (ann->last_layer-1)->last_neuron-1;
neuron_it != neuron_place; neuron_it--){
#ifdef CASCADE_DEBUG
@@ -623,15 +797,16 @@ void fann_add_candidate_neuron(struct fann *ann, struct fann_layer *layer)
/* set the new weight to the newly allocated neuron */
#ifdef CASCADE_DEBUG
- printf("cadidate output weight set to weight[%d] = weight[%d] = %f\n", neuron_it->last_con-1, candidate_output_weight, 0.0 - ann->weights[candidate_output_weight]);
+ printf("cadidate output weight set to weight[%d] = weight[%d] = %f %f\n", neuron_it->last_con-1, candidate_output_weight, -(ann->weights[candidate_output_weight]), (ann->weights[candidate_output_weight]));
#endif
- ann->weights[neuron_it->last_con-1] = 0.0 - ann->weights[candidate_output_weight];
+ ann->weights[neuron_it->last_con-1] = -(ann->weights[candidate_output_weight]) * ann->cascade_weight_multiplier;
candidate_output_weight++;
}
/* Now initialize the actual neuron */
neuron_place->value = 0;
+ neuron_place->sum = 0;
neuron_place->last_con = (neuron_place+1)->first_con;
neuron_place->first_con = neuron_place->last_con - num_connections_in;
#ifdef CASCADE_DEBUG
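
(fann_update_weights_special_quickprop() above follows Fahlman's quickprop
rule. Reduced to a single weight, the step it computes is roughly the sketch
below, where epsilon corresponds to learning_rate/num_data, mu to
quickprop_mu, and slope already includes the decay term; this is a reading
aid, not the library function.)

    /* One quickprop step for a single weight. */
    static double quickprop_step(double slope, double prev_slope,
                                 double prev_step, double epsilon, double mu)
    {
        const double shrink_factor = mu / (1.0 + mu);
        double next_step = 0.0;

        if(prev_step > 0.001) {                  /* last step was positive */
            if(slope < 0.0)
                next_step -= epsilon * slope;    /* linear term */
            if(slope <= shrink_factor * prev_slope)
                next_step += mu * prev_step;     /* cap at mu times last step */
            else                                 /* quadratic estimate */
                next_step += prev_step * slope / (prev_slope - slope);
        } else if(prev_step < -0.001) {          /* last step was negative */
            if(slope > 0.0)
                next_step -= epsilon * slope;
            if(slope >= shrink_factor * prev_slope)
                next_step += mu * prev_step;
            else
                next_step += prev_step * slope / (prev_slope - slope);
        } else {                                 /* near-zero step: linear term only */
            next_step -= epsilon * slope;
        }
        return next_step;
    }
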
diff --git a/src/fann_train.c b/src/fann_train.c
index 6ad8f9f..a88826a 100644
--- a/src/fann_train.c
+++ b/src/fann_train.c
@@ -148,7 +148,7 @@ FANN_EXTERNAL fann_type * FANN_API fann_test(struct fann *ann, fann_type *input,
fann_type *output_begin = fann_run(ann, input);
fann_type *output_it;
const fann_type *output_end = output_begin + ann->num_output;
- fann_type neuron_diff;
+ fann_type neuron_diff, neuron_diff2;
/* calculate the error */
for(output_it = output_begin;
@@ -163,10 +163,14 @@ FANN_EXTERNAL fann_type * FANN_API fann_test(struct fann *ann, fann_type *input,
}
#ifdef FIXEDFANN
- ann->MSE_value += (neuron_diff/(float)ann->multiplier) * (neuron_diff/(float)ann->multiplier);
+ neuron_diff2 = (neuron_diff/(float)ann->multiplier) * (neuron_diff/(float)ann->multiplier);
#else
- ann->MSE_value += (float)(neuron_diff * neuron_diff);
+ neuron_diff2 = (float)(neuron_diff * neuron_diff);
#endif
+ ann->MSE_value += neuron_diff2;
+ if(neuron_diff2 >= 0.25){
+ ann->num_bit_fail++;
+ }
desired_output++;
}
@@ -208,6 +212,7 @@ FANN_EXTERNAL void FANN_API fann_reset_MSE(struct fann *ann)
{
ann->num_MSE = 0;
ann->MSE_value = 0;
+ ann->num_bit_fail = 0;
}
#ifndef FIXEDFANN
@@ -222,7 +227,7 @@ FANN_EXTERNAL void FANN_API fann_reset_MSE(struct fann *ann)
*/
void fann_compute_MSE(struct fann *ann, fann_type *desired_output)
{
- fann_type neuron_value, neuron_diff, *error_it = 0, *error_begin = 0;
+ fann_type neuron_value, neuron_diff, neuron_diff2, *error_it = 0, *error_begin = 0;
struct fann_neuron *last_layer_begin = (ann->last_layer-1)->first_neuron;
const struct fann_neuron *last_layer_end = last_layer_begin + ann->num_output;
const struct fann_neuron *first_neuron = ann->first_layer->first_neuron;
@@ -249,13 +254,18 @@ void fann_compute_MSE(struct fann *ann, fann_type *desired_output)
for(; last_layer_begin != last_layer_end; last_layer_begin++){
neuron_value = last_layer_begin->value;
neuron_diff = *desired_output - neuron_value;
+ /*printf("neuron_diff %f = %f - %f ... %f\n", neuron_diff, *desired_output, neuron_value, last_layer_begin->sum);*/
if(ann->activation_function_output == FANN_SIGMOID_SYMMETRIC ||
ann->activation_function_output == FANN_SIGMOID_SYMMETRIC_STEPWISE){
neuron_diff /= 2.0;
}
-
- ann->MSE_value += (float)(neuron_diff * neuron_diff);
+
+ neuron_diff2 = (float)(neuron_diff * neuron_diff);
+ ann->MSE_value += neuron_diff2;
+ if(neuron_diff2 >= 0.25){
+ ann->num_bit_fail++;
+ }
if(ann->train_error_function){ /* TODO: make this a switch when more error functions exist */
if ( neuron_diff < -.9999999 )
@@ -516,6 +526,7 @@ void fann_update_slopes_batch(struct fann *ann, struct fann_layer *layer_begin,
void fann_clear_train_arrays(struct fann *ann)
{
unsigned int i;
+ fann_type delta_zero;
/* if no room allocated for the slope variables, allocate it now
(calloc clears mem) */
@@ -548,10 +559,11 @@ void fann_clear_train_arrays(struct fann *ann)
return;
}
}
-
+
if(ann->training_algorithm == FANN_TRAIN_RPROP){
+ delta_zero = ann->rprop_delta_zero;
for(i = 0; i < ann->total_connections; i++){
- ann->prev_train_slopes[i] = (fann_type)0.0125;
+ ann->prev_train_slopes[i] = delta_zero;
}
} else {
memset(ann->prev_train_slopes, 0, (ann->total_connections) * sizeof(fann_type));
@@ -632,6 +644,9 @@ void fann_update_weights_quickprop(struct fann *ann, unsigned int num_data, unsi
next_step += epsilon * slope;
}
+ if(next_step > 1000 || next_step < -1000){
+ printf("quickprop[%d] weight=%f, slope=%f, next_step=%f, prev_step=%f\n", i, weights[i], slope, next_step, prev_step);
+ }
/* update global data arrays */
prev_steps[i] = next_step;
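
(The num_bit_fail counter threaded through fann_test() and fann_compute_MSE()
above counts output neurons whose squared error is at least 0.25, i.e. whose
output misses the desired value by 0.5 or more. As a standalone sketch with
illustrative names:)

    #include <stddef.h>

    /* Count "failed bits": outputs off by at least 0.5, so that
       diff * diff >= 0.25, matching the threshold used above. */
    static unsigned int count_bit_fails(const float *actual, const float *desired,
                                        size_t num_output)
    {
        unsigned int fails = 0;
        size_t i;

        for(i = 0; i < num_output; i++) {
            float diff = desired[i] - actual[i];
            if(diff * diff >= 0.25f)
                fails++;
        }
        return fails;
    }
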
diff --git a/src/fann_train_data.c b/src/fann_train_data.c
index f5cf30d..8085d0c 100644
--- a/src/fann_train_data.c
+++ b/src/fann_train_data.c
@@ -210,14 +210,16 @@ FANN_EXTERNAL void FANN_API fann_train_on_data_callback(struct fann *ann, struct
#endif
if(epochs_between_reports && callback == NULL){
- printf("Max epochs %8d. Desired error: %.10f\n", max_epochs, desired_error);
+ printf("Max epochs %8d. Desired error: %.10f.\n", max_epochs, desired_error);
}
/* some training algorithms need their internal state cleared before training starts.
*/
if(ann->training_algorithm == FANN_TRAIN_RPROP ||
ann->training_algorithm == FANN_TRAIN_QUICKPROP){
- fann_clear_train_arrays(ann);
+ if(ann->prev_train_slopes == NULL){
+ fann_clear_train_arrays(ann);
+ }
}
for(i = 1; i <= max_epochs; i++){
@@ -231,7 +233,7 @@ FANN_EXTERNAL void FANN_API fann_train_on_data_callback(struct fann *ann, struct
|| i == 1
|| error < desired_error)){
if (callback == NULL) {
- printf("Epochs %8d. Current error: %.10f\n", i, error);
+ printf("Epochs %8d. Current error: %.10f. Bit fail %d.\n", i, error, ann->num_bit_fail);
} else if((*callback)(i, error) == -1){
/* you can break the training by returning -1 */
break;
diff --git a/src/include/fann_activation.h b/src/include/fann_activation.h
index 0b2fa22..c609fa6 100644
--- a/src/include/fann_activation.h
+++ b/src/include/fann_activation.h
@@ -53,7 +53,8 @@ enum {
/* Sigmoid activation function.
One of the most used activation functions.
span: 0 < y < 1
- y = 1/(1 + exp(-2*s*x)), d = 2*s*y*(1 - y)
+ y = 1/(1 + exp(-2*s*x))
+ d = 2*s*y*(1 - y)
*/
FANN_SIGMOID,
@@ -66,7 +67,8 @@ enum {
/* Symmetric sigmoid activation function, aka. tanh.
One of the most used activation functions.
span: -1 < y < 1
- y = tanh(s*x) = 2/(1 + exp(-2*s*x)) - 1, d = s*(1-(y*y))
+ y = tanh(s*x) = 2/(1 + exp(-2*s*x)) - 1
+ d = s*(1-(y*y))
*/
FANN_SIGMOID_SYMMETRIC,
@@ -78,7 +80,8 @@ enum {
/* Gaussian activation function.
0 when x = -inf, 1 when x = 0 and 0 when x = inf
span: 0 < y < 1
- y = exp(-x*s*x*s), d = -2*x*y*s
+ y = exp(-x*s*x*s)
+ d = -2*x*y*s
*/
FANN_GAUSSIAN,
@@ -90,14 +93,16 @@ enum {
/* Fast (sigmoid like) activation function defined by David Elliott
span: 0 < y < 1
- y = ((x*s) / 2) / (1 + |x*s|) + 0.5, d = s*1/(2*(1+|x|)*(1+|x|))
+ y = ((x*s) / 2) / (1 + |x*s|) + 0.5
+ d = s*1/(2*(1+|x|)*(1+|x|))
NOT implemented yet.
*/
FANN_ELLIOT,
/* Fast (symmetric sigmoid like) activation function defined by David Elliott
span: -1 < y < 1
- y = (x*s) / (1 + |x*s|), d = s*1/((1+|x|)*(1+|x|))
+ y = (x*s) / (1 + |x*s|)
+ d = s*1/((1+|x|)*(1+|x|))
NOT implemented yet.
*/
FANN_ELLIOT_SYMMETRIC
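
(The reformatted comments above pair each activation function y with its
derivative d expressed in terms of y. For the two sigmoid variants, a direct
transcription of those formulas reads as follows; s is the steepness, and
these helpers are illustrative, not library functions.)

    #include <math.h>

    static double sigmoid(double s, double x)          { return 1.0 / (1.0 + exp(-2.0 * s * x)); }
    static double sigmoid_derived(double s, double y)  { return 2.0 * s * y * (1.0 - y); }

    static double sigmoid_symmetric(double s, double x)         { return tanh(s * x); }
    static double sigmoid_symmetric_derived(double s, double y) { return s * (1.0 - y * y); }
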
diff --git a/src/include/fann_data.h b/src/include/fann_data.h
index 7249c25..8739dab 100644
--- a/src/include/fann_data.h
+++ b/src/include/fann_data.h
@@ -33,6 +33,9 @@ struct fann_neuron
*/
unsigned int first_con;
unsigned int last_con;
+ /* The sum of the inputs multiplied by the weights */
+ fann_type sum;
+ /* The value of the activation function applied to the sum */
fann_type value;
#ifdef __GNUC__
}__attribute__((packed));
@@ -175,6 +178,10 @@ struct fann
*/
float MSE_value;
+ /* The number of output bits that fail, i.e. miss the desired output by 0.5 or more (only meaningful for classification problems)
+ */
+ unsigned int num_bit_fail;
+
/* The error function used during training. (default FANN_ERRORFUNC_TANH)
*/
unsigned int train_error_function;
@@ -201,6 +208,14 @@ struct fann
*/
unsigned int cascade_best_candidate;
+ /* The upper limit for a candidate score
+ */
+ fann_type cascade_candidate_limit;
+
+ /* Scale of copied candidate output weights
+ */
+ fann_type cascade_weight_multiplier;
+
/* An array consisting of the score of the individual candidates,
which is used to decide which candidate is the best
*/
@@ -240,6 +255,9 @@ struct fann
/* The maximum stepsize */
float rprop_delta_max;
+ /* The initial stepsize */
+ float rprop_delta_zero;
+
/* Used to contain the slope errors used during batch training
* Is allocated during first training session,
* which means that if we do not train, it is never allocated.
diff --git a/src/include/fann_internal.h b/src/include/fann_internal.h
index 04d6b27..1f54ce0 100644
--- a/src/include/fann_internal.h
+++ b/src/include/fann_internal.h
@@ -24,6 +24,7 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#include <math.h>
#include <stdio.h>
+#include <stdlib.h>
#include "fann_data.h"
#define FANN_FIX_VERSION "FANN_FIX_1.1"
@@ -77,7 +78,8 @@ fann_type fann_activation_derived(unsigned int activation_function,
#define fann_max(x, y) (((x) > (y)) ? (x) : (y))
#define fann_min(x, y) (((x) < (y)) ? (x) : (y))
#define fann_safe_free(x) {if(x) { free(x); x = NULL; }}
-#define fann_clip(x, lo, hi) (((x) < (lo)) ? (lo) : (((x) > (hi)) ? (hi) : (x)))
+/* #define fann_clip(x, lo, hi) (((x) < (lo)) ? (lo) : (((x) > (hi)) ? (hi) : (x))) */
+#define fann_clip(x, lo, hi) (x)
#define fann_rand(min_value, max_value) (((double)(min_value))+(((double)(max_value)-((double)(min_value)))*rand()/(RAND_MAX+1.0)))
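
(For reference: fann_rand() above expands to a uniform draw, and with
fann_clip() now defined as the identity, clipping is effectively disabled.
Written out as a function, the macro is equivalent to this sketch.)

    #include <stdlib.h>

    /* Equivalent of fann_rand(min_value, max_value): a uniform double
       in the half-open interval [min_value, max_value). */
    static double uniform_rand(double min_value, double max_value)
    {
        return min_value + (max_value - min_value) * rand() / (RAND_MAX + 1.0);
    }
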
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/libfann.git