[opencv] 217/251: Added ELU and test for it
Nobuhiro Iwamatsu
iwamatsu at moszumanska.debian.org
Sun Aug 27 23:27:44 UTC 2017
This is an automated email from the git hooks/post-receive script.
iwamatsu pushed a commit to annotated tag 3.3.0
in repository opencv.
commit 8d6b8b45b6450747146e6c9c7a11e2102903b146
Author: Aleksandr Rybnikov <arrybn at gmail.com>
Date: Tue Aug 1 16:58:34 2017 +0300
Added ELU and test for it
---
 modules/dnn/include/opencv2/dnn/all_layers.hpp |  6 +++++
 modules/dnn/src/init.cpp                       |  1 +
 modules/dnn/src/layers/elementwise_layers.cpp  | 37 ++++++++++++++++++++++++++
 modules/dnn/src/tensorflow/tf_importer.cpp     |  7 +++++
 modules/dnn/test/test_layers.cpp               | 18 +++++++++++++
 5 files changed, 69 insertions(+)
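For reference, the nonlinearity added here is ELU with the scale parameter alpha fixed at 1 (the layer reads no values from LayerParams):

    \mathrm{ELU}(x) =
    \begin{cases}
        x,         & x \ge 0 \\
        e^{x} - 1, & x < 0
    \end{cases}

This is exactly what both the scalar loop and the Halide expression in elementwise_layers.cpp below compute.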
diff --git a/modules/dnn/include/opencv2/dnn/all_layers.hpp b/modules/dnn/include/opencv2/dnn/all_layers.hpp
index 4f01227..333656a 100644
--- a/modules/dnn/include/opencv2/dnn/all_layers.hpp
+++ b/modules/dnn/include/opencv2/dnn/all_layers.hpp
@@ -349,6 +349,12 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         static Ptr<ChannelsPReLULayer> create(const LayerParams& params);
     };
+    class CV_EXPORTS ELULayer : public ActivationLayer
+    {
+    public:
+        static Ptr<ELULayer> create(const LayerParams &params);
+    };
+
     class CV_EXPORTS TanHLayer : public ActivationLayer
     {
     public:
diff --git a/modules/dnn/src/init.cpp b/modules/dnn/src/init.cpp
index 97ea169..32ff69e 100644
--- a/modules/dnn/src/init.cpp
+++ b/modules/dnn/src/init.cpp
@@ -96,6 +96,7 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(ChannelsPReLU, ChannelsPReLULayer);
     CV_DNN_REGISTER_LAYER_CLASS(Sigmoid, SigmoidLayer);
     CV_DNN_REGISTER_LAYER_CLASS(TanH, TanHLayer);
+    CV_DNN_REGISTER_LAYER_CLASS(ELU, ELULayer);
     CV_DNN_REGISTER_LAYER_CLASS(BNLL, BNLLLayer);
     CV_DNN_REGISTER_LAYER_CLASS(AbsVal, AbsLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Power, PowerLayer);
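The registration above is what makes the new layer constructible by its string name: CV_DNN_REGISTER_LAYER_CLASS maps "ELU" to ELULayer. A minimal sketch of resolving it through the public factory (variable names are illustrative; error handling omitted):

    #include <opencv2/dnn.hpp>

    int main()
    {
        // ELU reads no parameters in this commit, so an empty LayerParams suffices.
        cv::dnn::LayerParams params;
        cv::Ptr<cv::dnn::Layer> elu =
            cv::dnn::LayerFactory::createLayerInstance("ELU", params);
        CV_Assert(!elu.empty());   // sanity check: the factory now knows "ELU"
        return 0;
    }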
diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp
index 95afd96..9c929a0 100644
--- a/modules/dnn/src/layers/elementwise_layers.cpp
+++ b/modules/dnn/src/layers/elementwise_layers.cpp
@@ -302,6 +302,35 @@ struct SigmoidFunctor
     int64 getFLOPSPerElement() const { return 3; }
 };
+struct ELUFunctor
+{
+    typedef ELULayer Layer;
+
+    explicit ELUFunctor() {}
+
+    void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
+    {
+        for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
+        {
+            for(int i = 0; i < len; i++ )
+            {
+                float x = srcptr[i];
+                dstptr[i] = x >= 0.f ? x : exp(x) - 1;
+            }
+        }
+    }
+
+#ifdef HAVE_HALIDE
+    void attachHalide(const Halide::Expr& input, Halide::Func& top)
+    {
+        Halide::Var x("x"), y("y"), c("c"), n("n");
+        top(x, y, c, n) = select(input >= 0.0f, input, exp(input) - 1);
+    }
+#endif // HAVE_HALIDE
+
+    int64 getFLOPSPerElement() const { return 2; }
+};
+
 struct AbsValFunctor
 {
     typedef AbsLayer Layer;
@@ -504,6 +533,14 @@ Ptr<SigmoidLayer> SigmoidLayer::create(const LayerParams& params)
     return l;
 }
+Ptr<ELULayer> ELULayer::create(const LayerParams& params)
+{
+    Ptr<ELULayer> l(new ElementWiseLayer<ELUFunctor>(ELUFunctor()));
+    l->setParamsFrom(params);
+
+    return l;
+}
+
 Ptr<AbsLayer> AbsLayer::create(const LayerParams& params)
 {
     Ptr<AbsLayer> l(new ElementWiseLayer<AbsValFunctor>());
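The functor's scalar path is a one-line formula; a self-contained sketch (plain C++, independent of OpenCV) that reproduces the same branch on a few sample values:

    #include <cmath>
    #include <cstdio>

    // Mirrors the scalar branch of ELUFunctor::apply above.
    static float elu(float x) { return x >= 0.f ? x : std::exp(x) - 1.f; }

    int main()
    {
        const float xs[] = { -2.f, -0.5f, 0.f, 1.5f };
        for (int i = 0; i < 4; i++)
            std::printf("elu(%+.2f) = %+.6f\n", xs[i], elu(xs[i]));
        // elu(-2.00) = -0.864665, elu(-0.50) = -0.393469,
        // elu(+0.00) = +0.000000, elu(+1.50) = +1.500000
        return 0;
    }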
diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp
index 7797fa7..603b836 100644
--- a/modules/dnn/src/tensorflow/tf_importer.cpp
+++ b/modules/dnn/src/tensorflow/tf_importer.cpp
@@ -677,6 +677,13 @@ void TFImporter::populateNet(Net dstNet)
             connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
         }
+        else if (type == "Elu")
+        {
+            int id = dstNet.addLayer(name, "ELU", layerParams);
+            layer_id[name] = id;
+
+            connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
+        }
         else if (type == "MaxPool")
         {
             layerParams.set("pool", "max");
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 14c984f..4ca06ef 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -268,11 +268,29 @@ static void test_Reshape_Split_Slice_layers()
     normAssert(input, output);
 }
+
 TEST(Layer_Test_Reshape_Split_Slice, Accuracy)
 {
     test_Reshape_Split_Slice_layers();
 }
+TEST(Layer_Conv_Elu, Accuracy)
+{
+    Net net;
+    {
+        Ptr<Importer> importer = createTensorflowImporter(_tf("layer_elu_model.pb"));
+        ASSERT_TRUE(importer != NULL);
+        importer->populateNet(net);
+    }
+    Mat inp = blobFromNPY(_tf("layer_elu_in.npy"));
+    Mat ref = blobFromNPY(_tf("layer_elu_out.npy"));
+
+    net.setInput(inp, "input");
+    Mat out = net.forward();
+
+    normAssert(ref, out);
+}
+
 class Layer_LSTM_Test : public ::testing::Test
 {
 public:
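Assuming a standard OpenCV build, with the OPENCV_TEST_DATA_PATH environment variable pointing at an opencv_extra/testdata checkout that contains the layer_elu_* files, the new test can be run on its own via GoogleTest's filter flag (the binary's location depends on your build tree):

    ./bin/opencv_test_dnn --gtest_filter=Layer_Conv_Elu.Accuracy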
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opencv.git