[opencv] 106/251: Rewrote googlenet tests

Nobuhiro Iwamatsu iwamatsu at moszumanska.debian.org
Sun Aug 27 23:27:32 UTC 2017


This is an automated email from the git hooks/post-receive script.

iwamatsu pushed a commit to annotated tag 3.3.0
in repository opencv.

commit 7d1140340ead8da9d4442d87fabfd3f2a8787c88
Author: Aleksandr Rybnikov <arrybn at gmail.com>
Date:   Thu Jun 29 16:45:17 2017 +0300

    Rewrote googlenet tests
---
 modules/dnn/src/dnn.cpp                       | 11 ++++--
 modules/dnn/src/layers/convolution_layer.cpp  |  2 ++
 modules/dnn/test/imagenet_cls_test_alexnet.py |  5 ++-
 modules/dnn/test/test_googlenet.cpp           | 50 ++++++++++++++++++++-------
 samples/dnn/torch_enet.cpp                    |  2 ++
 5 files changed, 52 insertions(+), 18 deletions(-)

diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index 2743328..5529cad 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -694,6 +694,7 @@ struct Net::Impl
         for (it = layers.begin(); it != layers.end(); it++)
         {
             if (it->second.id != 0) {
+                it->second.inputBlobs.clear();
                 it->second.outputBlobs.clear();
                 it->second.internals.clear();
             }
@@ -1106,8 +1107,10 @@ struct Net::Impl
                         bnormData->skipFlags[DNN_BACKEND_DEFAULT] = true;
                         ld.outputBlobs = layers[lpNext.lid].outputBlobs;
                         if( bnormData->consumers.size() == 1 )
+                        {
                             nextData = &layers[bnormData->consumers[0].lid];
-                        lpNext = LayerPin(bnormData->consumers[0].lid, 0);
+                            lpNext = LayerPin(bnormData->consumers[0].lid, 0);
+                        }
                     }
                 }
 
@@ -1124,7 +1127,10 @@ struct Net::Impl
                         scaleData->skipFlags[DNN_BACKEND_DEFAULT] = true;
                         ld.outputBlobs = layers[lpNext.lid].outputBlobs;
                         if( scaleData->consumers.size() == 1 )
+                        {
                             nextData = &layers[scaleData->consumers[0].lid];
+                            lpNext = LayerPin(scaleData->consumers[0].lid, 0);
+                        }
                     }
                 }
 
@@ -1132,7 +1138,8 @@ struct Net::Impl
                 if( nextData )
                     nextActivLayer = nextData->layerInstance.dynamicCast<ActivationLayer>();
 
-                if( !nextActivLayer.empty() && currLayer->setActivation(nextActivLayer) )
+                if( !nextActivLayer.empty() && pinsToKeep.count(lpNext) == 0
+                        && currLayer->setActivation(nextActivLayer) )
                 {
                     printf_(("\tfused with %s\n", nextActivLayer->name.c_str()));
                     nextData->skipFlags[DNN_BACKEND_DEFAULT] = true;
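
The dnn.cpp hunks tighten the layer-fusion pass: inputBlobs is now cleared
together with outputBlobs, lpNext only advances past a batch-norm or scale
layer when that layer has exactly one consumer, and an activation is no longer
fused into the preceding layer if its own output blob was explicitly requested
by the caller (pinsToKeep). A minimal sketch of that last guard, using
hypothetical stand-in types rather than the real dnn internals:

    #include <set>

    // Hypothetical stand-in for dnn's internal layer pin (layer id + output index).
    struct LayerPin
    {
        int lid, oid;
        bool operator<(const LayerPin &r) const
        { return lid < r.lid || (lid == r.lid && oid < r.oid); }
    };

    // Fusing the activation is only allowed when nobody asked net.forward()
    // for the activation's own blob; otherwise that blob must stay materialized.
    static bool mayFuseActivation(const LayerPin &activPin,
                                  const std::set<LayerPin> &pinsToKeep)
    {
        return pinsToKeep.count(activPin) == 0;
    }

    int main()
    {
        std::set<LayerPin> keep;
        keep.insert(LayerPin{5, 0});                    // e.g. a requested ReLU output
        return mayFuseActivation(LayerPin{5, 0}, keep); // returns 0: must not fuse
    }
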
diff --git a/modules/dnn/src/layers/convolution_layer.cpp b/modules/dnn/src/layers/convolution_layer.cpp
index 3dd63a3..d7c92e6 100644
--- a/modules/dnn/src/layers/convolution_layer.cpp
+++ b/modules/dnn/src/layers/convolution_layer.cpp
@@ -198,6 +198,8 @@ public:
     bool setActivation(const Ptr<ActivationLayer>& layer)
     {
         activ = layer;
+        if (activ.empty())
+            reluslope.clear();
         return !activ.empty();
     }
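
The convolution_layer.cpp change complements this: when setActivation() is
called with an empty pointer (fusion rejected or undone), the ReLU slopes
cached from an earlier fusion are discarded so they cannot leak into a later
un-fused forward pass. Roughly, as a sketch around a hypothetical class rather
than the real convolution layer implementation:

    #include <opencv2/dnn/all_layers.hpp>
    #include <vector>

    // Hypothetical fusable layer; only the setActivation() contract is shown.
    struct FusableConv
    {
        cv::Ptr<cv::dnn::ActivationLayer> activ;
        std::vector<float> reluslope;       // slopes cached for a fused ReLU

        bool setActivation(const cv::Ptr<cv::dnn::ActivationLayer> &layer)
        {
            activ = layer;
            if (activ.empty())
                reluslope.clear();          // un-fusing: drop the stale cache
            return !activ.empty();          // report whether fusion is active
        }
    };

    int main()
    {
        FusableConv conv;
        conv.setActivation(cv::Ptr<cv::dnn::ActivationLayer>());   // no fusion
        return 0;
    }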
 
diff --git a/modules/dnn/test/imagenet_cls_test_alexnet.py b/modules/dnn/test/imagenet_cls_test_alexnet.py
index c136208..46623b8 100644
--- a/modules/dnn/test/imagenet_cls_test_alexnet.py
+++ b/modules/dnn/test/imagenet_cls_test_alexnet.py
@@ -146,9 +146,8 @@ class DnnCaffeModel(Framework):
         return 'DNN'
 
     def get_output(self, input_blob):
-        self.net.setBlob(self.in_blob_name, input_blob)
-        self.net.forward()
-        return self.net.getBlob(self.out_blob_name)
+        self.net.setInput(input_blob, self.in_blob_name)
+        return self.net.forward(self.out_blob_name)
 
 
 class ClsAccEvaluation:
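
In the Python accuracy script, the old setBlob()/forward()/getBlob() sequence
is replaced by the current API: setInput() binds the blob to a named input and
forward() returns the named output directly. The C++ side uses the same calls;
a small sketch with placeholder model files and a zero-filled input blob:

    #include <opencv2/dnn.hpp>

    int main()
    {
        // Placeholder files; the test script loads the Caffe AlexNet the same way.
        cv::dnn::Net net = cv::dnn::readNetFromCaffe("deploy.prototxt",
                                                     "weights.caffemodel");

        int sz[] = {1, 3, 227, 227};                 // NCHW blob, zero-filled
        cv::Mat inputBlob(4, sz, CV_32F, cv::Scalar(0));

        net.setInput(inputBlob, "data");             // was: setBlob(name, blob)
        cv::Mat out = net.forward("prob");           // was: forward() + getBlob(name)
        return out.empty();
    }
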
diff --git a/modules/dnn/test/test_googlenet.cpp b/modules/dnn/test/test_googlenet.cpp
index f3aeb0f..2486916 100644
--- a/modules/dnn/test/test_googlenet.cpp
+++ b/modules/dnn/test/test_googlenet.cpp
@@ -56,16 +56,10 @@ static std::string _tf(TString filename)
     return (getOpenCVExtraDir() + "/dnn/") + filename;
 }
 
-static void launchGoogleNetTest()
+TEST(Reproducibility_GoogLeNet, Accuracy)
 {
-    Net net;
-    {
-        const string proto = findDataFile("dnn/bvlc_googlenet.prototxt", false);
-        const string model = findDataFile("dnn/bvlc_googlenet.caffemodel", false);
-        Ptr<Importer> importer = createCaffeImporter(proto, model);
-        ASSERT_TRUE(importer != NULL);
-        importer->populateNet(net);
-    }
+    Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt", false),
+                               findDataFile("dnn/bvlc_googlenet.caffemodel", false));
 
     std::vector<Mat> inpMats;
     inpMats.push_back( imread(_tf("googlenet_0.png")) );
@@ -77,6 +71,12 @@ static void launchGoogleNetTest()
 
     Mat ref = blobFromNPY(_tf("googlenet_prob.npy"));
     normAssert(out, ref);
+}
+
+TEST(IntermediateBlobs_GoogLeNet, Accuracy)
+{
+    Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt", false),
+                               findDataFile("dnn/bvlc_googlenet.caffemodel", false));
 
     std::vector<String> blobsNames;
     blobsNames.push_back("conv1/7x7_s2");
@@ -84,7 +84,7 @@ static void launchGoogleNetTest()
     blobsNames.push_back("inception_4c/1x1");
     blobsNames.push_back("inception_4c/relu_1x1");
     std::vector<Mat> outs;
-    Mat in = blobFromImage(inpMats[0]);
+    Mat in = blobFromImage(imread(_tf("googlenet_0.png")));
     net.setInput(in, "data");
     net.forward(outs, blobsNames);
     CV_Assert(outs.size() == blobsNames.size());
@@ -95,13 +95,37 @@ static void launchGoogleNetTest()
         std::replace( filename.begin(), filename.end(), '/', '#');
         Mat ref = blobFromNPY(_tf("googlenet_" + filename + ".npy"));
 
-        //normAssert(outs[i], ref, "", 1E-4, 1E-2);
+        normAssert(outs[i], ref, "", 1E-4, 1E-2);
     }
 }
 
-TEST(Reproducibility_GoogLeNet, Accuracy)
+TEST(SeveralCalls_GoogLeNet, Accuracy)
 {
-    launchGoogleNetTest();
+    Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt", false),
+                               findDataFile("dnn/bvlc_googlenet.caffemodel", false));
+
+    std::vector<Mat> inpMats;
+    inpMats.push_back( imread(_tf("googlenet_0.png")) );
+    inpMats.push_back( imread(_tf("googlenet_1.png")) );
+    ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());
+
+    net.setInput(blobFromImages(inpMats), "data");
+    Mat out = net.forward();
+
+    Mat ref = blobFromNPY(_tf("googlenet_prob.npy"));
+    normAssert(out, ref);
+
+    std::vector<String> blobsNames;
+    blobsNames.push_back("conv1/7x7_s2");
+    std::vector<Mat> outs;
+    Mat in = blobFromImage(inpMats[0]);
+    net.setInput(in, "data");
+    net.forward(outs, blobsNames);
+    CV_Assert(outs.size() == blobsNames.size());
+
+    ref = blobFromNPY(_tf("googlenet_conv1#7x7_s2.npy"));
+
+    normAssert(outs[0], ref, "", 1E-4, 1E-2);
 }
 
 }
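
launchGoogleNetTest() is split into three focused tests: batch accuracy
(Reproducibility_GoogLeNet), named intermediate blobs (IntermediateBlobs_GoogLeNet)
and repeated forward calls on one Net (SeveralCalls_GoogLeNet); the previously
commented-out intermediate-blob comparison is enabled again. The pattern the
intermediate-blob checks exercise, condensed into a sketch (layer names match
the tests, file paths are placeholders):

    #include <opencv2/dnn.hpp>
    #include <opencv2/imgcodecs.hpp>
    #include <vector>
    using namespace cv;

    int main()
    {
        dnn::Net net = dnn::readNetFromCaffe("bvlc_googlenet.prototxt",
                                             "bvlc_googlenet.caffemodel");
        net.setInput(dnn::blobFromImage(imread("googlenet_0.png")), "data");

        // One forward pass can return several named blobs, including ReLU
        // outputs that would otherwise be fused away (hence the dnn.cpp guard).
        std::vector<String> names;
        names.push_back("conv1/7x7_s2");
        names.push_back("inception_4c/relu_1x1");
        std::vector<Mat> outs;
        net.forward(outs, names);           // outs[i] corresponds to names[i]
        return (int)outs.size() != (int)names.size();
    }
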
diff --git a/samples/dnn/torch_enet.cpp b/samples/dnn/torch_enet.cpp
index e27dd16..4f9ad21 100644
--- a/samples/dnn/torch_enet.cpp
+++ b/samples/dnn/torch_enet.cpp
@@ -85,7 +85,9 @@ int main(int argc, char **argv)
     }
 
     //! [Make forward pass]
+    tm.start();
     Mat result = net.forward(oBlob);
+    tm.stop();
 
     if (!resultFile.empty()) {
         CV_Assert(result.isContinuous());
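
In the ENet sample, the forward pass is now bracketed by the sample's existing
TickMeter, so the measured time corresponds to the inference call itself. The
timing pattern in isolation (model path and input shape are placeholders):

    #include <opencv2/core/utility.hpp>
    #include <opencv2/dnn.hpp>
    #include <iostream>
    using namespace cv;

    int main()
    {
        dnn::Net net = dnn::readNetFromTorch("enet-model.net");  // placeholder model

        int sz[] = {1, 3, 512, 512};                             // placeholder shape
        net.setInput(Mat(4, sz, CV_32F, Scalar(0)));

        TickMeter tm;
        tm.start();
        Mat result = net.forward();          // time only the forward pass
        tm.stop();
        std::cout << "inference: " << tm.getTimeMilli() << " ms" << std::endl;
        return result.empty();
    }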
