[opengm] 155/386: changed loss interface (+2 squashed commits) [051a8d8] Adjust test to call the loss() function with a graphical model. [1c06923] removed wrapped functions, which will not work anyway... [2f05d63] changed loss interface

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:36:56 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit 88ce0cd7e3731dd6d8c0ecae974d1677aa475086
Author: DerThorsten <thorsten.beier at iwr.uni-heidelberg.de>
Date:   Thu Dec 18 23:56:10 2014 +0100

    changed loss interface (+2 squashed commits)
    [051a8d8] Adjust test to call the loss() function with a graphical model.
    [1c06923] removed wrapped functions, which will not work anyway...
    [2f05d63] changed loss interface
---
 include/opengm/learning/gridsearch-learning.hxx          |  2 +-
 include/opengm/learning/loss/generalized-hammingloss.hxx | 10 +++++-----
 include/opengm/learning/loss/hammingloss.hxx             |  8 ++++----
 include/opengm/learning/loss/noloss.hxx                  |  8 ++++----
 src/interfaces/python/opengm/learning/__init__.py        | 12 +++++++-----
 src/interfaces/python/opengm/learning/pyLoss.cxx         | 12 ++++++------
 src/unittest/learning/test_generalized_hammingloss.cxx   |  2 +-
 7 files changed, 28 insertions(+), 26 deletions(-)
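
The substance of the change, summarized: every loss functor's loss() member now takes the graphical model as its first argument, as the gridsearch-learning.hxx hunk and the adjusted unit test below show. A minimal sketch of the new call pattern, assuming HammingLoss lives in opengm::learning alongside GeneralizedHammingLoss (see the typedef in pyLoss.cxx); evaluateLoss is a hypothetical helper, not part of opengm:

    #include <vector>
    #include <opengm/learning/loss/hammingloss.hxx>

    // Sketch only: loss() is now called with the graphical model as its
    // first argument. GM stands for any opengm graphical model type;
    // labels and groundTruth are labelings of its variables.
    template<class GM>
    double evaluateLoss(const GM& gm,
                        const std::vector<typename GM::LabelType>& labels,
                        const std::vector<typename GM::LabelType>& groundTruth)
    {
        opengm::learning::HammingLoss lossFunction;
        // before this commit the call carried no model argument:
        //   lossFunction.loss(labels.begin(), labels.end(), ...);
        return lossFunction.loss(gm,
                                 labels.begin(), labels.end(),
                                 groundTruth.begin(), groundTruth.end());
    }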

diff --git a/include/opengm/learning/gridsearch-learning.hxx b/include/opengm/learning/gridsearch-learning.hxx
index cb45393..b0b06ad 100644
--- a/include/opengm/learning/gridsearch-learning.hxx
+++ b/include/opengm/learning/gridsearch-learning.hxx
@@ -84,7 +84,7 @@ namespace opengm {
                inf.infer();
                inf.arg(confs[m]);
                const std::vector<typename INF::LabelType>& gt =  dataset_.getGT(m);
-               loss += lossFunction.loss(confs[m].begin(), confs[m].end(), gt.begin(), gt.end());
+               loss += lossFunction.loss(dataset_.getModel(m),confs[m].begin(), confs[m].end(), gt.begin(), gt.end());
             }
             
             // *call visitor*
diff --git a/include/opengm/learning/loss/generalized-hammingloss.hxx b/include/opengm/learning/loss/generalized-hammingloss.hxx
index 16ef2ff..f7158a3 100644
--- a/include/opengm/learning/loss/generalized-hammingloss.hxx
+++ b/include/opengm/learning/loss/generalized-hammingloss.hxx
@@ -32,7 +32,7 @@ public:
                 return nodeLossMultiplier_ < labelLossMultiplier_;
         }
         bool operator>(const GeneralizedHammingLoss & other) const{
-                nodeLossMultiplier_ > labelLossMultiplier_;
+                return nodeLossMultiplier_ > labelLossMultiplier_;
         }
 
         /**
@@ -53,8 +53,8 @@ public:
 public:
     GeneralizedHammingLoss(const Parameter& param = Parameter()) : param_(param){}
 
-    template<class IT1, class IT2>
-            double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
+    template<class GM, class IT1, class IT2>
+            double loss(const GM & gm, IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
 
     template<class GM, class IT>
     void addLoss(GM& gm, IT GTBegin) const;
@@ -104,8 +104,8 @@ inline void GeneralizedHammingLoss::Parameter::load(const hid_t& groupHandle) {
     }
 }
 
-template<class IT1, class IT2>
-double GeneralizedHammingLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
+template<class GM, class IT1, class IT2>
+double GeneralizedHammingLoss::loss(const GM & gm, IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
 {
     double loss = 0.0;
     size_t nodeIndex = 0;
diff --git a/include/opengm/learning/loss/hammingloss.hxx b/include/opengm/learning/loss/hammingloss.hxx
index 783d615..47e272e 100644
--- a/include/opengm/learning/loss/hammingloss.hxx
+++ b/include/opengm/learning/loss/hammingloss.hxx
@@ -35,8 +35,8 @@ namespace opengm {
       public:
          HammingLoss(const Parameter& param = Parameter()) : param_(param){}
 
-         template<class IT1, class IT2>
-         double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
+         template<class GM, class IT1, class IT2>
+         double loss(const GM & gm, IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
   
          template<class GM, class IT>
          void addLoss(GM& gm, IT GTBegin) const;
@@ -50,8 +50,8 @@ namespace opengm {
           marray::hdf5::save(groupHandle,"lossId",name);
       }
 
-      template<class IT1, class IT2>
-      double HammingLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
+      template<class GM, class IT1, class IT2>
+      double HammingLoss::loss(const GM & gm, IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
       {
          double loss = 0.0;
          for(; labelBegin!= labelEnd; ++labelBegin, ++GTBegin){
diff --git a/include/opengm/learning/loss/noloss.hxx b/include/opengm/learning/loss/noloss.hxx
index bce7acc..e207433 100644
--- a/include/opengm/learning/loss/noloss.hxx
+++ b/include/opengm/learning/loss/noloss.hxx
@@ -39,8 +39,8 @@ namespace learning {
 
         }
 
-        template<class IT1, class IT2>
-        double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
+        template<class GM, class IT1, class IT2>
+        double loss(const GM & gm, IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
 
         template<class GM, class IT>
         void addLoss(GM& gm, IT GTBegin) const;
@@ -55,8 +55,8 @@ namespace learning {
         marray::hdf5::save(groupHandle,"lossId",name);
     }
 
-    template<class IT1, class IT2>
-    double NoLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
+    template<class GM, class IT1, class IT2>
+    double NoLoss::loss(const GM & gm, IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
     {
         double loss = 0.0;
         return loss;
diff --git a/src/interfaces/python/opengm/learning/__init__.py b/src/interfaces/python/opengm/learning/__init__.py
index 89f639f..da56988 100644
--- a/src/interfaces/python/opengm/learning/__init__.py
+++ b/src/interfaces/python/opengm/learning/__init__.py
@@ -111,23 +111,25 @@ def lPottsFunctions(nFunctions, numberOfLabels, features, weightIds):
     raise RuntimeError("not yet implemented")
 
 
-def lunaryFunction(weights, numberOfLabels, features, weightIds):
+def lUnaryFunction(weights, numberOfLabels, features, weightIds):
+    assert numberOfLabels >= 2
 
     features = numpy.require(features, dtype=value_type)
     weightIds = numpy.require(weightIds, dtype=index_type)
-    
+
     assert features.ndim == weightIds.ndim
     if features.ndim == 1 or weightIds.ndim == 1:
-        assert numberOfLabels ==2
+        assert numberOfLabels == 2
+        assert features.shape[0]  == weightIds.shape[0]
         features  = features.reshape(1,-1)
         weightIds = weightIds.reshape(1,-1)
 
     assert features.shape[0] in [numberOfLabels, numberOfLabels-1]
     assert weightIds.shape[0] in [numberOfLabels, numberOfLabels-1]
+    assert features.shape[1]  == weightIds.shape[1]
 
 
-
-    return LUnaryFunction(weights=weights, numberOfLabels=numberOfLabels, 
+    return LUnaryFunction(weights=weights, numberOfLabels=int(numberOfLabels), 
                           features=features, weightIds=weightIds)
 
 
diff --git a/src/interfaces/python/opengm/learning/pyLoss.cxx b/src/interfaces/python/opengm/learning/pyLoss.cxx
index abfde86..84a89f9 100644
--- a/src/interfaces/python/opengm/learning/pyLoss.cxx
+++ b/src/interfaces/python/opengm/learning/pyLoss.cxx
@@ -38,18 +38,18 @@ void export_loss(){
     typedef opengm::learning::GeneralizedHammingLoss::Parameter PyGeneralizedHammingLossParameter;
 
     class_<PyHammingLoss >("HammingLoss")
-        .def("loss", &PyHammingLoss::loss<Literator,Literator>)
-        .def("addLoss", &PyHammingLoss::addLoss<GM, Literator>)
+        //.def("loss", &PyHammingLoss::loss<const GM &, Literator,Literator>)
+        //.def("addLoss", &PyHammingLoss::addLoss<GM, Literator>)
     ;
 
     class_<PyNoLoss >("NoLoss")
-        .def("loss", &PyNoLoss::loss<Literator,Literator>)
-        .def("addLoss", &PyNoLoss::addLoss<GM, Literator>)
+        //.def("loss", &PyNoLoss::loss<const GM &,Literator,Literator>)
+        //.def("addLoss", &PyNoLoss::addLoss<GM, Literator>)
     ;
 
     class_<PyGeneralizedHammingLoss >("GeneralizedHammingLoss", init<PyGeneralizedHammingLossParameter>())
-        .def("loss", &PyGeneralizedHammingLoss::loss<Literator,Literator>)
-        .def("addLoss", &PyGeneralizedHammingLoss::addLoss<GM, Literator>)
+        //.def("loss", &PyGeneralizedHammingLoss::loss<const GM &,Literator,Literator>)
+        //.def("addLoss", &PyGeneralizedHammingLoss::addLoss<GM, Literator>)
     ;
 
 
diff --git a/src/unittest/learning/test_generalized_hammingloss.cxx b/src/unittest/learning/test_generalized_hammingloss.cxx
index 80a26e2..743a4e3 100644
--- a/src/unittest/learning/test_generalized_hammingloss.cxx
+++ b/src/unittest/learning/test_generalized_hammingloss.cxx
@@ -43,7 +43,6 @@ int main() {
    ground_truth.push_back(1);
    ground_truth.push_back(1);
 
-   OPENGM_ASSERT_OP(loss.loss(labels.begin(), labels.end(), ground_truth.begin(), ground_truth.end()), ==, 17.5);
 
    // add loss to a model and evaluate for a given labeling
    GM gm;
@@ -52,6 +51,7 @@ int main() {
    gm.addVariable(numberOfLabels);
    gm.addVariable(numberOfLabels);
    gm.addVariable(numberOfLabels);
+   OPENGM_ASSERT_OP(loss.loss(gm, labels.begin(), labels.end(), ground_truth.begin(), ground_truth.end()), ==, 17.5);
 
    // add a unary to node 2 (if indexed from 1)
    opengm::ExplicitFunction<GM::ValueType,GM::IndexType,GM::LabelType> f(&numberOfLabels, &(numberOfLabels)+1, 2.0);
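
For loss types maintained outside the tree, the concept after this commit requires accepting the model parameter even when it goes unused; NoLoss above does exactly that. A hypothetical conforming functor, mirroring the iterator loop visible in the hammingloss.hxx hunk (the addLoss member seen in the headers above is omitted for brevity):

    // Hypothetical example, not part of opengm: a unit-penalty loss
    // satisfying the new interface. The model argument is accepted but
    // ignored, as in NoLoss.
    class MyUnitLoss {
    public:
        template<class GM, class IT1, class IT2>
        double loss(const GM& /*gm*/, IT1 labelBegin, IT1 labelEnd,
                    IT2 GTBegin, IT2 /*GTEnd*/) const
        {
            double l = 0.0;
            // add one unit for every variable labeled differently from
            // the ground truth
            for (; labelBegin != labelEnd; ++labelBegin, ++GTBegin) {
                if (*labelBegin != *GTBegin)
                    l += 1.0;
            }
            return l;
        }
    };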

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git


