[opengm] 163/386: make weights_ mutable to be able to deserialize with function functor

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:37:09 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit 8271c50a778cae56c143e9d1b17a58f3a2ee27dc
Author: mschiegg <martin.schiegg at iwr.uni-heidelberg.de>
Date:   Fri Dec 19 14:59:32 2014 +0100

    make weights_ mutable to be able to deserialize with function functor
---
 include/opengm/functions/function_properties_base.hxx  |  2 +-
 include/opengm/functions/learnable/lpotts.hxx          |  4 ++--
 include/opengm/functions/learnable/lsum_of_experts.hxx |  4 ++--
 include/opengm/functions/learnable/lunary.hxx          |  4 ++--
 include/opengm/learning/dataset/dataset_io.hxx         | 18 ++++++++++++++++++
 5 files changed, 25 insertions(+), 7 deletions(-)

diff --git a/include/opengm/functions/function_properties_base.hxx b/include/opengm/functions/function_properties_base.hxx
index 4561507..6ff7c42 100644
--- a/include/opengm/functions/function_properties_base.hxx
+++ b/include/opengm/functions/function_properties_base.hxx
@@ -133,7 +133,7 @@ public:
    INDEX weightIndex(const size_t weightNumber)const{
       throw RuntimeError("Function base has no parameters,this needs to be implemented in any function type");
    }
-   void setWeights(const opengm::learning::Weights<VALUE>& weights){}
+   void setWeights(const opengm::learning::Weights<VALUE>& /*weights*/) const {}
    template<class ITERATOR> 
    VALUE weightGradient(size_t,ITERATOR) const {return 0;}
 
diff --git a/include/opengm/functions/learnable/lpotts.hxx b/include/opengm/functions/learnable/lpotts.hxx
index 9ce052e..5ce3263 100644
--- a/include/opengm/functions/learnable/lpotts.hxx
+++ b/include/opengm/functions/learnable/lpotts.hxx
@@ -50,7 +50,7 @@ public:
    template<class ITERATOR> T operator()(ITERATOR) const;
  
    // parameters
-   void setWeights(const opengm::learning::Weights<T>& weights)
+   void setWeights(const opengm::learning::Weights<T>& weights) const
       {weights_ = &weights;}
    size_t numberOfWeights()const
      {return weightIDs_.size();}
@@ -60,7 +60,7 @@ public:
    T weightGradient(size_t,ITERATOR) const;
 
 protected:
-   const opengm::learning::Weights<T> * weights_;
+   mutable const opengm::learning::Weights<T> * weights_;
    L numLabels_;
    std::vector<size_t> weightIDs_;
    std::vector<T> feat_;
diff --git a/include/opengm/functions/learnable/lsum_of_experts.hxx b/include/opengm/functions/learnable/lsum_of_experts.hxx
index 6b0072b..bb366a6 100644
--- a/include/opengm/functions/learnable/lsum_of_experts.hxx
+++ b/include/opengm/functions/learnable/lsum_of_experts.hxx
@@ -46,7 +46,7 @@ public:
    template<class ITERATOR> T operator()(ITERATOR) const;
  
    // parameters
-   void setWeights(const opengm::learning::Weights<T>& weights)
+   void setWeights(const opengm::learning::Weights<T>& weights) const
       {weights_ = &weights;}
    size_t numberOfWeights()const
      {return weightIDs_.size();}
@@ -56,7 +56,7 @@ public:
    T weightGradient(size_t,ITERATOR) const;
 
 protected:
-   const opengm::learning::Weights<T>*     weights_;
+   mutable const opengm::learning::Weights<T>* weights_;
    std::vector<L>                          shape_;
    std::vector<size_t>                     weightIDs_;
    std::vector<marray::Marray<T> >         feat_;
diff --git a/include/opengm/functions/learnable/lunary.hxx b/include/opengm/functions/learnable/lunary.hxx
index 22c120e..df7b6c5 100644
--- a/include/opengm/functions/learnable/lunary.hxx
+++ b/include/opengm/functions/learnable/lunary.hxx
@@ -49,7 +49,7 @@ public:
     template<class ITERATOR> T operator()(ITERATOR) const;
 
     // parameters
-    void setWeights(const opengm::learning::Weights<T>& weights){
+    void setWeights(const opengm::learning::Weights<T>& weights) const{
         weights_ = &weights;
     }
 
@@ -69,7 +69,7 @@ private:
 
     }
 protected:
-    const opengm::learning::Weights<T> *    weights_;
+    mutable const opengm::learning::Weights<T> *    weights_;
     std::vector<size_t> labelOffset_;
     std::vector<size_t> weightIds_;
     std::vector<V>      features_;
diff --git a/include/opengm/learning/dataset/dataset_io.hxx b/include/opengm/learning/dataset/dataset_io.hxx
index 8203e0a..19c8e44 100644
--- a/include/opengm/learning/dataset/dataset_io.hxx
+++ b/include/opengm/learning/dataset/dataset_io.hxx
@@ -14,6 +14,18 @@
 namespace opengm{
    namespace datasets{
 
+      template <class W>
+      struct WeightSetter {
+         public:
+           WeightSetter(W& w) : weights_(w) {}
+
+           template<class F>
+           void operator()(F& f) const { f.setWeights(weights_); }
+
+         private:
+           W& weights_;
+      };
+
       class DatasetSerialization{
       public:
          template<class DATASET>
@@ -81,6 +93,8 @@ namespace opengm{
          dataset.isCached_.resize(numModel);
          dataset.weights_ = opengm::learning::Weights<ValueType>(numWeights);
          OPENGM_ASSERT_OP(dataset.lossParams_.size(), ==, numModel);
+         WeightSetter<opengm::learning::Weights<ValueType> > wSetter(dataset.weights_);
+
          //Load Models and ground truth
          for(size_t m=0; m<numModel; ++m){
             std::stringstream ss;
@@ -89,6 +103,10 @@ namespace opengm{
             marray::hdf5::loadVec(file, "gt", dataset.gts_[m]);
             opengm::hdf5::load(dataset.gms_[m],ss.str(),"gm");
 
+            for(size_t fi = 0; fi < dataset.gms_[m].numberOfFactors(); ++fi) {
+                dataset.gms_[m][fi].callFunctor(wSetter);
+            }
+
             LossParameterType lossParam;
             hid_t lossGrp = marray::hdf5::openGroup(file, "loss");
             lossParam.load(lossGrp);
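
Why this change is needed: when a dataset is deserialized, the learnable functions inside the freshly loaded graphical models no longer point at a valid weights object, so each function's weights_ pointer has to be re-bound to dataset.weights_. The new WeightSetter functor does that through the factor's callFunctor, which apparently only exposes the function through a const access path; hence setWeights() becomes const and weights_ becomes a mutable pointer. Below is a minimal, self-contained sketch of that pattern, using simplified stand-in classes rather than the actual OpenGM types:

    // Sketch only: stand-ins for opengm::learning::Weights, a learnable
    // function, and the WeightSetter functor added in dataset_io.hxx.
    #include <cassert>
    #include <cstddef>
    #include <vector>

    template <class T>
    struct Weights {                     // stand-in for opengm::learning::Weights<T>
        explicit Weights(std::size_t n) : values_(n, T(0)) {}
        std::vector<T> values_;
    };

    template <class T>
    class LearnableFunction {            // stand-in for e.g. LPottsFunction
    public:
        LearnableFunction() : weights_(NULL) {}
        // const so it can be called on a function reached via const access
        void setWeights(const Weights<T>& weights) const { weights_ = &weights; }
        bool hasWeights() const { return weights_ != NULL; }
    protected:
        mutable const Weights<T>* weights_;   // mutable: re-bound after loading
    };

    template <class W>
    struct WeightSetter {                // mirrors the functor from the diff
        explicit WeightSetter(W& w) : weights_(w) {}
        template <class F>
        void operator()(F& f) const { f.setWeights(weights_); }
    private:
        W& weights_;
    };

    int main() {
        Weights<double> weights(3);
        const LearnableFunction<double> f;          // const view, as a factor would hand it out
        WeightSetter<Weights<double> > setter(weights);
        setter(f);                                  // compiles only because setWeights() is const
        assert(f.hasWeights());
        return 0;
    }

Dropping either the mutable qualifier on weights_ or the const on setWeights() makes the sketch fail to compile, which is why both changes land together in this commit.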

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git
