[DRE-commits] [ruby-webrobots] 05/07: Refresh patches

Antonio Terceiro terceiro at moszumanska.debian.org
Fri Jul 15 12:03:59 UTC 2016


This is an automated email from the git hooks/post-receive script.

terceiro pushed a commit to branch master
in repository ruby-webrobots.

commit 67411e8e58bad97e7e534ff9e2cc146462deb7e3
Author: Antonio Terceiro <terceiro at debian.org>
Date:   Fri Jul 15 08:52:46 2016 -0300

    Refresh patches
---
 debian/changelog                      |  1 +
 debian/patches/disable-internet-tests | 81 +++++++++--------------------------
 debian/patches/remove-rubygems-calls  |  8 ++--
 3 files changed, 25 insertions(+), 65 deletions(-)

diff --git a/debian/changelog b/debian/changelog
index 467299f..a4796e9 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -9,6 +9,7 @@ ruby-webrobots (0.1.2-1) UNRELEASED; urgency=medium
 
   [ Antonio Terceiro ]
   * New upstream release
+    - Refresh patches
     - new build adds required Rubygems metadata (Closes: #830235)
   * Update packaging with a `dh-make-ruby -w` run
     - run test suite with debian/ruby-tests.rake; drop debian/ruby-tests.rb
diff --git a/debian/patches/disable-internet-tests b/debian/patches/disable-internet-tests
index 05a9cdb..d9a27dc 100644
--- a/debian/patches/disable-internet-tests
+++ b/debian/patches/disable-internet-tests
@@ -5,68 +5,29 @@ Description: Disable test requiring Internet
  Package builds can not rely on working Internet to be available.
  As skip() does not work on 1.8, this patch comments the test out.
 
-Index: ruby-webrobots/test/test_webrobots.rb
-===================================================================
---- ruby-webrobots.orig/test/test_webrobots.rb	2012-05-05 23:33:39.201696147 +0200
-+++ ruby-webrobots/test/test_webrobots.rb	2012-05-05 23:35:47.831921873 +0200
-@@ -465,33 +465,33 @@
+--- a/test/test_webrobots.rb
++++ b/test/test_webrobots.rb
+@@ -615,7 +615,7 @@ Disallow: /
+         }
+       end
      end
-   end
- 
--  context "robots.txt in the real world" do
--    setup do
--      @testbot = WebRobots.new('TestBot')
--      @msnbot = WebRobots.new('TestMSNBot')	# matches msnbot
--    end
--
--    should "be parsed for major sites" do
--      assert_nothing_raised {
--        assert !@testbot.allowed?("http://www.google.com/search")
--        assert !@testbot.allowed?("https://www.google.com/search")
--        assert !@testbot.allowed?("http://www.google.com/news/section?pz=1&cf=all&ned=jp&topic=y&ict=ln")
--        assert @testbot.allowed?("http://www.google.com/news/directory?pz=1&cf=all&ned=us&hl=en&sort=users&category=6")
--      }
--      assert_nothing_raised {
--        assert @testbot.allowed?("http://www.yahoo.com/")
--        assert !@testbot.allowed?("http://www.yahoo.com/?")
--        assert !@testbot.allowed?("http://www.yahoo.com/p/foo")
--      }
--      assert_nothing_raised {
--        assert !@testbot.allowed?("http://store.apple.com/vieworder")
--        assert @msnbot.allowed?("http://store.apple.com/vieworder")
--      }
--      assert_nothing_raised {
--        assert !@testbot.allowed?("http://github.com/login")
--      }
--    end
 -  end
-+#  context "robots.txt in the real world" do
-+#    setup do
-+#      @testbot = WebRobots.new('TestBot')
-+#      @msnbot = WebRobots.new('TestMSNBot')	# matches msnbot
-+#    end
-+#
-+#    should "be parsed for major sites" do
-+#      assert_nothing_raised {
-+#        assert !@testbot.allowed?("http://www.google.com/search")
-+#        assert !@testbot.allowed?("https://www.google.com/search")
-+#        assert !@testbot.allowed?("http://www.google.com/news/section?pz=1&cf=all&ned=jp&topic=y&ict=ln")
-+#        assert @testbot.allowed?("http://www.google.com/news/directory?pz=1&cf=all&ned=us&hl=en&sort=users&category=6")
-+#      }
-+#      assert_nothing_raised {
-+#        assert @testbot.allowed?("http://www.yahoo.com/")
-+#        assert !@testbot.allowed?("http://www.yahoo.com/?")
-+#        assert !@testbot.allowed?("http://www.yahoo.com/p/foo")
-+#      }
-+#      assert_nothing_raised {
-+#        assert !@testbot.allowed?("http://store.apple.com/vieworder")
-+#        assert @msnbot.allowed?("http://store.apple.com/vieworder")
-+#      }
-+#      assert_nothing_raised {
-+#        assert !@testbot.allowed?("http://github.com/login")
-+#      }
-+#    end
-+#  end
++  end if false
  
    context "meta robots tag" do
      setup do
+--- a/test/helper.rb
++++ b/test/helper.rb
+@@ -10,12 +10,6 @@
+ require 'test/unit'
+ require 'shoulda'
+ require 'webmock/test_unit'
+-require 'vcr'
+-
+-VCR.configure do |c|
+-  c.cassette_library_dir = 'test/vcr_cassettes'
+-  c.hook_into :webmock
+-end
+ 
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
+ $LOAD_PATH.unshift(File.dirname(__FILE__))
diff --git a/debian/patches/remove-rubygems-calls b/debian/patches/remove-rubygems-calls
index 0219078..cc06cae 100644
--- a/debian/patches/remove-rubygems-calls
+++ b/debian/patches/remove-rubygems-calls
@@ -5,10 +5,8 @@ Description: Remove Rubygems calls
  Rubygems and Bundler are forbidden by Debian policy to be used within
  packaged Ruby modules
 
-Index: ruby-webrobots/test/helper.rb
-===================================================================
---- ruby-webrobots.orig/test/helper.rb	2012-01-18 12:53:54.000000000 -0600
-+++ ruby-webrobots/test/helper.rb	2012-01-18 12:59:26.000000000 -0600
+--- a/test/helper.rb
++++ b/test/helper.rb
 @@ -1,12 +1,12 @@
 -require 'rubygems'
 -require 'bundler'
@@ -30,4 +28,4 @@ Index: ruby-webrobots/test/helper.rb
 +# end
  require 'test/unit'
  require 'shoulda'
- 
+ require 'webmock/test_unit'
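
For reference, the refreshed disable-internet-tests patch no longer comments out the
"robots.txt in the real world" context line by line; it appends an `if false` modifier
to the context's closing `end` instead. A minimal standalone sketch of that Ruby idiom
(the `context` helper below is only a stand-in for shoulda's, and all names are
hypothetical):

    # A trailing `if false` on the closing `end` turns the whole method call
    # into dead code: Ruby still parses the block, but `context` is never
    # invoked, so none of the tests inside are ever registered.
    def context(name, &block)
      puts "registering #{name}"
      block.call
    end

    context "offline tests" do
      puts "this runs"
    end

    context "tests that need the Internet" do
      puts "this never runs"
    end if false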

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-ruby-extras/ruby-webrobots.git


