[DRE-commits] [SCM] ruby-webrobots.git branch, master, updated. debian/0.0.13-1-3-g04fcbbe
Christian Hofstaedtler
christian at hofstaedtler.name
Sat May 5 21:39:07 UTC 2012
The following commit has been merged in the master branch:
commit 04fcbbe2f89d40467af34f8f2aa8ef0ac762ad13
Author: Christian Hofstaedtler <christian at hofstaedtler.name>
Date: Sat May 5 23:38:07 2012 +0200
Completely comment out offending test
skip() doesn't work on Ruby 1.8.
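For illustration only, and not what this commit does: a guard that degrades gracefully on Ruby 1.8 could look roughly like the sketch below. The class and test names are hypothetical stand-ins for the shoulda-based suite in test/test_webrobots.rb, and the sketch assumes skip() is only provided by the minitest-backed test/unit that ships with Ruby 1.9, not by Ruby 1.8's test/unit.

    require 'test/unit'

    class TestWebRobotsOffline < Test::Unit::TestCase
      def test_real_world_robots_txt
        # Ruby 1.9's test/unit is backed by minitest and provides skip();
        # Ruby 1.8's test/unit does not, so fall back to an early return.
        if respond_to?(:skip)
          skip('Disabled in Debian so build works without Internet')
        else
          return # quietly pass on Ruby 1.8 instead of hitting the network
        end
        # the network-dependent assertions from the original test would follow here
      end
    end

Commenting the whole context out, as done below, avoids depending on any skip-like behaviour at all, at the cost of dropping the test from the suite entirely.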
diff --git a/debian/changelog b/debian/changelog
index c9cc4cb..57df130 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,6 +1,6 @@
ruby-webrobots (0.0.13-2) unstable; urgency=low
- * Disable a test so package build does not depend on the Internet
+ * Remove a test so package build does not depend on the Internet
(Closes: #666642)
-- Christian Hofstaedtler <christian at hofstaedtler.name> Sat, 05 May 2012 23:20:41 +0200
diff --git a/debian/patches/disable-internet-tests b/debian/patches/disable-internet-tests
index a56a55a..05a9cdb 100644
--- a/debian/patches/disable-internet-tests
+++ b/debian/patches/disable-internet-tests
@@ -2,17 +2,71 @@ Author: Christian Hofstaedtler <christian at hofstaedtler.name>
Forwarded: not-needed
Last-updated: 2012-05-05
Description: Disable test requiring Internet
- Package builds can not rely on working Internet to be available, so
- this disables the test.
+ Package builds can not rely on working Internet to be available.
+ As skip() does not work on 1.8, this patch comments the test out.
+
Index: ruby-webrobots/test/test_webrobots.rb
===================================================================
---- ruby-webrobots.orig/test/test_webrobots.rb 2012-05-05 23:15:19.480919860 +0200
-+++ ruby-webrobots/test/test_webrobots.rb 2012-05-05 23:16:03.988304277 +0200
-@@ -472,6 +472,7 @@
+--- ruby-webrobots.orig/test/test_webrobots.rb 2012-05-05 23:33:39.201696147 +0200
++++ ruby-webrobots/test/test_webrobots.rb 2012-05-05 23:35:47.831921873 +0200
+@@ -465,33 +465,33 @@
end
+ end
- should "be parsed for major sites" do
-+ skip('Disabled in Debian so build works without Internet')
- assert_nothing_raised {
- assert !@testbot.allowed?("http://www.google.com/search")
- assert !@testbot.allowed?("https://www.google.com/search")
+- context "robots.txt in the real world" do
+- setup do
+- @testbot = WebRobots.new('TestBot')
+- @msnbot = WebRobots.new('TestMSNBot') # matches msnbot
+- end
+-
+- should "be parsed for major sites" do
+- assert_nothing_raised {
+- assert !@testbot.allowed?("http://www.google.com/search")
+- assert !@testbot.allowed?("https://www.google.com/search")
+- assert !@testbot.allowed?("http://www.google.com/news/section?pz=1&cf=all&ned=jp&topic=y&ict=ln")
+- assert @testbot.allowed?("http://www.google.com/news/directory?pz=1&cf=all&ned=us&hl=en&sort=users&category=6")
+- }
+- assert_nothing_raised {
+- assert @testbot.allowed?("http://www.yahoo.com/")
+- assert !@testbot.allowed?("http://www.yahoo.com/?")
+- assert !@testbot.allowed?("http://www.yahoo.com/p/foo")
+- }
+- assert_nothing_raised {
+- assert !@testbot.allowed?("http://store.apple.com/vieworder")
+- assert @msnbot.allowed?("http://store.apple.com/vieworder")
+- }
+- assert_nothing_raised {
+- assert !@testbot.allowed?("http://github.com/login")
+- }
+- end
+- end
++# context "robots.txt in the real world" do
++# setup do
++# @testbot = WebRobots.new('TestBot')
++# @msnbot = WebRobots.new('TestMSNBot') # matches msnbot
++# end
++#
++# should "be parsed for major sites" do
++# assert_nothing_raised {
++# assert !@testbot.allowed?("http://www.google.com/search")
++# assert !@testbot.allowed?("https://www.google.com/search")
++# assert !@testbot.allowed?("http://www.google.com/news/section?pz=1&cf=all&ned=jp&topic=y&ict=ln")
++# assert @testbot.allowed?("http://www.google.com/news/directory?pz=1&cf=all&ned=us&hl=en&sort=users&category=6")
++# }
++# assert_nothing_raised {
++# assert @testbot.allowed?("http://www.yahoo.com/")
++# assert !@testbot.allowed?("http://www.yahoo.com/?")
++# assert !@testbot.allowed?("http://www.yahoo.com/p/foo")
++# }
++# assert_nothing_raised {
++# assert !@testbot.allowed?("http://store.apple.com/vieworder")
++# assert @msnbot.allowed?("http://store.apple.com/vieworder")
++# }
++# assert_nothing_raised {
++# assert !@testbot.allowed?("http://github.com/login")
++# }
++# end
++# end
+
+ context "meta robots tag" do
+ setup do
--
ruby-webrobots.git