[SCM] WebKit Debian packaging branch, debian/experimental, updated. upstream/1.3.3-9427-gc2be6fc

tony at chromium.org tony at chromium.org
Wed Dec 22 13:18:12 UTC 2010


The following commit has been merged in the debian/experimental branch:
commit efa63b9088d033dfe04a88b8ab198c7d316f3711
Author: tony at chromium.org <tony at chromium.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Date:   Fri Sep 10 18:31:27 2010 +0000

    2010-09-10  Tony Chang  <tony at chromium.org>
    
            Unreviewed, rolling out r67216.
            http://trac.webkit.org/changeset/67216
            https://bugs.webkit.org/show_bug.cgi?id=44709
    
            Broke
    
            * Scripts/webkitpy/layout_tests/deduplicate_tests.py:
            * Scripts/webkitpy/layout_tests/deduplicate_tests_unittest.py:
    
    git-svn-id: http://svn.webkit.org/repository/webkit/trunk@67218 268f45cc-cd09-0410-ab3c-d52691b4dbfc

diff --git a/WebKitTools/ChangeLog b/WebKitTools/ChangeLog
index 72d6ff1..8326b83 100644
--- a/WebKitTools/ChangeLog
+++ b/WebKitTools/ChangeLog
@@ -1,5 +1,16 @@
 2010-09-10  Tony Chang  <tony at chromium.org>
 
+        Unreviewed, rolling out r67216.
+        http://trac.webkit.org/changeset/67216
+        https://bugs.webkit.org/show_bug.cgi?id=44709
+
+        Broke
+
+        * Scripts/webkitpy/layout_tests/deduplicate_tests.py:
+        * Scripts/webkitpy/layout_tests/deduplicate_tests_unittest.py:
+
+2010-09-10  Tony Chang  <tony at chromium.org>
+
         Reviewed by Ojan Vafai.
 
         deduplicate-tests should be runnable from any WebKit directory
diff --git a/WebKitTools/Scripts/webkitpy/layout_tests/deduplicate_tests.py b/WebKitTools/Scripts/webkitpy/layout_tests/deduplicate_tests.py
index 39b9431..c543d91 100644
--- a/WebKitTools/Scripts/webkitpy/layout_tests/deduplicate_tests.py
+++ b/WebKitTools/Scripts/webkitpy/layout_tests/deduplicate_tests.py
@@ -36,7 +36,6 @@ import os
 import subprocess
 import sys
 import re
-import webkitpy.common.checkout.scm as scm
 import webkitpy.common.system.executive as executive
 import webkitpy.common.system.logutils as logutils
 import webkitpy.layout_tests.port.factory as port_factory
@@ -57,8 +56,7 @@ def port_fallbacks():
         try:
             platforms = port_factory.get(port_name).baseline_search_path()
         except NotImplementedError:
-            _log.error("'%s' lacks baseline_search_path(), please fix."
-                       % port_name)
+            _log.error("'%s' lacks baseline_search_path(), please fix." % port_name)
             fallbacks[port_name] = [_BASE_PLATFORM]
             continue
         fallbacks[port_name] = [os.path.basename(p) for p in platforms][1:]
@@ -158,35 +156,11 @@ def has_intermediate_results(test, fallbacks, matching_platform,
     return False
 
 
-def get_relative_test_path(filename, relative_to):
-    """Constructs a relative path to |filename| from |relative_to|.  Also, if
-    |relative_to| is a sub directory of the layout test directory and
-    |filename| is not in |relative_to|, return None.  This lets us filter
-    the results to only show results that are under where the script was run
-    from.
-    Args:
-        filename: The test file we're trying to get a relative path to.
-        relative_to: The absolute path we're relative to.
-    Returns:
-        A relative path to filename or None.
-    """
-    layout_test_dir = os.path.join(scm.find_checkout_root(), 'LayoutTests')
-    abs_path = os.path.join(layout_test_dir, filename)
-    path = os.path.relpath(abs_path, relative_to)
-    # If we're in the layout test directory, only return results that are below
-    # where the tool was run from.
-    if (relative_to.startswith(layout_test_dir) and path.startswith('..')):
-        return None
-    return path
-
-
-def find_dups(hashes, port_fallbacks, relative_to):
+def find_dups(hashes, port_fallbacks):
     """Yields info about redundant test expectations.
     Args:
         hashes: a list of hashes as returned by cluster_file_hashes.
-        port_fallbacks: a list of fallback information as returned by
-            get_port_fallbacks.
-        relative_to: the directory that we want the results relative to
+        port_fallbacks: a list of fallback information as returned by get_port_fallbacks.
     Returns:
         a tuple containing (test, platform, fallback, platforms)
     """
@@ -202,20 +176,13 @@ def find_dups(hashes, port_fallbacks, relative_to):
         # See if any of the platforms are redundant with each other.
         for platform in platforms.keys():
             for fallback in port_fallbacks[platform]:
-                if fallback not in platforms.keys():
-                    continue
-                # We have to verify that there isn't an intermediate result
-                # that causes this duplicate hash to exist.
-                if has_intermediate_results(test, port_fallbacks[platform],
-                                            fallback):
-                    continue
-                # We print the relative path so it's easy to pipe the results
-                # to xargs rm.
-                path = get_relative_test_path(platforms[platform], relative_to)
-                if not path:
-                    continue
-                yield {'test': test, 'platform': platform,
-                       'fallback': fallback, 'path': path}
+                if fallback in platforms.keys():
+                    # We have to verify that there isn't an intermediate result
+                    # that causes this duplicate hash to exist.
+                    if not has_intermediate_results(test,
+                            port_fallbacks[platform], fallback):
+                        path = os.path.join('LayoutTests', platforms[platform])
+                        yield test, platform, fallback, path
 
 
 def deduplicate(glob_pattern):
@@ -225,11 +192,7 @@ def deduplicate(glob_pattern):
     Returns:
         a dictionary containing test, path, platform and fallback.
     """
-    current_dir = os.getcwd()
-    try:
-        os.chdir(scm.find_checkout_root())
-        fallbacks = port_fallbacks()
-        hashes = cluster_file_hashes(glob_pattern)
-        return list(find_dups(hashes, fallbacks, current_dir))
-    finally:
-        os.chdir(current_dir)
+    fallbacks = port_fallbacks()
+    hashes = cluster_file_hashes(glob_pattern)
+    return [{'test': test, 'path': path, 'platform': platform, 'fallback': fallback}
+             for test, platform, fallback, path in find_dups(hashes, fallbacks)]
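For reference, a minimal sketch (not part of the patch) of the data shape the rolled-back code produces: find_dups() again yields (test, platform, fallback, path) tuples whose paths are rooted at 'LayoutTests', and deduplicate() wraps each tuple in a dict. The sample_* names and the example test below are invented for illustration; only the dict keys and the 'LayoutTests' prefix come from the diff and the unit test expectations.

# Illustrative sketch only: mirrors how the restored deduplicate() repackages
# the tuples yielded by find_dups().  The sample tuple is made-up data; the
# key names and the 'LayoutTests/...' path prefix match the patch above.
import os

def sample_find_dups():
    # Stand-in for find_dups(hashes, fallbacks): yields
    # (test, platform, fallback, path) tuples.
    yield ('fast/example/test.html', 'chromium-linux', 'chromium-win',
           os.path.join('LayoutTests',
                        'platform/chromium-linux/fast/example/test-expected.txt'))

def sample_deduplicate():
    # Mirrors the restored deduplicate(): wrap each yielded tuple in a dict.
    return [{'test': test, 'path': path, 'platform': platform,
             'fallback': fallback}
            for test, platform, fallback, path in sample_find_dups()]

if __name__ == '__main__':
    for dup in sample_deduplicate():
        # A platform baseline whose hash matches its fallback's is redundant;
        # the path printed here is relative to the checkout root.
        print('%(test)s: %(platform)s duplicates %(fallback)s -> %(path)s' % dup)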
diff --git a/WebKitTools/Scripts/webkitpy/layout_tests/deduplicate_tests_unittest.py b/WebKitTools/Scripts/webkitpy/layout_tests/deduplicate_tests_unittest.py
index 3044487..be2e381 100644
--- a/WebKitTools/Scripts/webkitpy/layout_tests/deduplicate_tests_unittest.py
+++ b/WebKitTools/Scripts/webkitpy/layout_tests/deduplicate_tests_unittest.py
@@ -186,22 +186,3 @@ class ListDuplicatesTest(unittest.TestCase):
                            'fallback': 'chromium-win',
                            'platform': 'chromium-linux'},
                           result[0])
-
-    def test_get_relative_test_path(self):
-        checkout_root = scm.find_checkout_root()
-        layout_test_dir = os.path.join(checkout_root, 'LayoutTests')
-        test_cases = (
-            ('platform/mac/test.html',
-             ('platform/mac/test.html', layout_test_dir)),
-            ('LayoutTests/platform/mac/test.html',
-             ('platform/mac/test.html', checkout_root)),
-            ('../LayoutTests/platform/mac/test.html',
-             ('platform/mac/test.html', os.path.join(checkout_root, 'WebCore'))),
-            ('test.html',
-             ('platform/mac/test.html', os.path.join(layout_test_dir, 'platform/mac'))),
-            (None,
-             ('platform/mac/test.html', os.path.join(layout_test_dir, 'platform/win'))),
-        )
-        for expected, inputs in test_cases:
-            self.assertEquals(expected,
-                              deduplicate_tests.get_relative_test_path(*inputs))
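With get_relative_test_path() and its unit test removed, deduplicate() reports paths relative to the checkout root rather than to the directory the tool was run from. A caller that still wants the removed per-directory filtering could approximate it along these lines; this is only a sketch under assumptions, with checkout_root and the results list as placeholders rather than real webkitpy calls.

# Sketch of approximating the removed get_relative_test_path() filtering on
# the caller's side.  'checkout_root' and 'results' are placeholders; in the
# real tool the root would come from webkitpy.common.checkout.scm and the
# results from deduplicate_tests.deduplicate().
import os

checkout_root = '/path/to/WebKit'                      # placeholder
layout_test_dir = os.path.join(checkout_root, 'LayoutTests')
results = [{'path': 'LayoutTests/platform/chromium-linux/foo-expected.txt'}]

cwd = os.getcwd()
for result in results:
    abs_path = os.path.join(checkout_root, result['path'])
    rel_path = os.path.relpath(abs_path, cwd)
    # Mirror the removed helper: when run from inside LayoutTests, skip
    # results that live outside the directory the tool was run from.
    if cwd.startswith(layout_test_dir) and rel_path.startswith('..'):
        continue
    print(rel_path)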

-- 
WebKit Debian packaging


