[SCM] WebKit Debian packaging branch, debian/experimental, updated. upstream/1.3.3-9427-gc2be6fc

victorw at chromium.org victorw at chromium.org
Wed Dec 22 12:14:32 UTC 2010


The following commit has been merged in the debian/experimental branch:
commit f643c6ceeb08b6104ea8bc59608163a438ff9984
Author: victorw at chromium.org <victorw at chromium.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Date:   Tue Aug 17 18:47:41 2010 +0000

    2010-08-17  Victor Wang  <victorw at chromium.org>
    
            Reviewed by ojan at chromium.org.
    
            Update test results server:
            1. Normalize test results and times after merging (prune tests where
               all runs pass or do not have data, truncate all test items to max
               number of builds)
            2. "times" values should be ints, not strings.
            3. When inserting a new test item, keep the old data regardless of
               whether it has the same item type as the new one.
    
            https://bugs.webkit.org/show_bug.cgi?id=43861
    
            * TestResultServer/model/jsonresults.py:
            * TestResultServer/model/jsonresults_unittest.py:
    
    
    git-svn-id: http://svn.webkit.org/repository/webkit/trunk@65527 268f45cc-cd09-0410-ab3c-d52691b4dbfc
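
For readers skimming the patch, the following is a minimal standalone sketch of
the normalization rules described above; it is not the server module itself.
The constants and the [[count, value], ...] run-length-encoded list shape are
taken from the diff below, while the helper names and the plain-function form
are illustrative only.

JSON_RESULTS_PASS = "P"
JSON_RESULTS_NO_DATA = "N"
JSON_RESULTS_MIN_TIME = 1       # seconds
JSON_RESULTS_MAX_BUILDS = 1500

def truncate_to_max_builds(encoded_list):
    """Keep items up to and including the first one that pushes the cumulative
    run count past JSON_RESULTS_MAX_BUILDS; drop the rest.
    [[3, "A"], [1, "Q"]] encodes the per-build sequence "AAAQ"."""
    num_builds = 0
    for index, item in enumerate(encoded_list, start=1):
        num_builds += item[0]
        if num_builds > JSON_RESULTS_MAX_BUILDS:
            return encoded_list[:index]
    return encoded_list

def should_prune(results, times):
    """A test is dropped when every run has no data, or when every run passes
    and no run takes JSON_RESULTS_MIN_TIME seconds or more."""
    all_pass = len(results) == 1 and results[0][1] == JSON_RESULTS_PASS
    all_no_data = len(results) == 1 and results[0][1] == JSON_RESULTS_NO_DATA
    max_time = max(time[1] for time in times)
    return all_no_data or (all_pass and max_time < JSON_RESULTS_MIN_TIME)

# 1500 fast passing runs are pruned; passing but slow (2 s) runs are kept.
assert should_prune([[1500, "P"]], [[1500, 0]])
assert not should_prune([[1500, "P"]], [[1500, 2]])
# The item that crosses the 1500-build limit is kept; later items are dropped.
assert truncate_to_max_builds([[1, "T"], [1500, "F"], [1, "I"]]) == \
    [[1, "T"], [1500, "F"]]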

diff --git a/WebKitTools/ChangeLog b/WebKitTools/ChangeLog
index 96bda63..8103a86 100644
--- a/WebKitTools/ChangeLog
+++ b/WebKitTools/ChangeLog
@@ -1,3 +1,20 @@
+2010-08-17  Victor Wang  <victorw at chromium.org>
+
+        Reviewed by ojan at chromium.org.
+
+        Update test results server:
+        1. Normalize test results and times after merging (prune tests where
+           all runs pass or do not have data, truncate all test items to max
+           number of builds)
+        2. "times" values should be ints, not strings.
+        3. When inserting a new test item, keep the old data regardless of
+           whether it has the same item type as the new one.
+
+        https://bugs.webkit.org/show_bug.cgi?id=43861
+
+        * TestResultServer/model/jsonresults.py:
+        * TestResultServer/model/jsonresults_unittest.py:
+
 2010-08-16  Sam Weinig  <sam at webkit.org>
 
         Reviewed by Mark Rowe.
diff --git a/WebKitTools/TestResultServer/model/jsonresults.py b/WebKitTools/TestResultServer/model/jsonresults.py
index f8b685e..a0f25a9 100755
--- a/WebKitTools/TestResultServer/model/jsonresults.py
+++ b/WebKitTools/TestResultServer/model/jsonresults.py
@@ -40,8 +40,11 @@ JSON_RESULTS_BUILD_NUMBERS = "buildNumbers"
 JSON_RESULTS_TESTS = "tests"
 JSON_RESULTS_RESULTS = "results"
 JSON_RESULTS_TIMES = "times"
+JSON_RESULTS_PASS = "P"
+JSON_RESULTS_NO_DATA = "N"
+JSON_RESULTS_MIN_TIME = 1
 JSON_RESULTS_VERSION = 3
-JSON_RESULTS_MAX_BUILDS = 750
+JSON_RESULTS_MAX_BUILDS = 1500
 
 
 class JsonResults(object):
@@ -218,13 +221,14 @@ class JsonResults(object):
                     results = incremental_test[JSON_RESULTS_RESULTS]
                     times = incremental_test[JSON_RESULTS_TIMES]
                 else:
-                    results = [[1, "P"]]
-                    times = [[1, "0"]]
+                    results = [[1, JSON_RESULTS_PASS]]
+                    times = [[1, 0]]
 
                 cls._insert_item_run_length_encoded(
                     results, aggregated_test[JSON_RESULTS_RESULTS])
                 cls._insert_item_run_length_encoded(
                     times, aggregated_test[JSON_RESULTS_TIMES])
+                cls._normalize_results_json(test_name, aggregated_json)
             else:
                 aggregated_json[test_name] = incremental_json[test_name]
 
@@ -243,17 +247,69 @@ class JsonResults(object):
                 aggregated_item[0][0] = min(
                     aggregated_item[0][0] + item[0], JSON_RESULTS_MAX_BUILDS)
             else:
-                # The test item values need to be summed from continuous runs.
-                # If there is an older item (not most recent one) whose value is
-                # same as the one to insert, then we should remove the old item
-                # from aggregated list.
-                for i in reversed(range(1, len(aggregated_item))):
-                    if item[1] == aggregated_item[i][1]:
-                        aggregated_item.pop(i)
-
                 aggregated_item.insert(0, item)
 
     @classmethod
+    def _normalize_results_json(cls, test_name, aggregated_json):
+        """ Prune tests where all runs pass or tests that no longer exist and
+        truncate all results to JSON_RESULTS_MAX_BUILDS.
+
+        Args:
+          test_name: Name of the test.
+          aggregated_json: The JSON object with all the test results for
+                           this builder.
+        """
+
+        aggregated_test = aggregated_json[test_name]
+        aggregated_test[JSON_RESULTS_RESULTS] = \
+            cls._remove_items_over_max_number_of_builds(
+                aggregated_test[JSON_RESULTS_RESULTS])
+        aggregated_test[JSON_RESULTS_TIMES] = \
+            cls._remove_items_over_max_number_of_builds(
+                aggregated_test[JSON_RESULTS_TIMES])
+
+        is_all_pass = cls._is_results_all_of_type(
+            aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_PASS)
+        is_all_no_data = cls._is_results_all_of_type(
+            aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_NO_DATA)
+
+        max_time = max(
+            [time[1] for time in aggregated_test[JSON_RESULTS_TIMES]])
+        # Remove all passes/no-data from the results to reduce noise and
+        # filesize. If a test passes every run, but
+        # takes >= JSON_RESULTS_MIN_TIME to run, don't throw away the data.
+        if (is_all_no_data or
+           (is_all_pass and max_time < JSON_RESULTS_MIN_TIME)):
+            del aggregated_json[test_name]
+
+    @classmethod
+    def _remove_items_over_max_number_of_builds(cls, encoded_list):
+        """Removes items from the run-length encoded list after the final
+        item that exceeds the max number of builds to track.
+
+        Args:
+          encoded_list: run-length encoded results. An array of arrays, e.g.
+              [[3,'A'],[1,'Q']] encodes AAAQ.
+        """
+        num_builds = 0
+        index = 0
+        for result in encoded_list:
+            num_builds = num_builds + result[0]
+            index = index + 1
+            if num_builds > JSON_RESULTS_MAX_BUILDS:
+                return encoded_list[:index]
+
+        return encoded_list
+
+    @classmethod
+    def _is_results_all_of_type(cls, results, type):
+        """Returns whether all the results are of the given type
+        (e.g. all passes).
+        """
+
+        return len(results) == 1 and results[0][1] == type
+
+    @classmethod
     def _check_json(cls, builder, json):
         """Check whether the given json is valid.
 
diff --git a/WebKitTools/TestResultServer/model/jsonresults_unittest.py b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
index 8cab017..940eebb 100755
--- a/WebKitTools/TestResultServer/model/jsonresults_unittest.py
+++ b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
@@ -26,6 +26,7 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+import jsonresults
 import unittest
 
 from jsonresults import JsonResults
@@ -127,7 +128,7 @@ class JsonResultsTest(unittest.TestCase):
         # Nothing to merge.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
             # Incremental results
             None,
             # Expect no merge happens.
@@ -137,7 +138,7 @@ class JsonResultsTest(unittest.TestCase):
         # Nothing to merge.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
             # Incremental results
             ([], []),
             # Expected no merge happens.
@@ -149,9 +150,9 @@ class JsonResultsTest(unittest.TestCase):
             # Aggregated results
             None,
             # Incremental results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
             # Expected results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]))
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]))
 
         # Single test for single run.
         # Incremental results has the latest build and same test results for
@@ -160,11 +161,11 @@ class JsonResultsTest(unittest.TestCase):
         # of runs for "P" (200 + 1) to get merged results.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
             # Incremental results
-            (["3"], [["001.html", "[1,\"P\"]", "[1,\"0\"]"]]),
+            (["3"], [["001.html", "[1,\"F\"]", "[1,0]"]]),
             # Expected results
-            (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"]]))
+            (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"]]))
 
         # Single test for single run.
         # Incremental results has the latest build but different test results
@@ -172,72 +173,68 @@ class JsonResultsTest(unittest.TestCase):
         # Insert the incremental results at the first place.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
             # Incremental results
-            (["3"], [["001.html", "[1, \"I\"]", "[1,\"1\"]"]]),
+            (["3"], [["001.html", "[1, \"I\"]", "[1,1]"]]),
             # Expected results
-            (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"P\"]", "[1,\"1\"],[200,\"0\"]"]]))
+            (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"]", "[1,1],[200,0]"]]))
 
         # Single test for single run.
         # Incremental results has the latest build but different test results
         # for that run.
-        # The test "results" and "times" need to be continuous, so the old
-        # [10,"I"] result should be dropped because a new result of same type [1,"I"]
-        # is inserted in front of [200,"P"].
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"],[10,\"I\"]", "[200,\"0\"],[10,\"1\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"],[10,\"I\"]", "[200,0],[10,1]"]]),
             # Incremental results
-            (["3"], [["001.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+            (["3"], [["001.html", "[1,\"I\"]", "[1,1]"]]),
             # Expected results
-            (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"P\"]", "[1,\"1\"],[200,\"0\"]"]]))
+            (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"F\"],[10,\"I\"]", "[1,1],[200,0],[10,1]"]]))
 
         # Multiple tests for single run.
         # All tests have incremental updates.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[100,\"I\"]", "[100,\"1\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]),
             # Incremental results
-            (["3"], [["001.html", "[1,\"P\"]", "[1,\"0\"]"], ["002.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+            (["3"], [["001.html", "[1,\"F\"]", "[1,0]"], ["002.html", "[1,\"I\"]", "[1,1]"]]),
             # Expected results
-            (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"], ["002.html", "[101,\"I\"]", "[101,\"1\"]"]]))
+            (["3", "2", "1"], [["001.html", "[201,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]]))
 
         # Multiple tests for single run.
-        # Not all tests have update.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[100,\"I\"]", "[100,\"1\"]"], ["003.html", "[10,\"F\"]", "[10,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[100,\"I\"]", "[100,1]"]]),
             # Incremental results
-            (["3"], [["002.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+            (["3"], [["002.html", "[1,\"I\"]", "[1,1]"]]),
             # Expected results
-            (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"], ["002.html", "[101,\"I\"]", "[101,\"1\"]"], ["003.html", "[1,\"P\"],[10,\"F\"]", "[11,\"0\"]"]]))
+            (["3", "2", "1"], [["001.html", "[1,\"P\"],[200,\"F\"]", "[201,0]"], ["002.html", "[101,\"I\"]", "[101,1]"]]))
 
         # Single test for multiple runs.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
             # Incremental results
-            (["4", "3"], [["001.html", "[2, \"I\"]", "[2,\"2\"]"]]),
+            (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"]]),
             # Expected results
-            (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"P\"]", "[2,\"2\"],[200,\"0\"]"]]))
+            (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"]]))
 
         # Multiple tests for multiple runs.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[10,\"Z\"]", "[10,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"], ["002.html", "[10,\"Z\"]", "[10,0]"]]),
             # Incremental results
-            (["4", "3"], [["001.html", "[2, \"I\"]", "[2,\"2\"]"], ["002.html", "[1,\"C\"]", "[1,\"1\"]"]]),
+            (["4", "3"], [["001.html", "[2, \"I\"]", "[2,2]"], ["002.html", "[1,\"C\"]", "[1,1]"]]),
             # Expected results
-            (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"P\"]", "[2,\"2\"],[200,\"0\"]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,\"1\"],[10,\"0\"]"]]))
+            (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"F\"]", "[2,2],[200,0]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,1],[10,0]"]]))
 
         # Test the build in incremental results is older than the most recent
         # build in aggregated results.
         # The incremental results should be dropped and no merge happens.
         self._test_merge(
             # Aggregated results
-            (["3", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            (["3", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
             # Incremental results
-            (["2"], [["001.html", "[1, \"P\"]", "[1,\"0\"]"]]),
+            (["2"], [["001.html", "[1, \"F\"]", "[1,0]"]]),
             # Expected no merge happens.
             None)
 
@@ -246,11 +243,49 @@ class JsonResultsTest(unittest.TestCase):
         # The incremental results should be dropped and no merge happens.
         self._test_merge(
             # Aggregated results
-            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            (["2", "1"], [["001.html", "[200,\"F\"]", "[200,0]"]]),
             # Incremental results
-            (["3", "2"], [["001.html", "[2, \"P\"]", "[2,\"0\"]"]]),
+            (["3", "2"], [["001.html", "[2, \"F\"]", "[2,0]"]]),
             # Expected no merge happens.
             None)
 
+        # Remove a test when none of its runs has any data.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"N\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
+            # Incremental results
+            (["3"], [["001.html", "[1,\"N\"]", "[1,0]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
+            # Expected results
+            (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
+
+        # Remove a test where all runs pass and the max running time is <= 1 second.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
+            # Incremental results
+            (["3"], [["001.html", "[1,\"P\"]", "[1,1]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
+            # Expected results
+            (["3", "2", "1"], [["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
+
+        # Do not remove a test where all runs pass but the max running time is > 1 second.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,0]"], ["002.html", "[10,\"F\"]", "[10,0]"]]),
+            # Incremental results
+            (["3"], [["001.html", "[1,\"P\"]", "[1,2]"], ["002.html", "[1,\"P\"]", "[1,0]"]]),
+            # Expected results
+            (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[1,2],[200,0]"], ["002.html", "[1,\"P\"],[10,\"F\"]", "[11,0]"]]))
+
+        # Remove items from test results and times that exceed the max number
+        # of builds to track.
+        max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS)
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[" + max_builds + ",\"F\"],[1,\"I\"]", "[" + max_builds + ",0],[1,1]"]]),
+            # Incremental results
+            (["3"], [["001.html", "[1,\"T\"]", "[1,1]"]]),
+            # Expected results
+            (["3", "2", "1"], [["001.html", "[1,\"T\"],[" + max_builds + ",\"F\"]", "[1,1],[" + max_builds + ",0]"]]))
+
 if __name__ == '__main__':
     unittest.main()
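
A sketch of the run-length-encoded merge step these tests exercise, as it
stands after this patch (the duplicate-dropping branch removed above is gone),
reconstructed from the hunk's context lines and the expected results; the
free-function form and the max_builds argument are illustrative simplifications
rather than the class method's actual signature.

def insert_item_run_length_encoded(incremental_item, aggregated_item,
                                   max_builds=1500):
    """Prepend incremental [[count, value], ...] entries onto the aggregated
    list. When the newest aggregated value matches, the counts are summed
    (capped at max_builds); otherwise the entry is inserted at the front and,
    after this patch, older entries with the same value are no longer dropped.
    """
    for item in incremental_item:
        if len(aggregated_item) and item[1] == aggregated_item[0][1]:
            aggregated_item[0][0] = min(aggregated_item[0][0] + item[0],
                                        max_builds)
        else:
            aggregated_item.insert(0, item)

# Same value as the newest aggregated entry: counts are summed (200 + 1).
agg = [[200, "F"]]
insert_item_run_length_encoded([[1, "F"]], agg)
assert agg == [[201, "F"]]

# Different value: the new entry is prepended and the older [10, "I"] is kept.
agg = [[200, "F"], [10, "I"]]
insert_item_run_length_encoded([[1, "I"]], agg)
assert agg == [[1, "I"], [200, "F"], [10, "I"]]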

-- 
WebKit Debian packaging


