[SCM] WebKit Debian packaging branch, debian/experimental, updated. upstream/1.3.3-9427-gc2be6fc

victorw at chromium.org victorw at chromium.org
Wed Dec 22 11:43:11 UTC 2010


The following commit has been merged in the debian/experimental branch:
commit 02db5da3acbfbb6e36c07095331ee9c99a9ffc33
Author: victorw at chromium.org <victorw at chromium.org@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Date:   Wed Aug 4 23:50:58 2010 +0000

    2010-08-04  Victor Wang  <victorw at chromium.org>
    
            Reviewed by Ojan Vafai.
    
            -. Add results.json incremental merging functionality to the test results app engine.
            -. blobstore is not suitable for App Engine data merging and there is no API yet to
               programmatically edit an existing blob or write a new one, so replace blobstore
               with datastore. If a file is oversized (>1000*1000 bytes), store its data in
               multiple datastore entries.
            -. Fix styles.
    
            Test: jsonresults_unittest to test the merging logic.
    
            https://bugs.webkit.org/show_bug.cgi?id=38599
    
            * TestResultServer/handlers/dashboardhandler.py:
            * TestResultServer/handlers/menu.py:
            * TestResultServer/handlers/testfilehandler.py:
            * TestResultServer/main.py:
            * TestResultServer/model/dashboardfile.py:
            * TestResultServer/model/datastorefile.py: Added.
            * TestResultServer/model/jsonresults.py: Added.
            * TestResultServer/model/jsonresults_unittest.py: Added.
            * TestResultServer/model/testfile.py:
            * TestResultServer/templates/uploadform.html:
    
    
    git-svn-id: http://svn.webkit.org/repository/webkit/trunk@64687 268f45cc-cd09-0410-ab3c-d52691b4dbfc
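
    As an illustration (not part of the patch): a minimal client-side sketch of
    posting an incremental results file to the new /testfile/upload handler.
    The form field names ("builder", "testtype", "incremental", "file") match
    the PARAM_* constants in testfilehandler.py; the host name, the helper
    name upload_incremental, and the hand-rolled multipart encoding are
    assumptions made for the example.

        import httplib

        def upload_incremental(host, builder, test_type, json_data):
            boundary = "----TestResultServerBoundary"
            fields = [("builder", builder),
                      ("testtype", test_type),
                      ("incremental", "1")]
            parts = []
            for name, value in fields:
                parts.append("--%s\r\nContent-Disposition: form-data; "
                             "name=\"%s\"\r\n\r\n%s\r\n" % (boundary, name, value))
            # The handler merges when "incremental" is set and the file is
            # named results.json (or the file is incremental_results.json).
            parts.append("--%s\r\nContent-Disposition: form-data; name=\"file\"; "
                         "filename=\"results.json\"\r\n"
                         "Content-Type: text/plain\r\n\r\n%s\r\n"
                         % (boundary, json_data))
            parts.append("--%s--\r\n" % boundary)
            body = "".join(parts)
            headers = {"Content-Type":
                           "multipart/form-data; boundary=%s" % boundary}
            connection = httplib.HTTPConnection(host)
            connection.request("POST", "/testfile/upload", body, headers)
            response = connection.getresponse()
            # Upload.post() writes "OK" on success, or "FAIL: ..." with a
            # 500 status on error.
            return response.status, response.read()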

diff --git a/WebKitTools/ChangeLog b/WebKitTools/ChangeLog
index d00f8c4..0b89a82 100644
--- a/WebKitTools/ChangeLog
+++ b/WebKitTools/ChangeLog
@@ -1,3 +1,29 @@
+2010-08-04  Victor Wang  <victorw at chromium.org>
+
+        Reviewed by Ojan Vafai.
+
+        -. Add results.json incremental merging functionality to the test results app engine.
+        -. blobstore is not suitable for App Engine data merging and there is no API yet to
+           programmatically edit an existing blob or write a new one, so replace blobstore
+           with datastore. If a file is oversized (>1000*1000 bytes), store its data in
+           multiple datastore entries.
+        -. Fix styles.
+
+        Test: jsonresults_unittest to test the merging logic.
+
+        https://bugs.webkit.org/show_bug.cgi?id=38599
+
+        * TestResultServer/handlers/dashboardhandler.py:
+        * TestResultServer/handlers/menu.py:
+        * TestResultServer/handlers/testfilehandler.py:
+        * TestResultServer/main.py:
+        * TestResultServer/model/dashboardfile.py:
+        * TestResultServer/model/datastorefile.py: Added.
+        * TestResultServer/model/jsonresults.py: Added.
+        * TestResultServer/model/jsonresults_unittest.py: Added.
+        * TestResultServer/model/testfile.py:
+        * TestResultServer/templates/uploadform.html:
+
 2010-08-04  Antonio Gomes  <tonikitoo at webkit.org>
 
         Reviewed by Simon Hausmann and Kenneth Christiansen.
diff --git a/WebKitTools/TestResultServer/handlers/dashboardhandler.py b/WebKitTools/TestResultServer/handlers/dashboardhandler.py
index 45bc471..587d737 100644
--- a/WebKitTools/TestResultServer/handlers/dashboardhandler.py
+++ b/WebKitTools/TestResultServer/handlers/dashboardhandler.py
@@ -3,7 +3,7 @@
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
-# 
+#
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
@@ -13,7 +13,7 @@
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
-# 
+#
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
@@ -38,6 +38,7 @@ from model.dashboardfile import DashboardFile
 
 PARAM_FILE = "file"
 
+
 def get_content_type(filename):
     return mimetypes.guess_type(filename)[0] or "application/octet-stream"
 
diff --git a/WebKitTools/TestResultServer/handlers/menu.py b/WebKitTools/TestResultServer/handlers/menu.py
index ad2599d..f2f3855 100644
--- a/WebKitTools/TestResultServer/handlers/menu.py
+++ b/WebKitTools/TestResultServer/handlers/menu.py
@@ -3,7 +3,7 @@
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
-# 
+#
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
@@ -13,7 +13,7 @@
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
-# 
+#
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
@@ -61,4 +61,3 @@ class Menu(webapp.RequestHandler):
 
         self.response.out.write(
             template.render("templates/menu.html", template_values))
-
diff --git a/WebKitTools/TestResultServer/handlers/testfilehandler.py b/WebKitTools/TestResultServer/handlers/testfilehandler.py
index 972b606..97953e7 100644
--- a/WebKitTools/TestResultServer/handlers/testfilehandler.py
+++ b/WebKitTools/TestResultServer/handlers/testfilehandler.py
@@ -3,7 +3,7 @@
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
-# 
+#
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
@@ -13,7 +13,7 @@
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
-# 
+#
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
@@ -30,11 +30,10 @@ import logging
 import urllib
 
 from google.appengine.api import users
-from google.appengine.ext import blobstore
 from google.appengine.ext import webapp
-from google.appengine.ext.webapp import blobstore_handlers
 from google.appengine.ext.webapp import template
 
+from model.jsonresults import JsonResults
 from model.testfile import TestFile
 
 PARAM_BUILDER = "builder"
@@ -43,10 +42,11 @@ PARAM_FILE = "file"
 PARAM_NAME = "name"
 PARAM_KEY = "key"
 PARAM_TEST_TYPE = "testtype"
+PARAM_INCREMENTAL = "incremental"
 
 
 class DeleteFile(webapp.RequestHandler):
-    """Delete test file for a given builder and name from datastore (metadata) and blobstore (file data)."""
+    """Delete test file for a given builder and name from datastore."""
 
     def get(self):
         key = self.request.get(PARAM_KEY)
@@ -55,7 +55,7 @@ class DeleteFile(webapp.RequestHandler):
         name = self.request.get(PARAM_NAME)
 
         logging.debug(
-            "Deleting File, builder: %s, test_type: %s, name: %s, blob key: %s.",
+            "Deleting File, builder: %s, test_type: %s, name: %s, key: %s.",
             builder, test_type, name, key)
 
         TestFile.delete_file(key, builder, test_type, name, 100)
@@ -65,7 +65,7 @@ class DeleteFile(webapp.RequestHandler):
             % (builder, test_type, name))
 
 
-class GetFile(blobstore_handlers.BlobstoreDownloadHandler):
+class GetFile(webapp.RequestHandler):
     """Get file content or list of files for given builder and name."""
 
     def _get_file_list(self, builder, test_type, name):
@@ -77,7 +77,8 @@ class GetFile(blobstore_handlers.BlobstoreDownloadHandler):
             name: file name
         """
 
-        files = TestFile.get_files(builder, test_type, name, 100)
+        files = TestFile.get_files(
+            builder, test_type, name, load_data=False, limit=100)
         if not files:
             logging.info("File not found, builder: %s, test_type: %s, name: %s.",
                          builder, test_type, name)
@@ -103,16 +104,15 @@ class GetFile(blobstore_handlers.BlobstoreDownloadHandler):
             name: file name
         """
 
-        files = TestFile.get_files(builder, test_type, name, 1)
+        files = TestFile.get_files(
+            builder, test_type, name, load_data=True, limit=1)
         if not files:
             logging.info("File not found, builder: %s, test_type: %s, name: %s.",
                          builder, test_type, name)
             return
 
-        blob_key = files[0].blob_key
-        blob_info = blobstore.get(blob_key)
-        if blob_info:
-            self.send_blob(blob_info, "text/plain")
+        self.response.headers["Content-Type"] = "text/plain; charset=utf-8"
+        self.response.out.write(files[0].data)
 
     def get(self):
         builder = self.request.get(PARAM_BUILDER)
@@ -133,89 +133,69 @@ class GetFile(blobstore_handlers.BlobstoreDownloadHandler):
             return self._get_file_content(builder, test_type, name)
 
 
-class GetUploadUrl(webapp.RequestHandler):
-    """Get an url for uploading file to blobstore. A special url is required for each blobsotre upload."""
-
-    def get(self):
-        upload_url = blobstore.create_upload_url("/testfile/upload")
-        logging.info("Getting upload url: %s.", upload_url)
-        self.response.out.write(upload_url)
-
-
-class Upload(blobstore_handlers.BlobstoreUploadHandler):
-    """Upload file to blobstore."""
+class Upload(webapp.RequestHandler):
+    """Upload test results file to datastore."""
 
     def post(self):
-        uploaded_files = self.get_uploads("file")
-        if not uploaded_files:
-            return self._upload_done([("Missing upload file field.")])
+        file_params = self.request.POST.getall(PARAM_FILE)
+        if not file_params:
+            self.response.out.write("FAIL: missing upload file field.")
+            return
 
         builder = self.request.get(PARAM_BUILDER)
         if not builder:
-            for blob_info in uploaded_files:
-                blob_info.delete()
-    
-            return self._upload_done([("Missing builder parameter in upload request.")])
+            self.response.out.write("FAIL: missing builder parameter.")
+            return
 
         test_type = self.request.get(PARAM_TEST_TYPE)
+        incremental = self.request.get(PARAM_INCREMENTAL)
 
         logging.debug(
             "Processing upload request, builder: %s, test_type: %s.",
             builder, test_type)
 
+        # Each entry in file_params is either a single file item or a list
+        # of file items, depending on how the request was encoded.
+        # Normalize file_params to a flat list of file items.
+        files = []
+        logging.debug("test: %s, type:%s", file_params, type(file_params))
+        for item in file_params:
+            if not isinstance(item, list) and not isinstance(item, tuple):
+                item = [item]
+            files.extend(item)
+
         errors = []
-        for blob_info in uploaded_files:
-            tf = TestFile.update_file(builder, test_type, blob_info)
-            if not tf:
+        for file in files:
+            filename = file.filename.lower()
+            if ((incremental and filename == "results.json") or
+                (filename == "incremental_results.json")):
+                # Merge incremental json results.
+                saved_file = JsonResults.update(builder, test_type, file.value)
+            else:
+                saved_file = TestFile.update(
+                    builder, test_type, file.filename, file.value)
+
+            if not saved_file:
                 errors.append(
                     "Upload failed, builder: %s, test_type: %s, name: %s." %
-                    (builder, test_type, blob_info.filename))
-                blob_info.delete()
-
-        return self._upload_done(errors)
-
-    def _upload_done(self, errors):
-        logging.info("upload done.")
+                    (builder, test_type, file.filename))
 
-        error_messages = []
-        for error in errors:
-            logging.info(error)
-            error_messages.append("error=%s" % urllib.quote(error))
-
-        if error_messages:
-            redirect_url = "/uploadfail?%s" % "&".join(error_messages)
+        if errors:
+            messages = "FAIL: " + "; ".join(errors)
+            logging.warning(messages)
+            self.response.set_status(500, messages)
+            self.response.out.write("FAIL")
         else:
-            redirect_url = "/uploadsuccess"
-
-        logging.info(redirect_url)
-        # BlobstoreUploadHandler requires redirect at the end.
-        self.redirect(redirect_url)
+            self.response.set_status(200)
+            self.response.out.write("OK")
 
 
 class UploadForm(webapp.RequestHandler):
-    """Show a form so user can submit a file to blobstore."""
+    """Show a form so user can upload a file."""
 
     def get(self):
-        upload_url = blobstore.create_upload_url("/testfile/upload")
         template_values = {
-            "upload_url": upload_url,
+            "upload_url": "/testfile/upload",
         }
         self.response.out.write(template.render("templates/uploadform.html",
                                                 template_values))
-
-class UploadStatus(webapp.RequestHandler):
-    """Return status of file uploading"""
-
-    def get(self):
-        logging.debug("Update status")
-
-        if self.request.path == "/uploadsuccess":
-            self.response.set_status(200)
-            self.response.out.write("OK")
-        else:
-            errors = self.request.params.getall("error")
-            if errors:
-                messages = "FAIL: " + "; ".join(errors)
-                logging.warning(messages)
-                self.response.set_status(500, messages)
-                self.response.out.write("FAIL")
diff --git a/WebKitTools/TestResultServer/main.py b/WebKitTools/TestResultServer/main.py
index 7a0d237..aa6e432 100644
--- a/WebKitTools/TestResultServer/main.py
+++ b/WebKitTools/TestResultServer/main.py
@@ -3,7 +3,7 @@
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
-# 
+#
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
@@ -13,7 +13,7 @@
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
-# 
+#
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
@@ -42,17 +42,15 @@ routes = [
     ('/dashboards/update', dashboardhandler.UpdateDashboardFile),
     ('/dashboards/([^?]+)?', dashboardhandler.GetDashboardFile),
     ('/testfile/delete', testfilehandler.DeleteFile),
-    ('/testfile/uploadurl', testfilehandler.GetUploadUrl),
     ('/testfile/upload', testfilehandler.Upload),
     ('/testfile/uploadform', testfilehandler.UploadForm),
     ('/testfile/?', testfilehandler.GetFile),
-    ('/uploadfail', testfilehandler.UploadStatus),
-    ('/uploadsuccess', testfilehandler.UploadStatus),
     ('/*|/menu', menu.Menu),
 ]
 
 application = webapp.WSGIApplication(routes, debug=True)
 
+
 def main():
     run_wsgi_app(application)
 
diff --git a/WebKitTools/TestResultServer/model/dashboardfile.py b/WebKitTools/TestResultServer/model/dashboardfile.py
index c74f071..57d3f6f 100644
--- a/WebKitTools/TestResultServer/model/dashboardfile.py
+++ b/WebKitTools/TestResultServer/model/dashboardfile.py
@@ -3,7 +3,7 @@
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
-# 
+#
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
@@ -13,7 +13,7 @@
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
-# 
+#
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
@@ -36,6 +36,7 @@ from google.appengine.ext import db
 SVN_PATH_DASHBOARD = ("http://src.chromium.org/viewvc/chrome/trunk/tools/"
     "dashboards/")
 
+
 class DashboardFile(db.Model):
     name = db.StringProperty()
     data = db.BlobProperty()
@@ -92,7 +93,7 @@ class DashboardFile(db.Model):
         if not files:
             logging.info("No existing file, added as new file.")
             return cls.add_file(name, data)
-        
+
         logging.debug("Updating existing file.")
         file = files[0]
         file.data = data
diff --git a/WebKitTools/TestResultServer/model/datastorefile.py b/WebKitTools/TestResultServer/model/datastorefile.py
new file mode 100755
index 0000000..dd4c366
--- /dev/null
+++ b/WebKitTools/TestResultServer/model/datastorefile.py
@@ -0,0 +1,129 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from datetime import datetime
+import logging
+
+from google.appengine.ext import db
+
+MAX_DATA_ENTRY_PER_FILE = 10
+MAX_ENTRY_LEN = 1000 * 1000
+
+
+class DataEntry(db.Model):
+    """Datastore entry that stores one segmant of file data
+       (<1000*1000 bytes).
+    """
+
+    data = db.BlobProperty()
+
+    @classmethod
+    def get(cls, key):
+        return db.get(key)
+
+    def get_data(self, key):
+        return db.get(key)
+
+
+class DataStoreFile(db.Model):
+    """This class stores file in datastore.
+       If a file is oversized (>1000*1000 bytes), the file is split into
+       multiple segments and stored in multiple datastore entries.
+    """
+
+    name = db.StringProperty()
+    data_keys = db.ListProperty(db.Key)
+    date = db.DateTimeProperty(auto_now_add=True)
+
+    data = None
+
+    def delete_data(self, keys=None):
+        if not keys:
+            keys = self.data_keys
+
+        for key in keys:
+            data_entry = DataEntry.get(key)
+            if data_entry:
+                data_entry.delete()
+
+    def save_data(self, data):
+        if not data:
+            logging.warning("No data to save.")
+            return False
+
+        if len(data) > (MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN):
+            logging.error("File too big, can't save to datastore: %dK",
+                len(data) / 1024)
+            return False
+
+        start = 0
+        keys = self.data_keys
+        self.data_keys = []
+        while start < len(data):
+            if keys:
+                key = keys.pop(0)
+                data_entry = DataEntry.get(key)
+                if not data_entry:
+                    logging.warning("Found key, but no data entry: %s", key)
+                    data_entry = DataEntry()
+            else:
+                data_entry = DataEntry()
+
+            data_entry.data = db.Blob(data[start: start + MAX_ENTRY_LEN])
+            data_entry.put()
+
+            logging.info("Data saved: %s.", data_entry.key())
+            self.data_keys.append(data_entry.key())
+
+            start = start + MAX_ENTRY_LEN
+
+        if keys:
+            self.delete_data(keys)
+
+        self.data = data
+
+        return True
+
+    def load_data(self):
+        if not self.data_keys:
+            logging.warning("No data to load.")
+            return None
+
+        data = []
+        for key in self.data_keys:
+            logging.info("Loading data for key: %s.", key)
+            data_entry = DataEntry.get(key)
+            if not data_entry:
+                logging.error("No data found for key: %s.", key)
+                return None
+
+            data.append(data_entry.data)
+
+        self.data = "".join(data)
+
+        return self.data
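
    For reference (not part of the patch): the chunking scheme that
    save_data() and load_data() implement reduces to splitting a byte string
    into segments of at most MAX_ENTRY_LEN bytes and joining them back. A
    standalone sketch using the same constant:

        MAX_ENTRY_LEN = 1000 * 1000

        def split_into_chunks(data):
            # One datastore entry per segment; the last may be shorter.
            return [data[start:start + MAX_ENTRY_LEN]
                    for start in range(0, len(data), MAX_ENTRY_LEN)]

        chunks = split_into_chunks("x" * 2500000)
        assert [len(c) for c in chunks] == [1000000, 1000000, 500000]
        assert "".join(chunks) == "x" * 2500000
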
diff --git a/WebKitTools/TestResultServer/model/jsonresults.py b/WebKitTools/TestResultServer/model/jsonresults.py
new file mode 100755
index 0000000..d86fbcd
--- /dev/null
+++ b/WebKitTools/TestResultServer/model/jsonresults.py
@@ -0,0 +1,365 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from datetime import datetime
+from django.utils import simplejson
+import logging
+
+from model.testfile import TestFile
+
+JSON_RESULTS_FILE = "results.json"
+JSON_RESULTS_PREFIX = "ADD_RESULTS("
+JSON_RESULTS_SUFFIX = ");"
+JSON_RESULTS_VERSION_KEY = "version"
+JSON_RESULTS_BUILD_NUMBERS = "buildNumbers"
+JSON_RESULTS_TESTS = "tests"
+JSON_RESULTS_RESULTS = "results"
+JSON_RESULTS_TIMES = "times"
+JSON_RESULTS_VERSION = 3
+JSON_RESULTS_MAX_BUILDS = 750
+
+
+class JsonResults(object):
+    @classmethod
+    def _strip_prefix_suffix(cls, data):
+        """Strip out prefix and suffix of json results string.
+
+        Args:
+            data: json file content.
+
+        Returns:
+            json string without prefix and suffix.
+        """
+
+        assert(data.startswith(JSON_RESULTS_PREFIX))
+        assert(data.endswith(JSON_RESULTS_SUFFIX))
+
+        return data[len(JSON_RESULTS_PREFIX):
+                    len(data) - len(JSON_RESULTS_SUFFIX)]
+
+    @classmethod
+    def _generate_file_data(cls, json, sort_keys=False):
+        """Given json string, generate file content data by adding
+           prefix and suffix.
+
+        Args:
+            json: json object to serialize.
+
+        Returns:
+            json file data.
+        """
+
+        data = simplejson.dumps(json, separators=(',', ':'),
+            sort_keys=sort_keys)
+        return JSON_RESULTS_PREFIX + data + JSON_RESULTS_SUFFIX
+
+    @classmethod
+    def _load_json(cls, file_data):
+        """Load json file to a python object.
+
+        Args:
+            file_data: json file content.
+
+        Returns:
+            json object or
+            None on failure.
+        """
+
+        json_results_str = cls._strip_prefix_suffix(file_data)
+        if not json_results_str:
+            logging.warning("No json results data.")
+            return None
+
+        try:
+            return simplejson.loads(json_results_str)
+        except Exception, err:
+            logging.debug(json_results_str)
+            logging.error("Failed to load json results: %s", str(err))
+            return None
+
+    @classmethod
+    def _merge_json(cls, aggregated_json, incremental_json):
+        """Merge incremental json into aggregated json results.
+
+        Args:
+            aggregated_json: aggregated json object.
+            incremental_json: incremental json object.
+
+        Returns:
+            True if merge succeeds or
+            False on failure.
+        """
+
+        # Merge non-test property data.
+        # Test properties are merged in _merge_tests.
+        if not cls._merge_non_test_data(aggregated_json, incremental_json):
+            return False
+
+        # Merge test results and times.
+        incremental_tests = incremental_json[JSON_RESULTS_TESTS]
+        if incremental_tests:
+            aggregated_tests = aggregated_json[JSON_RESULTS_TESTS]
+            cls._merge_tests(aggregated_tests, incremental_tests)
+
+        return True
+
+    @classmethod
+    def _merge_non_test_data(cls, aggregated_json, incremental_json):
+        """Merge incremental non tests property data into aggregated json results.
+
+        Args:
+            aggregated_json: aggregated json object.
+            incremental_json: incremental json object.
+
+        Returns:
+            True if merge succeeds or
+            False on failure.
+        """
+
+        incremental_builds = incremental_json[JSON_RESULTS_BUILD_NUMBERS]
+        aggregated_builds = aggregated_json[JSON_RESULTS_BUILD_NUMBERS]
+        aggregated_build_number = int(aggregated_builds[0])
+        # Loop through all incremental builds, start from the oldest run.
+        for index in reversed(range(len(incremental_builds))):
+            build_number = int(incremental_builds[index])
+            logging.debug("Merging build %s, incremental json index: %d.",
+                build_number, index)
+
+            # Return if not all build numbers in the incremental json results
+            # are newer than the most recent build in the aggregated results.
+            # FIXME: make this case work.
+            if build_number < aggregated_build_number:
+                logging.warning(("Build %d in incremental json is older than "
+                    "the most recent build in aggregated results: %d"),
+                    build_number, aggregated_build_number)
+                return False
+
+            # Return if the build number is duplicated.
+            # FIXME: skip the duplicated build and merge rest of the results.
+        #        Need to be careful when skipping the corresponding value in
+            #        _merge_tests because the property data for each test could
+            #        be accumulated.
+            if build_number == aggregated_build_number:
+                logging.warning("Duplicate build %d in incremental json",
+                    build_number)
+                return False
+
+            # Merge this build into aggregated results.
+            cls._merge_one_build(aggregated_json, incremental_json, index)
+            logging.debug("Merged build %s, merged json: %s.",
+                build_number, aggregated_json)
+
+        return True
+
+    @classmethod
+    def _merge_one_build(cls, aggregated_json, incremental_json,
+                         incremental_index):
+        """Merge one build of incremental json into aggregated json results.
+
+        Args:
+            aggregated_json: aggregated json object.
+            incremental_json: incremental json object.
+            incremental_index: index of the incremental json results to merge.
+        """
+
+        for key in incremental_json.keys():
+            # Merge json results except "tests" properties (results, times, etc.).
+            # "tests" properties will be handled separately.
+            if key == JSON_RESULTS_TESTS:
+                continue
+
+            if key in aggregated_json:
+                aggregated_json[key].insert(
+                    0, incremental_json[key][incremental_index])
+                aggregated_json[key] = \
+                    aggregated_json[key][:JSON_RESULTS_MAX_BUILDS]
+            else:
+                aggregated_json[key] = incremental_json[key]
+
+    @classmethod
+    def _merge_tests(cls, aggregated_json, incremental_json):
+        """Merge "tests" properties:results, times.
+
+        Args:
+            aggregated_json: aggregated json object.
+            incremental_json: incremental json object.
+        """
+
+        for test_name in incremental_json:
+            incremental_test = incremental_json[test_name]
+            if test_name in aggregated_json:
+                aggregated_test = aggregated_json[test_name]
+                cls._insert_item_run_length_encoded(
+                    incremental_test[JSON_RESULTS_RESULTS],
+                    aggregated_test[JSON_RESULTS_RESULTS])
+                cls._insert_item_run_length_encoded(
+                    incremental_test[JSON_RESULTS_TIMES],
+                    aggregated_test[JSON_RESULTS_TIMES])
+            else:
+                aggregated_json[test_name] = incremental_test
+
+    @classmethod
+    def _insert_item_run_length_encoded(cls, incremental_item, aggregated_item):
+        """Inserts the incremental run-length encoded results into the aggregated
+           run-length encoded results.
+
+        Args:
+            incremental_item: incremental run-length encoded results.
+            aggregated_item: aggregated run-length encoded results.
+        """
+
+        for item in incremental_item:
+            if len(aggregated_item) and item[1] == aggregated_item[0][1]:
+                aggregated_item[0][0] = min(
+                    aggregated_item[0][0] + item[0], JSON_RESULTS_MAX_BUILDS)
+            else:
+                # The test item values need to be summed across consecutive
+                # runs. If there is an older item (not the most recent one)
+                # whose value is the same as the one to insert, remove the
+                # old item from the aggregated list.
+                for i in reversed(range(1, len(aggregated_item))):
+                    if item[1] == aggregated_item[i][1]:
+                        aggregated_item.pop(i)
+
+                aggregated_item.insert(0, item)
+
+    @classmethod
+    def _check_json(cls, builder, json):
+        """Check whether the given json is valid.
+
+        Args:
+            builder: builder name this json is for.
+            json: json object to check.
+
+        Returns:
+            True if the json is valid or
+            False otherwise.
+        """
+
+        version = json[JSON_RESULTS_VERSION_KEY]
+        if version > JSON_RESULTS_VERSION:
+            logging.error("Results JSON version '%s' is not supported.",
+                version)
+            return False
+
+        if not builder in json:
+            logging.error("Builder '%s' is not in json results.", builder)
+            return False
+
+        results_for_builder = json[builder]
+        if not JSON_RESULTS_BUILD_NUMBERS in results_for_builder:
+            logging.error("Missing build number in json results.")
+            return False
+
+        return True
+
+    @classmethod
+    def merge(cls, builder, aggregated, incremental, sort_keys=False):
+        """Merge incremental json file data with aggregated json file data.
+
+        Args:
+            builder: builder name.
+            aggregated: aggregated json file data.
+            incremental: incremental json file data.
+            sort_keys: whether or not to sort keys when dumping json results.
+
+        Returns:
+            Merged json file data if merge succeeds or
+            None on failure.
+        """
+
+        if not incremental:
+            logging.warning("Nothing to merge.")
+            return None
+
+        logging.info("Loading incremental json...")
+        incremental_json = cls._load_json(incremental)
+        if not incremental_json:
+            return None
+
+        logging.info("Checking incremental json...")
+        if not cls._check_json(builder, incremental_json):
+            return None
+
+        logging.info("Loading existing aggregated json...")
+        aggregated_json = cls._load_json(aggregated)
+        if not aggregated_json:
+            return incremental
+
+        logging.info("Checking existing aggregated json...")
+        if not cls._check_json(builder, aggregated_json):
+            return incremental
+
+        logging.info("Merging json results...")
+        try:
+            if not cls._merge_json(
+                aggregated_json[builder],
+                incremental_json[builder]):
+                return None
+        except Exception, err:
+            logging.error("Failed to merge json results: %s", str(err))
+            return None
+
+        aggregated_json[JSON_RESULTS_VERSION_KEY] = JSON_RESULTS_VERSION
+
+        return cls._generate_file_data(aggregated_json, sort_keys)
+
+    @classmethod
+    def update(cls, builder, test_type, incremental):
+        """Update datastore json file data by merging it with incremental json
+           file.
+
+        Args:
+            builder: builder name.
+            test_type: type of test results.
+            incremental: incremental json file data to merge.
+
+        Returns:
+            TestFile object if update succeeds or
+            None on failure.
+        """
+
+        files = TestFile.get_files(builder, test_type, JSON_RESULTS_FILE)
+        if files:
+            file = files[0]
+            new_results = cls.merge(builder, file.data, incremental)
+        else:
+            # Use the incremental data if there is no aggregated file to merge.
+            file = TestFile()
+            file.builder = builder
+            file.name = JSON_RESULTS_FILE
+            new_results = incremental
+            logging.info("No existing json results, incremental json is saved.")
+
+        if not new_results:
+            return None
+
+        if not file.save(new_results):
+            return None
+
+        return file
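
    For reference (not part of the patch): the "results" and "times" values
    that _insert_item_run_length_encoded() operates on are run-length encoded
    as [count, value] pairs, newest run first. A standalone sketch of the core
    rule with made-up data (the JSON_RESULTS_MAX_BUILDS cap and the removal of
    older duplicate runs are omitted here):

        aggregated = [[200, "P"]]   # 200 consecutive passes, newest run first
        incremental = [[1, "P"]]    # one more pass from the latest build

        for item in incremental:
            if aggregated and item[1] == aggregated[0][1]:
                # Same value as the newest aggregated run: extend that run.
                aggregated[0][0] += item[0]
            else:
                # Different value: start a new run in front.
                aggregated.insert(0, item)

        assert aggregated == [[201, "P"]]
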
diff --git a/WebKitTools/TestResultServer/model/jsonresults_unittest.py b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
new file mode 100755
index 0000000..fd646c8
--- /dev/null
+++ b/WebKitTools/TestResultServer/model/jsonresults_unittest.py
@@ -0,0 +1,256 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from jsonresults import JsonResults
+
+JSON_RESULTS_TEMPLATE = (
+    '{"Webkit":{'
+    '"allFixableCount":[[TESTDATA_COUNT]],'
+    '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
+    '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
+    '"deferredCounts":[[TESTDATA_COUNTS]],'
+    '"fixableCount":[[TESTDATA_COUNT]],'
+    '"fixableCounts":[[TESTDATA_COUNTS]],'
+    '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
+    '"tests":{[TESTDATA_TESTS]},'
+    '"webkitRevision":[[TESTDATA_WEBKITREVISION]],'
+    '"wontfixCounts":[[TESTDATA_COUNTS]]'
+    '},'
+    '"version":3'
+    '}')
+
+JSON_RESULTS_COUNTS_TEMPLATE = (
+    '{'
+    '"C":[TESTDATA],'
+    '"F":[TESTDATA],'
+    '"I":[TESTDATA],'
+    '"O":[TESTDATA],'
+    '"P":[TESTDATA],'
+    '"T":[TESTDATA],'
+    '"X":[TESTDATA],'
+    '"Z":[TESTDATA]}')
+
+JSON_RESULTS_TESTS_TEMPLATE = (
+    '"[TESTDATA_TEST_NAME]":{'
+    '"results":[[TESTDATA_TEST_RESULTS]],'
+    '"times":[[TESTDATA_TEST_TIMES]]}')
+
+JSON_RESULTS_PREFIX = "ADD_RESULTS("
+JSON_RESULTS_SUFFIX = ");"
+
+
+class JsonResultsTest(unittest.TestCase):
+    def setUp(self):
+        self._builder = "Webkit"
+
+    def _make_test_json(self, test_data):
+        if not test_data:
+            return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX
+
+        (builds, tests) = test_data
+        if not builds or not tests:
+            return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX
+
+        json = JSON_RESULTS_TEMPLATE
+
+        counts = []
+        build_numbers = []
+        webkit_revision = []
+        chrome_revision = []
+        times = []
+        for build in builds:
+            counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
+            build_numbers.append("1000%s" % build)
+            webkit_revision.append("2000%s" % build)
+            chrome_revision.append("3000%s" % build)
+            times.append("100000%s000" % build)
+
+        json = json.replace("[TESTDATA_COUNTS]", ",".join(counts))
+        json = json.replace("[TESTDATA_COUNT]", ",".join(builds))
+        json = json.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
+        json = json.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
+        json = json.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
+        json = json.replace("[TESTDATA_TIMES]", ",".join(times))
+
+        json_tests = []
+        for test in tests:
+            t = JSON_RESULTS_TESTS_TEMPLATE.replace("[TESTDATA_TEST_NAME]", test[0])
+            t = t.replace("[TESTDATA_TEST_RESULTS]", test[1])
+            t = t.replace("[TESTDATA_TEST_TIMES]", test[2])
+            json_tests.append(t)
+
+        json = json.replace("[TESTDATA_TESTS]", ",".join(json_tests))
+
+        return JSON_RESULTS_PREFIX + json + JSON_RESULTS_SUFFIX
+
+    def _test_merge(self, aggregated_data, incremental_data, expected_data):
+        aggregated_results = self._make_test_json(aggregated_data)
+        incremental_results = self._make_test_json(incremental_data)
+        merged_results = JsonResults.merge(self._builder,
+            aggregated_results, incremental_results, sort_keys=True)
+
+        if expected_data:
+            expected_results = self._make_test_json(expected_data)
+            self.assertEquals(merged_results, expected_results)
+        else:
+            self.assertFalse(merged_results)
+
+    def test(self):
+        # Empty incremental results json.
+        # Nothing to merge.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            # Incremental results
+            None,
+            # Expect no merge to happen.
+            None)
+
+        # No actual incremental test results (only prefix and suffix) to merge.
+        # Nothing to merge.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            # Incremental results
+            ([], []),
+            # Expect no merge to happen.
+            None)
+
+        # No existing aggregated results.
+        # Merged results == new incremental results.
+        self._test_merge(
+            # Aggregated results
+            None,
+            # Incremental results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            # Expected results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]))
+
+        # Single test for single run.
+        # Incremental results have the latest build and the same test results
+        # for that run.
+        # Insert the incremental results in first place and sum the number
+        # of runs for "P" (200 + 1) to get the merged results.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            # Incremental results
+            (["3"], [["001.html", "[1,\"P\"]", "[1,\"0\"]"]]),
+            # Expected results
+            (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"]]))
+
+        # Single test for single run.
+        # Incremental results have the latest build but different test results
+        # for that run.
+        # Insert the incremental results in first place.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            # Incremental results
+            (["3"], [["001.html", "[1, \"I\"]", "[1,\"1\"]"]]),
+            # Expected results
+            (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"P\"]", "[1,\"1\"],[200,\"0\"]"]]))
+
+        # Single test for single run.
+        # Incremental results have the latest build but different test results
+        # for that run.
+        # Runs of the same result type need to stay contiguous, so the old
+        # [10,"I"] entry is dropped when a new entry of the same type, [1,"I"],
+        # is inserted in front of [200,"P"].
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"],[10,\"I\"]", "[200,\"0\"],[10,\"1\"]"]]),
+            # Incremental results
+            (["3"], [["001.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+            # Expected results
+            (["3", "2", "1"], [["001.html", "[1,\"I\"],[200,\"P\"]", "[1,\"1\"],[200,\"0\"]"]]))
+
+        # Multiple tests for single run.
+        # All tests have incremental updates.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[100,\"I\"]", "[100,\"1\"]"]]),
+            # Incremental results
+            (["3"], [["001.html", "[1,\"P\"]", "[1,\"0\"]"], ["002.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+            # Expected results
+            (["3", "2", "1"], [["001.html", "[201,\"P\"]", "[201,\"0\"]"], ["002.html", "[101,\"I\"]", "[101,\"1\"]"]]))
+
+        # Multiple tests for single run.
+        # Not all tests have update.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[100,\"I\"]", "[100,\"1\"]"]]),
+            # Incremental results
+            (["3"], [["002.html", "[1,\"I\"]", "[1,\"1\"]"]]),
+            # Expected results
+            (["3", "2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[101,\"I\"]", "[101,\"1\"]"]]))
+
+        # Single test for multiple runs.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            # Incremental results
+            (["4", "3"], [["001.html", "[2, \"I\"]", "[2,\"2\"]"]]),
+            # Expected results
+            (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"P\"]", "[2,\"2\"],[200,\"0\"]"]]))
+
+        # Multiple tests for multiple runs.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"], ["002.html", "[10,\"Z\"]", "[10,\"0\"]"]]),
+            # Incremental results
+            (["4", "3"], [["001.html", "[2, \"I\"]", "[2,\"2\"]"], ["002.html", "[1,\"C\"]", "[1,\"1\"]"]]),
+            # Expected results
+            (["4", "3", "2", "1"], [["001.html", "[2,\"I\"],[200,\"P\"]", "[2,\"2\"],[200,\"0\"]"], ["002.html", "[1,\"C\"],[10,\"Z\"]", "[1,\"1\"],[10,\"0\"]"]]))
+
+        # Test the case where the build in the incremental results is older
+        # than the most recent build in the aggregated results.
+        # The incremental results should be dropped and no merge happens.
+        self._test_merge(
+            # Aggregated results
+            (["3", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            # Incremental results
+            (["2"], [["001.html", "[1, \"P\"]", "[1,\"0\"]"]]),
+            # Expect no merge to happen.
+            None)
+
+        # Test the case where the build in the incremental results is the same
+        # as the most recent build in the aggregated results.
+        # The incremental results should be dropped and no merge happens.
+        self._test_merge(
+            # Aggregated results
+            (["2", "1"], [["001.html", "[200,\"P\"]", "[200,\"0\"]"]]),
+            # Incremental results
+            (["3", "2"], [["001.html", "[2, \"P\"]", "[2,\"0\"]"]]),
+            # Expect no merge to happen.
+            None)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/WebKitTools/TestResultServer/model/testfile.py b/WebKitTools/TestResultServer/model/testfile.py
index 35ab967..ce92b65 100644
--- a/WebKitTools/TestResultServer/model/testfile.py
+++ b/WebKitTools/TestResultServer/model/testfile.py
@@ -3,7 +3,7 @@
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
-# 
+#
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
@@ -13,7 +13,7 @@
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
-# 
+#
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
@@ -29,16 +29,14 @@
 from datetime import datetime
 import logging
 
-from google.appengine.ext import blobstore
 from google.appengine.ext import db
 
+from model.datastorefile import DataStoreFile
 
-class TestFile(db.Model):
+
+class TestFile(DataStoreFile):
     builder = db.StringProperty()
-    name = db.StringProperty()
     test_type = db.StringProperty()
-    blob_key = db.StringProperty()
-    date = db.DateTimeProperty(auto_now_add=True)
 
     @classmethod
     def delete_file(cls, key, builder, test_type, name, limit):
@@ -63,7 +61,7 @@ class TestFile(db.Model):
         return True
 
     @classmethod
-    def get_files(cls, builder, test_type, name, limit):
+    def get_files(cls, builder, test_type, name, load_data=True, limit=1):
         query = TestFile.all()
         if builder:
             query = query.filter("builder =", builder)
@@ -72,51 +70,54 @@ class TestFile(db.Model):
         if name:
             query = query.filter("name =", name)
 
-        return query.order("-date").fetch(limit)
+        files = query.order("-date").fetch(limit)
+        if load_data:
+            for file in files:
+                file.load_data()
+
+        return files
 
     @classmethod
-    def add_file(cls, builder, test_type, blob_info):
+    def add_file(cls, builder, test_type, name, data):
         file = TestFile()
         file.builder = builder
         file.test_type = test_type
-        file.name = blob_info.filename
-        file.blob_key = str(blob_info.key())
-        file.put()
+        file.name = name
+
+        if not file.save(data):
+            return None
 
         logging.info(
-            "File saved, builder: %s, test_type: %s, name: %s, blob key: %s.",
-            builder, test_type, file.name, file.blob_key)
+            "File saved, builder: %s, test_type: %s, name: %s, key: %s.",
+            builder, test_type, file.name, str(file.data_keys))
 
         return file
 
     @classmethod
-    def update_file(cls, builder, test_type, blob_info):
-        files = cls.get_files(builder, test_type, blob_info.filename, 1)
+    def update(cls, builder, test_type, name, data):
+        files = cls.get_files(builder, test_type, name)
         if not files:
-            return cls.add_file(builder, test_type, blob_info)
+            return cls.add_file(builder, test_type, name, data)
 
         file = files[0]
-        old_blob_info = blobstore.BlobInfo.get(file.blob_key)
-        if old_blob_info:
-            old_blob_info.delete()
-
-        file.builder = builder
-        file.test_type = test_type
-        file.name = blob_info.filename
-        file.blob_key = str(blob_info.key())
-        file.date = datetime.now()
-        file.put()
+        if not file.save(data):
+            return None
 
         logging.info(
-            "File replaced, builder: %s, test_type: %s, name: %s, blob key: %s.",
-            builder, test_type, file.name, file.blob_key)
+            "File replaced, builder: %s, test_type: %s, name: %s, data key: %s.",
+            builder, test_type, file.name, str(file.data_keys))
 
         return file
 
-    def _delete_all(self):
-        if self.blob_key:
-            blob_info = blobstore.BlobInfo.get(self.blob_key)
-            if blob_info:
-                blob_info.delete()
+    def save(self, data):
+        if not self.save_data(data):
+            return False
+
+        self.date = datetime.now()
+        self.put()
 
+        return True
+
+    def _delete_all(self):
+        self.delete_data()
         self.delete()
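
    A hypothetical usage sketch (not part of the patch) of the reworked
    TestFile API from a handler's point of view; the builder name, test type,
    and input file are made up:

        from model.testfile import TestFile

        data = open("results.json").read()  # raw file content to store
        saved = TestFile.update("Webkit", "layout-tests", "results.json", data)
        if saved is None:
            # save_data() rejected the payload: it was empty, or larger than
            # MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN bytes.
            pass
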
diff --git a/WebKitTools/TestResultServer/templates/uploadform.html b/WebKitTools/TestResultServer/templates/uploadform.html
index 933f9f5..3506c9c 100644
--- a/WebKitTools/TestResultServer/templates/uploadform.html
+++ b/WebKitTools/TestResultServer/templates/uploadform.html
@@ -18,6 +18,9 @@
         <td><input class=inputtext type="text" name="testtype" value=""/></td>
     </tr>
     </table>
+    <br>
+    <div><input class=button type="checkbox" name="incremental">Incremental results, merge with server file.</div>
+    <br>
     <div><input class=button type="file" name="file" multiple></div>
     <br>
     <div><input class=button type="submit" value="Upload"></div>

-- 
WebKit Debian packaging


