[apt-proxy-devel] r607 - in people/halls/rework: . apt_proxy apt_proxy/test

Chris Halls halls at costa.debian.org
Tue May 30 15:27:21 UTC 2006


Author: halls
Date: Tue May 30 15:27:19 2006
New Revision: 607

Removed:
   people/halls/rework/debian/TODO
Modified:
   people/halls/rework/apt_proxy/apt_proxy.py
   people/halls/rework/apt_proxy/apt_proxy_conf.py
   people/halls/rework/apt_proxy/cache.py
   people/halls/rework/apt_proxy/fetchers.py
   people/halls/rework/apt_proxy/test/test_requests.py
   people/halls/rework/debian/changelog
   people/halls/rework/runtests

Log:
* Fix pipelining and remove disabling code
* Remove obsolete debian/TODO
* Add some code to test HTTP client pipelining; it is not
  fully exercised yet because twisted.web does not currently
  support pipelining


Modified: people/halls/rework/apt_proxy/apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy.py	(original)
+++ people/halls/rework/apt_proxy/apt_proxy.py	Tue May 30 15:27:19 2006
@@ -207,6 +207,7 @@
     cacheEntry = None        # Cache entry for file requested
     
     def __init__(self, channel, queued):
+        log.debug("New Request, queued=%s" % (queued),'Request');
         self.factory=channel.factory
         http.Request.__init__(self, channel, queued)
 
@@ -232,10 +233,6 @@
         log.debug("Request: %s %s backend=%s uri=%s"
                     % (self.method, self.uri, backendName, self.uri),'Request')
 
-        if self.factory.config.disable_pipelining:
-            #self.setHeader('Connection','close')
-            self.channel.persistent = 0
-
         if self.method != 'GET':
             #we currently only support GET
             log.debug("abort - method not implemented", 'Request')
@@ -301,9 +298,6 @@
         "Finish request after streaming"
         log.debug("finish. Queued: %s" % (self.queued) , 'Request')
         http.Request.finish(self)
-        #if self.factory.config.disable_pipelining:
-            #if hasattr(self.transport, 'loseConnection'):
-                #self.transport.loseConnection()
 
         if self.cacheEntry:
             self.cacheEntry.remove_request(self)
@@ -347,13 +341,16 @@
     def connectionLost(self, reason=None):
         "If the connection is lost, notify all my requests"
         __pychecker__ = 'unusednames=reason'
-        for req in self.requests:
-            req.connectionLost()
         log.debug("Client connection closed", 'Channel')
+        http.HTTPChannel.connectionLost(self, reason)
         if log.isEnabled('memleak'):
             memleak.print_top_10()
         #reactor.stop()   # use for shutting down apt-proxy when a client disconnects
 
+    #def requestDone(self, request):
+        #log.debug("========Request Done=========", 'Channel')
+        #http.HTTPChannel.requestDone(self, request)
+        
 class Factory(protocol.ServerFactory):
     """
     This is the center of apt-proxy, it holds all configuration and global data
@@ -423,7 +420,7 @@
         self.configurationChanged()
         self.dumpdbs()
         self.recycler = MirrorRecycler(self, 1)
-        self.recycler.start()
+        #self.recycler.start()
 
     def configurationChanged(self, oldconfig = None):
         """

Modified: people/halls/rework/apt_proxy/apt_proxy_conf.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy_conf.py	(original)
+++ people/halls/rework/apt_proxy/apt_proxy_conf.py	Tue May 30 15:27:19 2006
@@ -94,7 +94,6 @@
         ['max_versions', 3, '*int'],
         ['max_age', 10, '*time'],
         ['import_dir', '/var/cache/apt-proxy/import', 'string'],
-        ['disable_pipelining', '0', 'boolean'],
         ['passive_ftp', 'on', 'boolean'],
         ['dynamic_backends', 'on', 'boolean'],
         ['http_proxy', None , 'proxyspec'],

Modified: people/halls/rework/apt_proxy/cache.py
==============================================================================
--- people/halls/rework/apt_proxy/cache.py	(original)
+++ people/halls/rework/apt_proxy/cache.py	Tue May 30 15:27:19 2006
@@ -237,7 +237,7 @@
                 log.debug("Sending file to clients:%s size:%s" % (filename, size), 'CacheEntry')
                 self.streamfile = open(filename,'rb')
                 #fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
-        
+
                 for request in self.requests:
                     if request.start_streaming(size, mtime):
                         basic.FileSender().beginFileTransfer(self.streamfile, request) \

Modified: people/halls/rework/apt_proxy/fetchers.py
==============================================================================
--- people/halls/rework/apt_proxy/fetchers.py	(original)
+++ people/halls/rework/apt_proxy/fetchers.py	Tue May 30 15:27:19 2006
@@ -108,9 +108,12 @@
         Download was successful
         """
         log.debug("download complete. Sent:%s bytes" % (self.len_received), "Fetcher")
+        if not self.fetcher.pipelining:
+            self.connection_closed(self.fetcher)
+        if self.len_received==0:
+            self.download_started() # Send status code to clients
         self.cacheEntry.download_data_end()
         self.deferred.callback((True, ""))
-        #self.fetcher = None
 
     def fail_over(self, reason_code, reason_msg):
         """
@@ -123,6 +126,8 @@
     def download_failed(self, reason_code, reason_msg):
         #self.cacheEntry.download_data_end()
         log.debug("download_failed: (%s) %s " %(reason_code, reason_msg), "Fetcher")
+        if not self.fetcher.pipelining:
+            self.connection_closed(self.fetcher)
         self.cacheEntry.download_failure(reason_code, reason_msg)
         self.deferred.callback((False, reason_msg))
 
@@ -139,14 +144,18 @@
         @param data: raw data received from server
         @param save: if true, save to disk (rsync saves file itself)
         """
-        if self.len_received == 0:
-            if save:
-                self.cacheEntry.init_tempfile()
-            self.cacheEntry.download_started(self, self.size, self.mtime)
+        #log.debug("data_received: %s bytes" % len(data), 'Fetcher');
+        if not self.len_received:
+            self.download_started(save)
         self.len_received = self.len_received + len(data)
-
         self.cacheEntry.download_data_received(data)
 
+    def download_started(self, save=True):
+        if save:
+            self.cacheEntry.init_tempfile()
+        self.cacheEntry.download_started(self, self.size, self.mtime)
+
+
     def server_size(self, len):
         """
         The server has sent the expected length of the file
@@ -196,13 +205,17 @@
             self.fetcher.disconnect()
             self.fetcher = None
 
-    def connection_closed(self):
+    def connection_closed(self, fetcher):
         """
         A protocol fetcher's connection has closed - we must reopen the connection
         next time
         """
-        log.debug("Connection closed", 'Fetcher')
-        self.fetcher = None
+        log.debug("Connection closed for %s, state=%s" %(self.cacheEntry.path, self.cacheEntry.state), 'Fetcher')
+        #if self.cacheEntry.state in \
+        #   (self.cacheEntry.STATE_CONNECTING, self.cacheEntry.STATE_DOWNLOAD, self.cacheEntry.STATE_SENDFILE):
+        #    self.fetcher_internal_error("Backend connection closed")
+        if fetcher == self.fetcher:
+            self.fetcher = None
 
     def file_not_found(self):
         log.msg("(%s) file not found: %s" % (self.backendServer.path, self.cacheEntry.path), 'fetcher')
@@ -224,13 +237,17 @@
         Fetcher has determined that our cached file is up to date
         so the file is sent from our cache
         """
+        log.msg("(%s) up_to_date" % (self.cacheEntry.path), 'fetcher')
         self.cacheEntry.send_cached_file()
+        if not self.fetcher.pipelining:
+            self.connection_closed(self.fetcher)
         self.deferred.callback((True, ""))
 
 class FileFetcher:
     """
     A Fetcher that simply copies files from disk
     """
+    pipelining = True
     def __init__(self, backendServer):
         self.backendServer = backendServer
         self.isConnected = True # Always connected
@@ -270,11 +287,7 @@
     def __init__(self, parent):
         self.parent = parent # HttpFetcher
         self.proxy = self.parent.proxy
-        self.log_headers = None
         self.fetcher = None
-        self.close_on_completion = False
-        self.server_mtime = None
-        self.server_size = None
 
     def connectionMade(self):
         """
@@ -285,8 +298,13 @@
 
     def download(self, fetcher, uri, mtime):
         # Request file from backend
+        self.log_headers = None
+        self.close_on_completion = True
+        self.server_mtime = None
+        self.server_size = None
         self.fetcher = fetcher
         self.uri = uri
+        self.finished = False
         backendServer = self.parent.backendServer
         if self.proxy is None:
             serverpath = backendServer.path
@@ -310,6 +328,18 @@
 
         self.endHeaders()
 
+    def download_complete(self):
+        if self.finished: 
+            return
+        log.debug("File transfer complete",'http_client')
+        self.finished = True
+        #if self.close_on_completion:
+            #self.fetcher.disconnect()
+            #self.parent.connection_closed() # We don't have a persistent connection
+            #self.fetcher.disconnect()
+            #self.transport.loseConnection()
+        self.fetcher.download_complete()
+
     def handleStatus(self, version, code, message):
         __pychecker__ = 'unusednames=version,message'
         log.debug('handleStatus %s - %s' % (code, message), 'http_client')
@@ -318,16 +348,20 @@
         #self.setResponseCode(self.http_status)
 
     def handleResponse(self, buffer):
-        log.debug('handleResponse, %s bytes' % (len(buffer)), 'http_client')
+        #log.debug('handleResponse, %s bytes' % (len(buffer)), 'http_client')
+        log.debug('handleResponse status=%s' % (self.http_status), 'http_client')
         if self.http_status == http.NOT_MODIFIED:
             log.debug("Backend server reported file is not modified: " + self.uri,'http_client')
             self.fetcher.up_to_date()
         elif self.http_status == http.NOT_FOUND:
             log.debug("Not found on backend server",'http_client')
             self.fetcher.file_not_found()
+        elif self.http_status == http.OK:
+            self.download_complete()
         else:
             log.debug("Unknown status code: %s" % (self.http_status),'http_client')
-    
+            self.fetcher.fetcher_internal_error("Unknown status code: %s" % (self.http_status))
+
     def handleHeader(self, key, value):
 
         log.debug("Received: " + key + " " + str(value), 'http_client')
@@ -343,6 +377,9 @@
             if value == "close":
                 log.debug('will close on completion', 'http_client')
                 self.close_on_completion = True
+            elif value == "keep-alive":
+                log.debug('will not close on completion', 'http_client')
+                self.close_on_completion = False
 
     #def handleEndHeaders(self):
         #if self.http_status == http.NOT_MODIFIED:
@@ -361,11 +398,8 @@
             if self.server_size is not None:
                 if self.fetcher.len_received >= self.server_size:
                     if self.fetcher.len_received == self.server_size:
-                        log.debug("File transfer complete",'http_client')
-                        self.fetcher.download_complete()
-                        if self.close_on_completion:
-                            self.parent.disconnect()
-                            #self.transport.loseConnection()
+                        pass
+                        #self.download_complete()
                     else:
                         log.err("File transfer overrun! Expected size:%s Received size:%s" % 
                                 (self.server_size, self.fetcher.len_received), 'http_client')
@@ -406,11 +440,18 @@
         "log and handle to the base class."
         log.debug(name + " sendHeader:" + value,'http_client')
         http.HTTPClient.sendHeader(self, name, value)
+        
+    def disconnect(self):
+        log.debug("DISCONNECT:",'http_client')
+        import traceback
+        traceback.print_stack()
+
 
 class HttpFetcher(protocol.ClientFactory):
     """
     A Fetcher factory that retrieves files via HTTP
     """
+    pipelining = False # twisted's HTTP client does not support pipelining
     def __init__(self, backendServer):
         self.backendServer = backendServer
         self.isConnected = False
@@ -448,9 +489,9 @@
         log.debug("clientConnectionFailed reason: %s" % (reason), "http-client")
         self.connectCallback.errback(reason)
     def clientConnectionLost(self, connector, reason):
-        log.debug("clientConnectionLost", "http-client")
+        log.debug("clientConnectionLost reason=%s" %(reason), "http-client")
         if self.connection is not None and self.connection.fetcher is not None:
-            self.connection.fetcher.connection_closed()
+            self.connection.fetcher.connection_closed(self)
 
     def download(self, fetcher, uri, mtime):
         """
@@ -481,6 +522,7 @@
     so the timeout specified for the backend may not always be used
     """
     
+    pipelining = True
     def __init__(self, backendServer):
         self.backendServer = backendServer
         self.isConnected = False
@@ -765,7 +807,7 @@
     rsync is run as an external process
     """
     rsyncCommand = '/usr/bin/rsync'
-
+    pipelining = False
     def __init__(self, backendServer):
         self.backendServer = backendServer
         self.rsyncProcess = None
@@ -894,7 +936,7 @@
                 # Tell clients, but data is already saved by rsync so don't
                 # write file again
                 self.parent.download_complete()
-                self.parent.connection_closed() # We don't have a persistent connection
+                #self.parent.connection_closed() # We don't have a persistent connection
 
     def processEnded(self, status_object):
         __pychecker__ = 'unusednames=reason'
@@ -1037,7 +1079,7 @@
     def downloadFinished(self, result):
         success, message = result
         if success:
-            log.debug("download complete", 'DownloadQueue')
+            log.debug("download complete: %s" % (self.activeFile.cache_path), 'DownloadQueue')
         else:
             log.debug("download failed: %s" % (message), 'DownloadQueue')
         self.activeFile = None
@@ -1045,8 +1087,8 @@
 
     def startNextDownload(self):
         if len(self.queue)>0:
-            log.debug("start next download", 'DownloadQueue')
             self.activeFile = self.queue[0]
+            log.debug("start next download (%s)" % (self.activeFile.cache_path), 'DownloadQueue')
             self.queue = self.queue[1:]
 
             if self.fetcher is not None:

Modified: people/halls/rework/apt_proxy/test/test_requests.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_requests.py	(original)
+++ people/halls/rework/apt_proxy/test/test_requests.py	Tue May 30 15:27:19 2006
@@ -32,91 +32,127 @@
 from apt_proxy.test.test_fetchers import RsyncServer
 from apt_proxy.test.test_apt_proxy import apTestHelper
 
+class uriData:
+    """
+    class containing test data for a request
+    """
+    def __init__(self, filename, expectedResponse, if_modified_since=None, expectedSize=None, filePath=None):
+        self.filename = filename
+        self.expectedResponse = expectedResponse
+        self.if_modified_since = if_modified_since
+        self.filePath = filePath
+
+        if expectedSize is not None:
+            self.expectedSize = expectedSize # If not none, the file sent should have this size
+        elif filePath is not None:
+            # Get size of actual file
+            self.expectedSize = os.path.getsize(filePath)
+        else:
+            self.expectedSize = None
+
 class uriRequester(http.HTTPClient):
     """
-    Helper class to request files and parse responses
+    class to request files and parse responses
     """
-    def __init__(self, factory):
-        self.factory = factory
+    class requestFactory(protocol.ClientFactory):
+        """
+        Helper factory to connect to apt-proxy and send
+        HTTP requests using uriRequester
+        """
+        #def startedConnecting(self, connector):
+        #    print 'Started to connect.'
+        def __init__(self, request):
+            self.request = request
+        def buildProtocol(self, addr):
+            "Pass incoming connection to our request"
+            return self.request
+        def clientConnectionLost(self, connector, reason):
+            log.debug('Lost connection.  Reason:'+ str(reason))
+        def clientConnectionFailed(self, connector, reason):
+            log.err('Connection failed. Reason:', reason, 'requestFactory')
+            self.request.failed()
+
+    def __init__(self, host, *testData):
+        """
+        Start test. *testData holds classes of uriData
+        """
+        self.factory = self.requestFactory(self)
+        self.host = host
+        self.deferred = defer.Deferred() # Deferred that returns result of test
         self.http_status = None
         self.received_len = 0
+        self.tests=testData
 
     def connectionMade(self):
         """
         Http connection made
         """
-        log.debug("connection made to test apt-proxy server. requesting: " + self.factory.filename
-                   , 'uriRequesterTest')
-        self.sendCommand("GET", self.factory.filename)
-        self.sendHeader('host', self.factory.host)
-        if self.factory.if_modified_since is not None:
-            datetime = http.datetimeToString(self.factory.if_modified_since)
-            self.sendHeader('if-modified-since', datetime)
-        self.endHeaders()
-
+        log.debug("connection made to test apt-proxy server", 'uriRequester')
+        for i in range(0,len(self.tests)):
+            test = self.tests[i]
+            log.debug("requesting: %s" %(test.filename), 'uriRequester')
+            #self.sendCommand("GET", test.filename)
+            self.transport.write('%s %s HTTP/1.1\r\n' % ("GET", test.filename))
+
+            self.sendHeader('Host', self.host)
+            if len(self.tests)>1 and i != len(self.tests):
+                self.sendHeader('Connection','keep-alive')
+            else:
+                self.sendHeader('Connection','close')
+            if test.if_modified_since is not None:
+                datetime = http.datetimeToString(test.if_modified_since)
+                self.sendHeader('if-modified-since', datetime)
+            self.sendHeader("User-Agent", "apt-proxy test suite test_requests.py")
+
+            self.endHeaders()
+        self.getNextTest() # Start first test
+
+    def getNextTest(self):
+        # Ready for next status code
+        self.firstLine = 1 
+        #self.length = None
+        self.__buffer = ''
+
+        if len(self.tests):
+            self.nextTest = self.tests[0]
+            self.tests = self.tests[1:]
+            log.debug("waiting for test results for: " + self.nextTest.filename, 'uriRequester')
+        else:
+            log.debug('test passed', 'uriRequester')
+            self.deferred.callback(None)
+            
     #def handleStatus(self, version, code, message):
     def handleStatus(self, version, code, message):
         log.debug('handleStatus: (%s) %s - %s, expected:%s' % 
-                   (version, code, message, self.factory.expectedResponse), 'uriRequesterTest')
+                   (version, code, message, self.nextTest.expectedResponse), 'uriRequester')
         self.http_status = int(code)
 
     def dataReceived(self, data):
         self.received_len = self.received_len + len(data)
-        log.debug("data received, len: %s" % (self.received_len), 'uriRequesterTest')
+        log.debug("data received, len: %s" % (self.received_len), 'uriRequester')
         http.HTTPClient.dataReceived(self, data)
         
     def handleResponse(self, buffer):
         received_len = len(buffer)
-        log.debug('data received: %s bytes, expected:%s' % (received_len, self.factory.expectedSize), 'uriRequesterTest')
-        if self.http_status != self.factory.expectedResponse:
+        log.debug('data received: %s bytes, expected:%s' % (received_len, self.nextTest.expectedSize), 'uriRequester')
+        if self.http_status != self.nextTest.expectedResponse:
             log.debug('test FAILED: response code (%s) is not %s' % 
-                       (self.http_status, self.factory.expectedResponse), 'uriRequesterTest')
-            self.factory.failed()
-        elif self.factory.expectedSize is not None and received_len != self.factory.expectedSize:
+                       (self.http_status, self.nextTest.expectedResponse), 'uriRequester')
+            self.failed()
+        elif self.nextTest.expectedSize is not None and received_len != self.nextTest.expectedSize:
             log.debug('test FAILED: received %s bytes, but expected %s' % 
-                    (received_len, self.factory.expectedSize), 'uriRequesterTest')
-            self.factory.failed()
+                    (received_len, self.nextTest.expectedSize), 'uriRequester')
+            self.failed()
         else:
-            self.factory.passed()
-
-class requestFactory(protocol.ClientFactory):
-    """
-    Helper factory to connect to apt-proxy and send
-    HTTP requests using uriRequester
-    """
-    def __init__(self, filename, host, expectedResponse, if_modified_since=None, expectedSize=None):
-        self.filename = filename
-        self.host = host
-        self.testDone = False
-        self.testPassed = False
-        self.timedOut = False
-        self.expectedResponse = expectedResponse
-        self.if_modified_since = if_modified_since
-        self.expectedSize = expectedSize # If not none, the file sent should have this size
-        self.deferred = defer.Deferred() # Deferred that returns result of test
-    #def startedConnecting(self, connector):
-    #    print 'Started to connect.'
-    def buildProtocol(self, addr):
-        p = uriRequester(self)
-        p.factory = self
-        self.protocol = p
-        return p
-    def clientConnectionLost(self, connector, reason):
-        log.debug('Lost connection.  Reason:'+ str(reason))
-        self.testDone = True
-        #self.passed()
-
-    def clientConnectionFailed(self, connector, reason):
-        log.err('Connection failed. Reason:', reason, 'requestFactory')
-        self.failed()
+            self.passed()
 
     def passed(self):
-        log.debug('test passed', 'requestFactory')
-        self.deferred.callback(None)
+        self.getNextTest()
     def failed(self):
-        log.debug('test failed', 'requestFactory')
+        log.debug('test failed', 'uriRequester')
         self.deferred.errback(failure.Failure())
 
+
 class TestRequestHelper(apTestHelper):
     def setUp(self, config):
         apTestHelper.setUp(self)
@@ -134,21 +170,18 @@
         apTestHelper.tearDown(self)
         self.assertRaises(OSError, os.stat, self.cache_dir)
 
-    def doRequest(self, file, responseCode, if_modified_since=None, filePath=None):
+    def doRequest(self, *data):
         portno = self.port.getHost().port
-        log.debug("Starting test connection to 127.0.0.1:%s, file:%s:" %(portno, file), 'uriRequesterTest')
-        if filePath is not None:
-            # Get size of actual file
-            expectedSize = os.path.getsize(filePath)
-        else:
-            expectedSize = None
-        clientFactory = requestFactory(file, "127.0.0.1:%s"% (portno), responseCode, if_modified_since, expectedSize)
-        connection = reactor.connectTCP("127.0.0.1", portno, clientFactory)
+        host = "127.0.0.1:%s" % (portno)
+        for d in data:
+            log.debug("Starting test connection to %s, file:%s:" %(host, d.filename), 'uriRequesterTest')
+        client = uriRequester(host, *data)
+        connection = reactor.connectTCP("127.0.0.1", portno, client.factory)
         self.connection = connection
 
-        clientFactory.deferred.addBoth(lambda x: connection.disconnect())
-        self.lastRequestFactory = clientFactory
-        return clientFactory.deferred
+        client.deferred.addBoth(lambda x: connection.disconnect())
+        self.lastRequestFactory = client
+        return client.deferred
 
 class FileBackendTest(TestRequestHelper):
     def setUp(self):
@@ -165,18 +198,22 @@
         TestRequestHelper.setUp(self, config)
 
     def testNotFound(self):
-        return self.doRequest('/files/test.gz', http.NOT_FOUND)
+        return self.doRequest(uriData('/files/test.gz', http.NOT_FOUND))
+    testNotFound.timeout = 1
     def testPackagesFile(self):
         file = 'Packages.gz'
-        return self.doRequest('/files/'+file, http.OK, filePath=self.filedir+os.sep+file).addCallback(self.PackagesFile2)
+        d = uriData('/files/'+file, http.OK, filePath=self.filedir+os.sep+file)
+        return self.doRequest(d).addCallback(self.PackagesFile2)
+    testPackagesFile.timeout = 1
     def PackagesFile2(self, x):
         backend = self.factory.getBackend('files')
         # Check that request was deleted from backend
         self.assertEquals(len(backend.entries), 0)
 
     def testForbidden(self):
-        d = self.doRequest('/notbackend/Release', http.NOT_FOUND)
+        d = self.doRequest(uriData('/notbackend/Release', http.NOT_FOUND))
         return d
+    testForbidden.timeout = 1
 
 class WebServer:
     def start(self):
@@ -221,25 +258,36 @@
         log.debug("tearDown", self.debugname)
         TestRequestHelper.tearDown(self)
     def testNotFound(self):
-        return self.doRequest('/test_data/NotHere.gz', http.NOT_FOUND)
+        return self.doRequest(uriData('/test_data/NotHere.gz', http.NOT_FOUND))
     testNotFound.timeout = 2
 
     def downloadFile(self, file='/packages/Packages.gz'):
         """
         Download a file to cache
         self.backend is set to backend name
-        self.file is set to filename
-        self.filepath is set to physical filename
+        self.file is set to last filename
+        self.filepath is set to last physical filename
         """
-        self.filename = '/' + self.backendName + file
-        self.filepath = self.cache_dir + self.filename
+        return self.downloadFiles(file)
 
-        # File should not be in cache
-        self.assertRaises(OSError, os.stat, self.filepath)
-        d = self.doRequest(self.filename, http.OK, filePath=self.testfilesdir+file)
+    def downloadFiles(self, *files):
+        """
+        Download a number of files to cache
+        """
+        data = []
+        self.filepaths = []
+        for f in files:
+            self.filename = '/' + self.backendName + f
+            filepath = self.cache_dir + self.filename
+            # File should not be in cache
+            self.assertRaises(OSError, os.stat, filepath)
+            self.filepaths.append(filepath)
+            data.append(uriData(self.filename, http.OK, filePath=self.testfilesdir+f))
+        d = self.doRequest(*data)
         def checkPath(x):
-            # Check that file was really placed in cache
-            os.stat(self.filepath) 
+            # Check that files were really placed in cache
+            for f in self.filepaths:
+                os.stat(f)
         d.addCallback(checkPath)
         return d
 
@@ -259,7 +307,7 @@
         return self.testResult
     def NotModifiedGreater2(self, x):
         log.debug("testNotModifiedGreater: starting second client", self.debugname)
-        d = self.doRequest(self.filename, http.NOT_MODIFIED, time.time())
+        d = self.doRequest(uriData(self.filename, http.NOT_MODIFIED, time.time()))
         d.chainDeferred(self.testResult)
     testNotModifiedGreater.timeout = 3
 
@@ -270,7 +318,7 @@
         d.addErrback(lambda x: self.testResult.errback(failure.Failure()))
         return self.testResult
     def NotModifiedExact2(self, x):
-        d = self.doRequest(self.filename, http.NOT_MODIFIED, os.path.getmtime(self.filepath))
+        d = self.doRequest(uriData(self.filename, http.NOT_MODIFIED, os.path.getmtime(self.filepath)))
         d.chainDeferred(self.testResult)
     testNotModifiedExact.timeout = 2
 
@@ -314,13 +362,13 @@
         d.addErrback(self.CachedError)
         return self.testResult
     def Cached2(self, x):
-        d = self.doRequest(self.filename, http.OK, filePath=self.filepath)
+        d = self.doRequest(uriData(self.filename, http.OK, filePath=self.filepaths[0]))
         d.addCallback(self.Cached3)
         d.addErrback(self.CachedError)
     def Cached3(self, x):
         log.debug("Downloading second copy", self.debugname)
         self.factory.config.min_refresh_delay = 0
-        d = self.doRequest(self.filename, http.OK, filePath=self.filepath)
+        d = self.doRequest(uriData(self.filename, http.OK, filePath=self.filepaths[0]))
         d.addCallback(self.CachedPass)
         d.addErrback(self.CachedError)
     def CachedPass(self, x):
@@ -336,6 +384,18 @@
         b.config.bandwidth_limit = 10000000
         # We're not testing here that limiting is applied, just that the code runs
         return self.downloadFile(file='/packages/apt_0.0.1_test.deb')
+    testBwLimit.timeout = 2
+
+    # This test does not work with current twisted http client :(
+    #def testPipeline(self):
+        #"Test pipelined GETs"
+        #return self.downloadFiles('/packages/Packages.gz', '/packages/Packages', '/packages/Packages.bz2')
+    #testPipeline.timeout = 2
+
+    def testEmpty(self):
+        "Test download of empty file"
+        return self.downloadFiles('/packages/empty.txt')
+    testEmpty.timeout = 2
 
     #def testTimeout(self):
         #pass

Modified: people/halls/rework/debian/changelog
==============================================================================
--- people/halls/rework/debian/changelog	(original)
+++ people/halls/rework/debian/changelog	Tue May 30 15:27:19 2006
@@ -10,8 +10,9 @@
     rates (Closes: #306095)
   * Add more unit tests
   * Add support for rsync port specification
+  * Remove obsolete debian/TODO from source package
 
- -- Chris Halls <halls at debian.org>  Fri,  5 May 2006 18:34:40 +0100
+ -- Chris Halls <halls at debian.org>  Thu, 25 May 2006 14:11:32 +0100
 
 apt-proxy (1.9.33) unstable; urgency=low
 

Modified: people/halls/rework/runtests
==============================================================================
--- people/halls/rework/runtests	(original)
+++ people/halls/rework/runtests	Tue May 30 15:27:19 2006
@@ -4,7 +4,7 @@
 
 testfiles="packages/Packages packages/Packages.gz packages/Packages.bz2
            verify/invalid-gzip.gz 
-	   packages/apt_0.0.1_test.deb
+	   packages/apt_0.0.1_test.deb packages/empty.txt
 	   apt/apt_0.0.1_test.deb apt/apt_0.0.2_test.deb apt/apt_0.0.3_test.deb"
 rootdir="$(cd $(dirname $0); pwd)"
 testdir="$rootdir/test_data"
@@ -60,6 +60,7 @@
   echo Creating Packages file for package
   dpkg-scanpackages . /dev/null | tee Packages | gzip -c > Packages.gz
   bzip2 -c < Packages > Packages.bz2
+  touch empty.txt
   cd ..
 
   mkdir -p $testdir/verify



More information about the apt-proxy-devel mailing list