[apt-proxy-devel] r597 - in people/halls/rework/apt_proxy: . test

Chris Halls halls at costa.debian.org
Wed Mar 8 12:50:00 UTC 2006


Author: halls
Date: Wed Mar  8 12:49:54 2006
New Revision: 597

Modified:
   people/halls/rework/apt_proxy/apt_proxy.py
   people/halls/rework/apt_proxy/apt_proxy_conf.py
   people/halls/rework/apt_proxy/cache.py
   people/halls/rework/apt_proxy/fetchers.py
   people/halls/rework/apt_proxy/test/test_apt_proxy.py
   people/halls/rework/apt_proxy/test/test_cache.py
   people/halls/rework/apt_proxy/test/test_config.py
   people/halls/rework/apt_proxy/test/test_requests.py
Log:
* Fix ftp fetcher support
* More unit tests
* Fix fetcher keep connection logic
* More bug fixes


Modified: people/halls/rework/apt_proxy/apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy.py	(original)
+++ people/halls/rework/apt_proxy/apt_proxy.py	Wed Mar  8 12:49:54 2006
@@ -102,8 +102,9 @@
             return e
     def entry_done(self, entry):
         "A cache entry is finished and clients are disconnected"
-        #if self.entries.has_key(path):
-            
+        #if self.entries.has_key(entry.path):
+        log.debug("entry_done: %s" %(entry.path), 'Backend')
+        del self.entries[entry.path]
 
     def get_packages_db(self):
         "Return packages parser object for the backend, creating one if necessary"
@@ -195,7 +196,7 @@
     Each new request from connected clients generates a new instance of this
     class, and process() is called.
     """
-    local_mtime = None
+    if_modified_since = None
     local_size = None
     serve_if_cached = 1
     apFetcher = None
@@ -216,6 +217,11 @@
         # Clean up URL
         self.uri = os.path.normpath(self.uri)
 
+        if_modified_since = self.getHeader('if-modified-since')
+        if if_modified_since != None:
+            self.if_modified_since = http.stringToDatetime(
+                    if_modified_since)
+
         if self.uri[0] != '/':
             log.debug("Request must include at least one '/'")
             self.finishCode(http.FORBIDDEN, "Request must include at least one '/'")
@@ -268,13 +274,14 @@
         Prepare client to stream file
         Return false if streaming is not necessary (i.e. cache hit)
         """
-        log.debug("start_streaming size=%s mtime=%s" % (size, mtime) , 'Request')
-        if self.local_mtime <= mtime:
+        if self.if_modified_since is None or self.if_modified_since < mtime:
+            log.debug("start_streaming size=%s mtime=%s if_modified_since=%s" % (size, mtime, self.if_modified_since) , 'Request')
             self.setResponseCode(http.OK, 'Streaming file')
             self.setHeader('last-modified', http.datetimeToString(mtime))
             self.setHeader('content-length', size)
             return True
         else:
+            log.debug("file not modified: mtime=%s if_modified_since=%s" % (mtime, self.if_modified_since) , 'Request')
             self.setHeader("content-length", 0)
             self.finishCode(http.NOT_MODIFIED, 'File is up to date')
             return False
@@ -561,8 +568,8 @@
         @param cache_path: path of file within cache e.g. debian/dists/stable/Release.gpg
         """
         log.debug("File served: %s" % (cache_path))
-        #path = os.sep + cache_path # Backwards compat
-        path = cache_path
+        path = os.sep + cache_path # Backwards compat
+        #path = cache_path
         self.access_times[path]=time.time()
         if re.search("\.deb$", path):
             package = re.sub("^.*/", "", path)

Modified: people/halls/rework/apt_proxy/apt_proxy_conf.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy_conf.py	(original)
+++ people/halls/rework/apt_proxy/apt_proxy_conf.py	Wed Mar  8 12:49:54 2006
@@ -99,7 +99,7 @@
         ['dynamic_backends', 'on', 'boolean'],
         ['http_proxy', None , 'proxyspec'],
         ['username', 'aptproxy', 'string'],
-        ['read_limit', 0, '*int']
+        ['read_limit', None, '*int']
         ]
 
     """

Modified: people/halls/rework/apt_proxy/cache.py
==============================================================================
--- people/halls/rework/apt_proxy/cache.py	(original)
+++ people/halls/rework/apt_proxy/cache.py	Wed Mar  8 12:49:54 2006
@@ -363,7 +363,7 @@
         self.state = self.STATE_SENT
         self.fetcher = None
         self.backend.file_served(self)
-        self.factory.file_served(self.file_path)
+        self.factory.file_served(self.cache_path)
         self.factory.update_times[self.cache_path] = time.time()
         self.state = self.STATE_NEW
 

Modified: people/halls/rework/apt_proxy/fetchers.py
==============================================================================
--- people/halls/rework/apt_proxy/fetchers.py	(original)
+++ people/halls/rework/apt_proxy/fetchers.py	Wed Mar  8 12:49:54 2006
@@ -21,11 +21,11 @@
 network backends
 """
 
-import re, os, string
+import re, os, string, time
 from twisted.web import static, http
 from twisted.internet import protocol, reactor, defer
 from twisted.python import failure
-from twisted.protocols import policies
+from twisted.protocols import policies, ftp
 
 from misc import log
 
@@ -70,8 +70,10 @@
                 # The attempt to retrieve a file from the BackendServer failed.
                 log.debug("no more Backends", "fetcher")
                 return False
+        self.connectToBackend()
 
-        log.debug('Trying next backendServer', 'fetcher')
+    def connectToBackend(self):
+        log.debug('Connecting to backend server', 'fetcher')
         self.fetcher = self.backendServer.fetcher(self.backendServer)
         d = self.fetcher.connect()
         d.addCallback(self.connected)
@@ -91,7 +93,12 @@
         """
         log.debug("Downloading: " + self.cacheEntry.file_path, 'Fetcher')
         #init_tempfile()
-        self.activateNextBackendServer(self.fetcher)
+        if self.backendServer is None:
+            self.activateNextBackendServer(self.fetcher)
+        elif self.fetcher is None:
+            self.connectToBackend()
+        else:
+            self.download()
 
     def download_complete(self):
         """
@@ -100,6 +107,7 @@
         log.debug("download complete", "Fetcher")
         self.cacheEntry.download_data_end()
         self.deferred.callback((True, ""))
+        #self.fetcher = None
 
     def fail_over(self, reason_code, reason_msg):
         """
@@ -146,7 +154,7 @@
 
     def server_mtime(self, mtime):
         """
-        The server has sent the expected length of the file
+        The server has sent the modification time of the file
         """
         self.mtime = mtime
         log.debug("File mtime: " + str(mtime), 'Fetcher');
@@ -174,6 +182,9 @@
 
     def connected(self, result):
         log.debug("Connected to "+ self.backendServer.uri, 'Fetcher')
+        self.download()
+
+    def download(self):
         log.debug('downloading:%s mtime:%s' % (self.cacheEntry.path, self.cacheEntry.file_mtime), 'Fetcher')
         self.fetcher.download(self, self.cacheEntry.path, self.cacheEntry.file_mtime)
 
@@ -296,6 +307,14 @@
 
     def handleResponse(self, buffer):
         log.debug('handleResponse, %s bytes' % (len(buffer)), 'http_client')
+        if self.http_status == http.NOT_MODIFIED:
+            log.debug("Backend server reported file is not modified: " + self.uri,'http_client')
+            self.fetcher.up_to_date()
+        elif self.http_status == http.NOT_FOUND:
+            log.debug("Not found on backend server",'http_client')
+            self.fetcher.file_not_found()
+        else:
+            log.debug("Unknown status code: %s" % (self.http_status),'http_client')
     
     def handleHeader(self, key, value):
 
@@ -313,13 +332,15 @@
                 log.debug('will close on completion', 'http_client')
                 self.close_on_completion = True
 
-    def handleEndHeaders(self):
-        if self.http_status == http.NOT_MODIFIED:
-            log.debug("Backend server reported file is not modified: " + self.uri,'http_client')
-            self.fetcher.up_to_date()
-        elif self.http_status == http.NOT_FOUND:
-            log.debug("Not found on backend server",'http_client')
-            self.fetcher.file_not_found()
+    #def handleEndHeaders(self):
+        #if self.http_status == http.NOT_MODIFIED:
+            #log.debug("Backend server reported file is not modified: " + self.uri,'http_client')
+            #self.fetcher.up_to_date()
+        #elif self.http_status == http.NOT_FOUND:
+            #log.debug("Not found on backend server",'http_client')
+            #self.fetcher.file_not_found()
+        #else:
+            #log.debug("Unknown status code: %s" % (self.http_status),'http_client')
 
     def rawDataReceived(self, data):
         if self.http_status == http.OK:
@@ -447,51 +468,28 @@
         self.backendServer = backendServer
         self.isConnected = False
         self.instance = None
+        self.ftpclient = None
 
     def connect(self):
         """
         Establish connection to ftp server specified by backendServer
         """
-        self.connectCallback = defer.deferred()
-        if not self.proxy.host:
-            host = self.backendServer.host
-            port = self.backendServer.port
-        else:
-            host = self.proxy.host
-            port = self.proxy.port
-        reactor.connectTCP(host, port, self, self.backendServer.config.timeout)
-        return self.connectCallback
-
-    def download(self, fetcher, uri, mtime):
-        """
-        Request download
-        %param fetcher: Fetcher class to receive callbacks
-        %param uri: URI of file to be downloaded within backend
-        %param mtime: Modification time of current file in cache
-        """
-        self.parent = fetcher
-        self.cache_mtime = mtime
-        self.request_uri = uri
-
-        self.passive_ftp = self.backendServer.config.passive_ftp
-        
-        self.remote_file = (self.backendServer.path + "/" 
-                            + uri)
-
-        if not request.backendServer.username:
+        self.connectCallback = defer.Deferred()
+        if not self.backendServer.username:
             creator = protocol.ClientCreator(reactor, ftp.FTPClient, passive=0)
         else:
             creator = protocol.ClientCreator(reactor, ftp.FTPClient, request.backendServer.username,
                                     request.backendServer.password, passive=0)
-        d = creator.connectTCP(request.backendServer.host, request.backendServer.port,
-                               request.backend.config.timeout)
+        d = creator.connectTCP(self.backendServer.host, self.backendServer.port,
+                               self.backendServer.backend.config.timeout)
         d.addCallback(self.controlConnectionMade)
-        d.addErrback(self.connectionFailed)
+        d.addErrback(self.clientConnectionFailed)
+        return self.connectCallback
 
     def controlConnectionMade(self, ftpclient):
         self.ftpclient = ftpclient
-        
-        if(self.passive_ftp):
+
+        if(self.backendServer.backend.config.passive_ftp):
             log.debug('Got control connection, using passive ftp', 'ftp_client')
             self.ftpclient.passive = 1
         else:
@@ -500,104 +498,126 @@
 
         if log.isEnabled('ftp_client'):
             self.ftpclient.debug = 1
+        self.connectCallback.callback(None)
 
-        self.ftpFetchMtime()
-
-    def ftpFinish(self, code, message=None):
-        "Finish the transfer with code 'code'"
-        self.ftpclient.quit()
-        self.setResponseCode(code, message)
-        self.apDataReceived("")
-        self.apDataEnd(self.transfered)
+    def clientConnectionFailed(self, reason):
+        #self.instance.connectionFailed(reason)
+        log.debug("clientConnectionFailed reason: %s" % (reason), "ftp_client")
+        self.connectCallback.errback(reason)
 
-    def ftpFinishCached(self):
-        "Finish the transfer giving the requests the cached file."
-        self.ftpclient.quit()
-        self.apEndCached()
+    def download(self, fetcher, uri, mtime):
+        """
+        Request download
+        %param fetcher: Fetcher class to receive callbacks
+        %param uri: URI of file to be downloaded within backend
+        %param mtime: Modification time of current file in cache
+        """
+        self.parent = fetcher
+        self.cache_mtime = mtime
+        self.request_uri = uri
+        self.remote_file = (self.parent.backendServer.path + '/' 
+                            + uri)
+        self.ftpFetchMtime()
 
     def ftpFetchMtime(self):
         "Get the modification time from the server."
-        def apFtpMtimeFinish(msgs, fetcher, fail):
-            """
-            Got an answer to the mtime request.
-            
-            Someone should check that this is timezone independent.
-            """
-            code = None
-            if not fail:
-                code, msg = msgs[0].split()
-            mtime = None
-            if code == '213':
-                time_tuple=time.strptime(msg[:14], "%Y%m%d%H%M%S")
-                #replace day light savings with -1 (current)
-                time_tuple = time_tuple[:8] + (-1,)
-                #correct the result to GMT
-                mtime = time.mktime(time_tuple) - time.altzone
-                fetcher.server_mtime = mtime
-                if (fetcher.XXXXXXXXXXXXXX_mtime
-                      and fetcher.XXXXXXXXlocal_mtime >= mtime):
-                    fetcher.ftpFinishCached()
-                    return
-            fetcher.ftpFetchSize()
-
         d = self.ftpclient.queueStringCommand('MDTM ' + self.remote_file)
-        d.addCallbacks(apFtpMtimeFinish, apFtpMtimeFinish,
-                       (self, 0), None, (self, 1), None)
-        d.arm()
+        d.addCallback(self.ftpMtimeResult)
+        d.addErrback(self.ftpMtimeFailed)
+
+    def ftpMtimeResult(self, msgs):
+        """
+        Got an answer to the mtime request.
+
+        Someone should check that this is timezone independent.
+        """
+        code, msg = msgs[0].split()
+        if code == '213':
+            time_tuple=time.strptime(msg[:14], "%Y%m%d%H%M%S")
+            #replace day light savings with -1 (current)
+            time_tuple = time_tuple[:8] + (-1,)
+            #correct the result to GMT
+            mtime = time.mktime(time_tuple) - time.altzone
+            self.parent.server_mtime(mtime)
+
+            if (self.cache_mtime
+                    and self.cache_mtime >= mtime):
+                self.parent.up_to_date()
+                return
+        self.ftpFetchSize()
+
+    def ftpMtimeFailed(self, msgs):
+        if msgs.check(ftp.CommandFailed):
+            code = msgs.getErrorMessage()[2:5]
+            log.debug("ftp fetch of Mtime failed: %s code:%s" % (msgs.getErrorMessage(), code), 'ftp_client')
+            if code == '550':
+                # Not found
+                self.parent.file_not_found()
+                return
+        log.debug("ftp fetch of Mtime for %s unknown failure: %s" % (self.remote_file, msgs), 'ftp_client')
+        self.ftpFetchSize()
 
     def ftpFetchSize(self):
         "Get the size of the file from the server"
-        def apFtpSizeFinish(msgs, fetcher, fail):
-            code = None
-            if not fail:
-                code, msg = msgs[0].split()
-            if code != '213':
-                log.debug("SIZE FAILED",'ftp_client')
-                fetcher.ftpFetchList()
-            else:
-                fetcher.setResponseHeader('content-length', msg)
-                fetcher.ftpFetchFile()
 
         d = self.ftpclient.queueStringCommand('SIZE ' + self.remote_file)
-        d.addCallbacks(apFtpSizeFinish, apFtpSizeFinish,
-                       (self, 0), None, (self, 1), None)
-        d.arm()
+        d.addCallback(self.ftpSizeResult)
+        d.addErrback(self.ftpSizeFailed)
+
+    def ftpSizeResult(self, msgs):
+        code, msg = msgs[0].split()
+        if code == '213':
+            size = int(msg)
+            self.parent.server_size(size)
+            self.ftpFetchFile()
+        else:
+            self.ftpSizeFailed(msgs)
+    def ftpSizeFailed(self, msgs):
+        log.debug("ftp size failed: %s" % (msgs), 'ftp_client')
+        self.ftpFetchList()
 
     def ftpFetchList(self):
         "If ftpFetchSize didn't work try to get the size with a list command."
-        def apFtpListFinish(msg, filelist, fetcher, fail):
-            __pychecker__ = 'unusednames=msg'
-            if fail:
-                fetcher.ftpFinish(http.INTERNAL_SERVER_ERROR)
-                return
-            if len(filelist.files)== 0:
-                fetcher.ftpFinish(http.NOT_FOUND)
-                return
-            file = filelist.files[0]
-            fetcher.setResponseHeader('content-length', file['size'])
-            fetcher.ftpFetchFile()
-        filelist = ftp.FTPFileListProtocol()
-        d = self.ftpclient.list(self.remote_file, filelist)
-        d.addCallbacks(apFtpListFinish, apFtpListFinish,
-                       (filelist, self, 0), None,
-                       (filelist, self, 1), None)
-        d.arm()
+        self.filelist = ftp.FTPFileListProtocol()
+        d = self.ftpclient.list(self.remote_file, self.filelist)
+        d.addCallback(self.ftpListResult)
+        d.addErrback(self.ftpListFailed)
+
+    def ftpListResult(self, msg):
+        __pychecker__ = 'unusednames=msg'
+        if len(self.filelist.files)== 0:
+            log.debug("Not found on backend server",'ftp_client')
+            self.parent.file_not_found()
+            return
+        file = self.filelist.files[0]
+        self.parent.server_size(file['size'])
+        self.ftpFetchFile()
+
+    def ftpListFailed(self, msgs):
+        log.debug("ftp list failed: %s" % (msgs), 'ftp_client')
+        self.parent.download_failed(http.INTERNAL_SERVER_ERROR, "Could not list directory")
 
     def ftpFetchFile(self):
         "And finally, we ask for the file."
-        def apFtpFetchFinish(msg, code, status, fetcher):
-            __pychecker__ = 'unusednames=msg,status'
-            fetcher.ftpFinish(code)
         log.debug('ftpFetchFile: ' + self.remote_file, 'ftp_client')
         d = self.ftpclient.retrieveFile(self.remote_file, self)
-        d.addCallbacks(apFtpFetchFinish, apFtpFetchFinish,
-                       (http.OK, "good", self), None,
-                       (http.NOT_FOUND, "fail", self), None)
-        d.arm()
+        d.addCallback(self.ftpFetchResult)
+        d.addErrback(self.ftpFetchFailed)
+    def ftpFetchResult(self, msg):
+        self.parent.download_complete()
+    def ftpFetchFailed(self, msgs):
+        log.debug("ftp fetch failed: %s" % (msgs), 'ftp_client')
+        self.parent.file_not_found()
 
     def dataReceived(self, data):
-        self.setResponseCode(http.OK)
-        self.apDataReceived(data)
+        self.parent.data_received(data)
+
+    def disconnect(self):
+        log.debug('disconnecting', 'ftp_client')
+        if self.ftpclient is not None:
+            self.ftpclient.quit()
+            self.ftpclient.transport.loseConnection()
+            self.ftpclient = None
 
     def connectionLost(self, reason=None):
         """
@@ -980,10 +1000,16 @@
             self.queue = self.queue[1:]
     
             if self.fetcher is not None:
-                if self.fetcher.backendServer != self.activeFile.backend:
+                if self.fetcher.backendServer.backend != self.activeFile.backend:
+                    log.debug("old:%s new:%s" %(self.fetcher.backendServer.backend,self.activeFile.backend) 
+                              , 'DownloadQueue')
+                    log.debug("changing backend server", 'DownloadQueue')
                     self.fetcher.disconnect()
                     self.fetcher = Fetcher()
+                else:
+                    log.debug("keeping backend server", 'DownloadQueue')
             else:
+                log.debug("creating new fetcher", 'DownloadQueue')
                 self.fetcher = Fetcher()
             d = self.fetcher.start(self.activeFile)
             d.addCallback(self.downloadFinished)

Modified: people/halls/rework/apt_proxy/test/test_apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_apt_proxy.py	(original)
+++ people/halls/rework/apt_proxy/test/test_apt_proxy.py	Wed Mar  8 12:49:54 2006
@@ -215,7 +215,7 @@
         file = 'debian/dists/stable/Release.gpg'
         path = os.sep + file
         self.failIf(self.factory.access_times.has_key(path))
-        self.factory.file_served(path)
+        self.factory.file_served(file)
         self.failUnless(self.factory.access_times.has_key(path))
         # This is not a versioned file
         self.failIf(self.factory.packages.has_key(path))
@@ -227,7 +227,7 @@
         packagename = 'nonexistent'
         self.failIf(self.factory.access_times.has_key(path))
         self.failIf(self.factory.packages.has_key(packagename))
-        self.factory.file_served(path)
+        self.factory.file_served(file)
         self.failUnless(self.factory.access_times.has_key(path))
         # This is not a versioned file
         self.failUnless(self.factory.packages.has_key(packagename))

Modified: people/halls/rework/apt_proxy/test/test_cache.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_cache.py	(original)
+++ people/halls/rework/apt_proxy/test/test_cache.py	Wed Mar  8 12:49:54 2006
@@ -54,7 +54,7 @@
         self.factory = Factory(self.c)
         self.factory.createBackends()
         self.backend = self.factory.getBackend("files")
-        self.entry = CacheEntry(self.backend, "testdir/testfile.deb")
+        self.entry = self.backend.get_cache_entry("testdir/testfile.deb")
         self.request = DummyRequest()
 
     def tearDown(self):
@@ -181,5 +181,5 @@
         f.close()
         close_time = time.time()
         self.entry.stat_file()
-        self.assertApproximates(self.entry.file_mtime, close_time, 1)
+        self.assertApproximates(self.entry.file_mtime, close_time, 3)
         self.assertEquals(self.entry.file_size, 5)

Modified: people/halls/rework/apt_proxy/test/test_config.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_config.py	(original)
+++ people/halls/rework/apt_proxy/test/test_config.py	Wed Mar  8 12:49:54 2006
@@ -35,10 +35,12 @@
 port=8989
 address=1.2.3.4 5.6.7.8
 timeout = 888
+read_limit = 2323
 
 [backend1]
 backends = ftp://a.b.c
 timeout = 999
+read_limit = 3434
 
 [backend2]
 backends = 
@@ -79,4 +81,13 @@
         self.assertEquals(self.c.backends['dynamic1'].name,'dynamic1')
         self.assertEquals(self.c.backends['dynamic1'].dynamic,True)
         self.assertEquals(self.c.backends['dynamic1'].timeout,888)
+    def testReadLimit(self):
+        self.assertEquals(self.c.read_limit, 2323)
+        self.assertEquals(self.c.backends['backend1'].read_limit,3434)
+        self.assertEquals(self.c.backends['backend2'].read_limit,2323)
 
+class DefaultsTest(unittest.TestCase):
+    def setUp(self):
+        self.c = apConfig(StringIO(""))
+    def testDefaultReadLimit(self):
+        self.assertEquals(self.c.read_limit, None)

Modified: people/halls/rework/apt_proxy/test/test_requests.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_requests.py	(original)
+++ people/halls/rework/apt_proxy/test/test_requests.py	Wed Mar  8 12:49:54 2006
@@ -16,9 +16,10 @@
 
 """This module tests the client protocol itself"""
 
-import os
+import os, time
 from twisted.trial import unittest
 from twisted.internet import protocol, reactor
+from twisted import web
 from twisted.web import http
 from StringIO import StringIO
 
@@ -40,9 +41,13 @@
         """
         Http connection made
         """
-        log.debug("connection made to test apt-proxy server. requesting:" + self.factory.filename, 'uriRequesterTest')
+        log.debug("connection made to test apt-proxy server. requesting: " + self.factory.filename
+                   , 'uriRequesterTest')
         self.sendCommand("GET", self.factory.filename)
         self.sendHeader('host', self.factory.host)
+        if self.factory.if_modified_since is not None:
+            datetime = http.datetimeToString(self.factory.if_modified_since)
+            self.sendHeader('if-modified-since', datetime)
         self.endHeaders()
 
     #def handleStatus(self, version, code, message):
@@ -52,8 +57,8 @@
         self.http_status = int(code)
 
 
-    def dataReceived(self, data):
-        log.debug("data received, len: %s" % (len(data)), 'uriRequesterTest')
+    #def dataReceived(self, data):
+    #    log.debug("data received, len: %s" % (len(data)), 'uriRequesterTest')
     def handleResponse(self, buffer):
         log.debug('data received: %s bytes' % (len(buffer)), 'uriRequesterTest')
         self.received_len = len(buffer)
@@ -67,7 +72,7 @@
     Helper factory to connect to apt-proxy and send
     HTTP requests using uriRequester
     """
-    def __init__(self, filename, host, expectedResponse):
+    def __init__(self, filename, host, expectedResponse, if_modified_since=None):
         self.filename = filename
         self.host = host
         self.testDone = False
@@ -75,6 +80,7 @@
         self.timedOut = False
         self.expectedResponse = expectedResponse
         self.timeout = reactor.callLater(30, self.timeout)
+        self.if_modified_since = if_modified_since
     #def startedConnecting(self, connector):
     #    print 'Started to connect.'
     def buildProtocol(self, addr):
@@ -114,6 +120,7 @@
     def setUp(self, config):
         apTestHelper.setUp(self)
         config = self.config + '\n' + config
+        log.debug("config:\n" + config, 'TestRequestHelper')
         self.c = apConfig(StringIO(config))
         self.factory = Factory(self.c)
         self.factory.configurationChanged()
@@ -126,10 +133,10 @@
         apTestHelper.tearDown(self)
         self.assertRaises(OSError, os.stat, self.cache_dir)
 
-    def doRequest(self, file, responseCode):
+    def doRequest(self, file, responseCode, if_modified_since=None):
         portno = self.port.getHost().port
         log.debug("Starting test connection to 127.0.0.1:%s, file:%s:" %(portno, file), 'uriRequesterTest')
-        clientFactory = requestFactory(file, "127.0.0.1:%s"% (portno), responseCode)
+        clientFactory = requestFactory(file, "127.0.0.1:%s"% (portno), responseCode, if_modified_since)
         connection = reactor.connectTCP("127.0.0.1", portno, clientFactory)
 
         while clientFactory.testDone == False:
@@ -157,8 +164,6 @@
     def testNotFound(self):
         self.doRequest('/files/test.gz', http.NOT_FOUND)
     def testPackagesFile(self):
-        import twisted
-        twisted.internet.base.DelayedCall.debug = True
         self.doRequest('/files/Packages.gz', http.OK)
         backend = self.factory.getBackend('files')
         # Check that request was deleted from backend
@@ -167,6 +172,25 @@
     def testForbidden(self):
         self.doRequest('/notbackend/Release', http.NOT_FOUND)
         
+
+class WebServer:
+    def start(self):
+        """
+        Start web server, serving test data
+        
+        @return port number that the server listens on
+        """
+        root = web.static.File("../test_data")
+        #self.application = service.Application('web')
+        site = web.server.Site(root)
+        #self.port = reactor.listenTCP(0, self.application, interface="127.0.0.1")
+        self.port = reactor.listenTCP(0, site, interface="127.0.0.1")
+
+        return self.port.getHost().port
+
+    def stop(self):
+        self.port.stopListening()
+
 class DebianHttpBackendTest(TestRequestHelper):
     def setUp(self):
         """
@@ -174,58 +198,78 @@
         [files]
         backends=file:///<path to test packages directory>
         """
-        config = ("""
-dynamic_backends=off
-[debian]
-backends=http://ftp.debian.org/debian
-        """)
-        #print "config: " + config
+        #config = ("""
+#dynamic_backends=off
+#[debian]
+#backends=http://ftp.debian.org/debian
+        #""")
+        self.httpserver = WebServer()
+        port = self.httpserver.start()
+        config = ("dynamic_backends=off\n" +
+                  "[test_data]\n" +
+                  "backends=http://127.0.0.1:" + str(port))
         TestRequestHelper.setUp(self, config)
-
+    def tearDown(self):
+        self.httpserver.stop()
+        TestRequestHelper.tearDown(self)
     def testNotFound(self):
-        self.doRequest('/debian/NotHere.gz', http.NOT_FOUND)
-    def testReleaseFile(self):
-        file = '/debian/dists/stable/Release.gpg'
-        filepath = self.cache_dir + file
+        self.doRequest('/test_data/NotHere.gz', http.NOT_FOUND)
+    def downloadFile(self):
+        """
+        Download a file to cache
+        self.backend is set to backend name
+        self.file is set to filename
+        self.filepath is set to physical filename
+        """
+        self.backend = 'test_data'
+        self.filename = '/' + self.backend + '/packages/Packages.gz'
+        self.filepath = self.cache_dir + self.filename
 
         # File should not be in cache
-        self.assertRaises(OSError, os.stat, filepath)
-        self.doRequest(file, http.OK)
+        self.assertRaises(OSError, os.stat, self.filepath)
+        self.doRequest(self.filename, http.OK)
 
         # Check that file was really placed in cache
-        os.stat(filepath) 
+        os.stat(self.filepath) 
 
-    def testCached(self):
-        backend = 'debian'
-        file = '/' + backend + '/dists/stable/Release.gpg'
-        filepath = self.cache_dir + file
+    def testPackagesFile(self):
+        self.downloadFile()
 
-        self.assertRaises(OSError, os.stat, filepath) # File is not in cache
+        # Check access time database was updated
+        self.assertApproximates(self.factory.access_times[self.filename], time.time(), 2)
 
-        self.doRequest(file, http.OK)
-        os.stat(filepath) # File is in cache
+    def testNotModifiedGreater(self):
+        "Check http not modified is sent for new file"
+        self.downloadFile()
+        self.doRequest(self.filename, http.NOT_MODIFIED, time.time())
+
+    def testNotModifiedExact(self):
+        self.downloadFile()
+        self.doRequest(self.filename, http.NOT_MODIFIED, os.path.getmtime(self.filepath))
+
+    def testCached(self):
+        self.downloadFile()
 
-        self.doRequest(file, http.OK)
+        self.doRequest(self.filename, http.OK)
 
-        #b = self.factory.getBackend(backend)
-        # TODO
         log.debug("Downloading second copy", 'DebianHttpBackendTest')
         self.factory.config.min_refresh_delay = 0
-        self.doRequest(file, http.OK)
+        self.doRequest(self.filename, http.OK)
+    testCached.timeout = 2
 
-    def testTimeout(self):
-        pass
-    testTimeout.todo = True
-
-    def setFileTime(self):
-        "cache file modification time is set to same time as server time"
-        pass
-    setFileTime.todo = True
-
-    def doubleDownload(self):
-        "download, delete from cache, re-request file"
-        pass
-    doubleDownload.todo = True
+    #def testTimeout(self):
+        #pass
+    #testTimeout.todo = True
+
+    #def setFileTime(self):
+        #"cache file modification time is set to same time as server time"
+        #pass
+    #setFileTime.todo = True
+
+    #def doubleDownload(self):
+        #"download, delete from cache, re-request file"
+        #pass
+    #doubleDownload.todo = True
 
     # More TODO tests:
     # - file mtime is same as server mtime



More information about the apt-proxy-devel mailing list