[apt-proxy-devel] r592 - in people/halls/rework/apt_proxy: . test

Chris Halls halls at costa.debian.org
Thu Feb 16 22:00:18 UTC 2006


Author: halls
Date: Thu Feb 16 22:00:16 2006
New Revision: 592

Modified:
   people/halls/rework/apt_proxy/apt_proxy.py
   people/halls/rework/apt_proxy/apt_proxy_conf.py
   people/halls/rework/apt_proxy/cache.py
   people/halls/rework/apt_proxy/fetchers.py
   people/halls/rework/apt_proxy/misc.py
   people/halls/rework/apt_proxy/packages.py
   people/halls/rework/apt_proxy/test/test_apt_proxy.py
   people/halls/rework/apt_proxy/test/test_cache.py
   people/halls/rework/apt_proxy/test/test_packages.py
Log:
* HTTP fetches now work first time around; more work is still needed
* More test code
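
For anyone following the rework, the new download path in this revision is,
in outline (names as they appear in the diffs below; this is a summary, not
code from the commit):

    Request.process()
      -> Backend.get_cache_entry(path) -> CacheEntry.add_request(request)
      -> CacheEntry.get() -> CacheEntry.start_download()
      -> Backend.start_download(entry) -> DownloadQueue.addFile(entry)
      -> Fetcher.start(entry), which drives the CacheEntry callbacks
         download_started(), download_data_received(), download_data_end()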


Modified: people/halls/rework/apt_proxy/apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy.py	(original)
+++ people/halls/rework/apt_proxy/apt_proxy.py	Thu Feb 16 22:00:16 2006
@@ -16,9 +16,8 @@
 
 import os, stat, signal, fcntl, exceptions
 from os.path import dirname, basename
-import tempfile, glob, re, urlparse, time, string
-from twisted.internet import reactor, defer, abstract
-from twisted.protocols import ftp, basic
+import tempfile, glob, re, urlparse, time
+from twisted.internet import reactor, abstract
 from twisted.python.failure import Failure
 from twisted.internet import error, protocol
 from twisted.web import http
@@ -60,7 +59,9 @@
         self.config = config # apBackendConfig configuration information
         self.base = config.name # Name of backend
         self.uris=[]
+        self.queue = fetchers.DownloadQueue()
 
+        log.debug("Created Backend: " + self.base)
         for uri in config.backends:
             self.addURI(uri)
 
@@ -90,10 +91,10 @@
         a new object is created if it does not already exist
         """
         if self.entries.has_key(path):
-            return entries[path]
+            return self.entries[path]
         else:
-            e = CacheEntry(self, path)
-            entries[path] = e
+            e = cache.CacheEntry(self, path)
+            self.entries[path] = e
             return e
     def entry_done(self, entry):
         "A cache entry is finished and clients are disconnected"
@@ -115,7 +116,13 @@
 
     def file_served(self, entry):
         "A cache entry has served a file in this backend"
-        self.get_packages_db().packages_file(entry.file_path)
+        self.get_packages_db().file_updated(entry)
+
+    def start_download(self, entry):
+        """
+        A CacheEntry has requested that a file should be downloaded from the backend
+        """
+        self.queue.addFile(entry)
 
 class BackendServer:
     """
@@ -189,6 +196,7 @@
     uriIndex = 0             # Index of backend URI
     backend = None           # Backend for this request
     backendServer = None     # Current server to be tried
+    cacheEntry = None        # Cache entry for file requested
     
     def __init__(self, channel, queued):
         self.factory=channel.factory
@@ -200,7 +208,12 @@
         """
         log.debug("Request: " + self.method + " " + self.uri,'Request',8);
         # Clean up URL
-        self.uri = self.simplify_path(self.uri)
+        self.uri = os.path.normpath(self.uri)
+
+        if self.uri[0] != '/':
+            log.debug("Request must include at least one '/'")
+            self.finishCode(http.FORBIDDEN, "Request must include at least one '/'")
+            return
 
         backendName = self.uri[1:].split('/')[0]
         log.debug("Request: %s %s backend=%s uri=%s"%(self.method, self.uri, backendName, self.uri),'Request')
@@ -211,12 +224,12 @@
 
         if self.method != 'GET':
             #we currently only support GET
-            log.debug("abort - method not implemented")
+            log.debug("abort - method not implemented", 'Request')
             self.finishCode(http.NOT_IMPLEMENTED)
             return
 
         if re.search('/\.\./', self.uri):
-            log.debug("/../ in simplified uri ("+self.uri+")")
+            log.debug("/../ in simplified uri ("+self.uri+")", 'Request')
             self.finishCode(http.FORBIDDEN)
             return
 
@@ -227,20 +240,19 @@
 
         log.debug("backend: %s %s" % (self.backend.base, self.backend.uris))
 
-        self.cacheEntry = self.backend.get_cache_entry(self.uri)
+        backend_path = self.uri.split('/',2)[2]
+        self.cacheEntry = self.backend.get_cache_entry(backend_path)
 
         if not self.cacheEntry.filetype:
-            log.debug("abort - unknown extension")
-            self.finishCode(http.NOT_FOUND)
-            self.cacheEntry.remove_request(None)
+            log.debug("abort - unknown extension for file %s" % (backend_path), 'Request')
+            self.finishCode(http.FORBIDDEN, 'Unknown file extension')
             return
 
         self.setHeader('content-type', self.cacheEntry.filetype.contype)
 
-        if os.path.isdir(self.local_file):
-            log.debug("abort - Directory listing not allowed")
-            self.finishCode(http.FORBIDDEN)
-            self.cacheEntry.remove_request(None)
+        if os.path.isdir(self.cacheEntry.file_path):
+            log.debug("abort - Directory listing not allowed", 'Request')
+            self.finishCode(http.FORBIDDEN, 'Directory listing not permitted')
             return
 
         self.cacheEntry.add_request(self)
@@ -250,7 +262,8 @@
         Prepare client to stream file
         Return false if streaming is not necessary (i.e. cache hit)
         """
-        if request.local_mtime <= if_modified_since:
+        log.debug("start_streaming size=%s mtime=%s" % (size, mtime) , 'Request')
+        if self.local_mtime <= mtime:
             self.setResponseCode(http.OK, 'Streaming file')
             self.setHeader('last-modified', http.datetimeToString(mtime))
             self.setHeader('content-length', size)
@@ -262,25 +275,29 @@
 
     def finishCode(self, responseCode, message=None):
         "Finish the request with a status code and no streamed data"
+        log.debug("finishCode: %s, %s" % (responseCode, message), 'Request')
         self.setResponseCode(responseCode, message)
         self.write("")
         self.finish()
 
     def finish(self):
         "Finish request after streaming"
+        log.debug("finish" , 'Fetcher')
         http.Request.finish(self)
         if self.factory.config.disable_pipelining:
             if hasattr(self.transport, 'loseConnection'):
                 self.transport.loseConnection()
 
-        self.cacheEntry.remove_request(self)
-        self.cacheEntry = None
+        if self.cacheEntry:
+            self.cacheEntry.remove_request(self)
+            self.cacheEntry = None
 
     def connectionLost(self, reason=None):
         """
         The connection with the client was lost, remove this request from its
         Fetcher.
         """
+        log.debug("connectionLost" , 'Request')
         if self.cacheEntry:
             self.cacheEntry.remove_request(self)
         #self.finish()
@@ -297,7 +314,7 @@
 
     def headerReceived(self, line):
         "log and pass over to the base class"
-        #log.debug("Header: " + line)
+        log.debug("Header: " + line)
         if self.log_headers == None:
             self.log_headers = line
         else:
@@ -315,7 +332,7 @@
         __pychecker__ = 'unusednames=reason'
         for req in self.requests:
             req.connectionLost()
-        log.debug("Client connection closed")
+        log.debug("Client connection closed", 'Channel')
         if log.isEnabled('memleak'):
             memleak.print_top_10()
         #reactor.stop()   # use for shutting down apt-proxy when a client disconnects
@@ -346,11 +363,18 @@
     def periodic(self):
         "Called periodically as configured mainly to do mirror maintanace."
         log.debug("Doing periodic cleaning up")
+        self.periodicCallback = None
         self.clean_old_files()
         self.recycler.start()
         log.debug("Periodic cleaning done")
-        if (self.config.cleanup_freq != None):
-            reactor.callLater(self.config.cleanup_freq, self.periodic)
+        self.startPeriodic()
+    def startPeriodic(self):
+        if (self.config.cleanup_freq != None and self.periodicCallback is None):
+            self.periodicCallback = reactor.callLater(self.config.cleanup_freq, self.periodic)
+    def stopPeriodic(self):
+        if self.periodicCallback is not None:
+            self.periodicCallback.cancel()
+            self.periodicCallback = None
     def __del__(self):
         for f in self.databases:
             try:
@@ -362,6 +386,7 @@
         self.runningFetchers = {}
         self.backends = []
         self.config = config
+        self.periodicCallback = None
 
     def __getattr__ (self, name):
         def open_shelve(dbname):
@@ -429,7 +454,7 @@
                     setattr(self.config, param, getattr(oldconfig, param))
 
         if self.config.cleanup_freq != None and (oldconfig is None or oldconfig.cleanup_freq == None):
-            reactor.callLater(self.config.cleanup_freq, self.periodic)
+            self.startPeriodic()
         self.createBackends()
 
     def createBackends(self):
@@ -454,7 +479,7 @@
         # the hostname to get the files.
         backendServer = "http://" + name
         log.debug("Adding dynamic backend:" + name)
-        backendConfig = self.factory.config.addBackend(None, name, (backendServer,))
+        backendConfig = self.config.addBackend(None, name, (backendServer,))
         backend = Backend(self, backendConfig)
         self.backends[name] = backend
         return backend
@@ -581,6 +606,8 @@
         self.access_times.close()
         self.packages.close()
         packages.cleanup(self)
+        self.recycler.stop()
+        self.stopPeriodic()
 
     def dumpdbs (self):
         def dump_update(key, value):
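
A note on the URL cleanup above: os.path.normpath replaces the hand-rolled
simplify_path() helper that this commit deletes from cache.py. Standard
library behaviour, shown here for illustration only:

    >>> import os.path
    >>> os.path.normpath('/debian//dists/./stable/../stable/Release.gpg')
    '/debian/dists/stable/Release.gpg'

The existing re.search('/\.\./', ...) check is kept as an extra guard on top
of the normalisation.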

Modified: people/halls/rework/apt_proxy/apt_proxy_conf.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy_conf.py	(original)
+++ people/halls/rework/apt_proxy/apt_proxy_conf.py	Thu Feb 16 22:00:16 2006
@@ -97,7 +97,7 @@
         ['disable_pipelining', '1', 'boolean'],
         ['passive_ftp', 'on', 'boolean'],
         ['dynamic_backends', 'on', 'boolean'],
-        ['http_proxy', '' , 'proxyspec'],
+        ['http_proxy', None , 'proxyspec'],
         ['username', 'aptproxy', 'string']
         ]
 

Modified: people/halls/rework/apt_proxy/cache.py
==============================================================================
--- people/halls/rework/apt_proxy/cache.py	(original)
+++ people/halls/rework/apt_proxy/cache.py	Thu Feb 16 22:00:16 2006
@@ -23,7 +23,9 @@
 """
 
 from twisted.internet import protocol, defer, reactor
-import os.path, re, stat
+from twisted.web import http
+from twisted.protocols import basic
+import os, re, stat
 from misc import log
 
 class CacheEntry:
@@ -35,9 +37,6 @@
     Active CacheEntries are managed in their corresponding Backend
     """
 
-    "Active Client Requests for this file"
-    requests = []
-
     # Define lifecyle of cache entry
     STATE_NEW = 1 # Entry is not yet being sent
     STATE_CONNECTING = 2 # Waiting for connection to download file
@@ -57,15 +56,21 @@
         """
         self.backend = backend
         self.factory = backend.factory
+        self.requests = [] # Active client requests for this cache entry
+        self.streamfile = None
 
         # Path of file within backend e.g. 'dists/stable/Release.gpg'
         self.path = path 
 
+        # Path of file within cache e.g. 'debian/dists/stable/Release.gpg'
+        self.cache_path = backend.base + os.sep + path
+
         # File in cache '/var/cache/apt-proxy/debian/dists/stable/Release.gpg'
-        self.file_path = self.factory.config.cache_dir + path
+        self.file_path = (self.factory.config.cache_dir + os.sep + 
+                          self.cache_path)
 
         # Directory of cache file '/var/cache/apt-proxy/debian/dists/stable'
-        self.filedir = os.dirname(self.file_path)
+        self.filedir = os.path.dirname(self.file_path)
 
         self.filetype = findFileType(path)
         self.filename = os.path.basename(path) # 'Release.gpg'
@@ -73,9 +78,11 @@
         # filebase='Release' fileext='gpg'
         (self.filebase, self.fileext) = os.path.splitext(self.filename)
 
-        self.create_directory()
+        # self.create_directory()
+        self.file_mtime = None
+        self.file_size = None
 
-    def add_reqest(self, request):
+    def add_request(self, request):
         """
         A new request has been received for this file
         """
@@ -88,7 +95,7 @@
             self.get()
         else:
             # Subsequent request - client must be brought up to date
-            if self.state == STATE_DOWNLOADING:
+            if self.state == self.STATE_DOWNLOAD:
                 raise RuntimeError, \
                       'TODO: multiple clients not implemented yet'
 
@@ -99,16 +106,16 @@
 
         If parameter request is None, downloading has been aborted early
         """
-        if request is not None:
-            self.requests.delete(request)
-        if len(requests) != 0:
+        if request is not None and request in self.requests:
+            self.requests.remove(request)
+        if len(self.requests) != 0:
             return
 
         log.debug("Last request removed",'cacheEntry')
         self.backend.entry_done(self)
 
         if (not self.factory.config.complete_clientless_downloads 
-             and self.state == STATE_DOWNLOADING
+             and self.state == self.STATE_DOWNLOAD
              and self.fetcher is not None):
             # Cancel download in progress
             self.fetcher.cancel_download()
@@ -122,17 +129,18 @@
         """
         Prepare a request for streaming
         """
+        log.msg("start_request_stream:" + self.file_path, "CacheEntry")
         request.startStreaming(self.size, self.mtime)
 
-        if self.transfered.size() != 0:
-            request.write(self.transfered.read_from(start=0)) # TODO - is this efficient?
+        if self.streamfile.size() != 0:
+            request.write(self.streamfile.read_from(start=0)) # TODO - is this efficient?
 
 
     def get(self):
         """
         Update current version of file in cache
         """
-        if self.state == STATE_NEW:
+        if self.state == self.STATE_NEW:
             if os.path.exists(self.file_path):
                 if self.check_age():
                     self.verify()
@@ -181,7 +189,7 @@
             deferred.callback(None)
         elif last_access < min_time:
             log.debug("file is too old: "+self.file_path, 'CacheEntry')
-            update_times[self.uri] = cur_time
+            update_times[self.uri] = cur_time  # TODO: Is this right?
             deferred.errback()
         else:
             log.debug("file is ok: "+self.file_path, 'CacheEntry')
@@ -191,7 +199,7 @@
         """
         File is up to date - send complete file from cache to clients
         """
-        log.msg("sending file from cache:" + self.file_path, "CachEntry")
+        log.msg("sending file from cache:" + self.file_path, "CacheEntry")
         self.transfer_file(self.path)
 
     def end_send_cached(self):
@@ -204,16 +212,17 @@
         """
         Send given file to clients
         """
+        log.msg("transfer_file:" + self.file_path, "CacheEntry")
         stat_tuple = os.stat(filename)
         self.file_mtime = stat_tuple[stat.ST_MTIME]
         self.file_size = stat_tuple[stat.ST_SIZE]
         size = os.stat(filename)[stat.ST_SIZE]
 
-        self.state = STATE_SENDFILE
+        self.state = self.STATE_SENDFILE
         if size > 0:
             log.debug("Sending file to clients:%s size:%s" % (filename, size), 'CacheEntry')
-            self.streamfile = open(self.local_file,'rb')
-            fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
+            self.streamfile = open(filename,'rb')
+            #fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
     
             for request in self.requests:
                 if request.start_streaming(self.file_size, self.file_mtime):
@@ -239,12 +248,21 @@
         if(not os.path.exists(self.filedir)):
             os.makedirs(self.filedir)
 
-    def download_started(self, fetcher):
+    def start_download(self):
+        """
+        Start file transfer from backend server
+        """
+        log.msg("start download:" + self.path, "CacheEntry")
+        self.backend.start_download(self)
+
+    def download_started(self, fetcher, size, mtime):
         """
         Callback from Fetcher
         A fetcher has begun streaming this file
         """
-        self.state = STATE_DOWNLOAD
+        log.msg("download started:" + self.file_path, "CacheEntry")
+        self.state = self.STATE_DOWNLOAD
+        self.create_directory()
         self.fetcher = fetcher
 
         """
@@ -268,47 +286,65 @@
 #                 gzip = FetcherGzip()
 #                 gzip.activate(loop, postconverting=1)
 
+
+        for req in self.requests:
+            req.start_streaming(size, mtime)
+
+
     def download_data_received(self, data):
         """
         Callback from Fetcher
         A block of data has been received from the streaming backend server
         """
+        #log.msg("download_data_received:" + self.file_path, "CacheEntry")
         for req in self.requests:
             req.write(data)
 
-    def rename_tempfile(self, filename):
-        """
-        When a Fetcher has streamed to a temporary file, rename this file to
-        the final name
-        """
-        os.rename(filename, self.file_path)
+        if self.streamfile:
+            # save to tempfile (if it is in use)
+            self.streamfile.append(data)
 
     def download_data_end(self):
         """
         Callback from Fetcher
         File streaming is complete
         """
-        self.file_sent()
+        log.msg("download_data_end:" + self.file_path, "CacheEntry")
+
+        if self.streamfile is not None:
+            # File was streamed to clients
+            self.streamfile.close_and_rename(self.file_path)
+            self.streamfile = None
 
         for req in self.requests:
             req.finish()
 
-    def download_failure(self, reason):
+        self.file_sent()
+
+    def download_failure(self, http_code, reason):
         """
         Download is not possible
         """
+        log.msg("download_failure %s: (%s) %s"% (self.file_path, http_code, reason), "CacheEntry")
         for request in self.requests:
-            request.finishCode(http.NOT_FOUND, reason)
+            request.finishCode(http_code, reason)
 
     def file_sent(self):
         """
         File has been sent successfully to at least one client
         Update databases with statistics for this file
         """
-        self.state = STATE_SENT
+        log.msg("file_sent:" + self.file_path, "CacheEntry")
+
+        self.state = self.STATE_SENT
         self.backend.file_served(self)
-        self.factory.file_served(self.request.uri)
-        
+        self.factory.file_served(self.file_path)
+
+    def init_tempfile(self):
+        self.create_directory()
+        self.streamFilename = self.file_path + ".apDownload"
+        self.streamfile = StreamFile(self.streamFilename)
+
 class FileType:
     """
     This is just a way to distinguish between different filetypes.
@@ -349,13 +385,13 @@
     FileType(re.compile(r"\.txt$"), "application/plain-text", 1),
     FileType(re.compile(r"\.html$"), "application/text-html", 1),
 
-    FileType(re.compile(r"/(Packages|Release(\.gpg)?|Sources|Contents-.*)"
+    FileType(re.compile(r"(^|/)(Packages|Release(\.gpg)?|Sources|Contents-.*)"
                         r"(\.(gz|bz2))?$"), 
              "text/plain", 1),
 
     FileType(re.compile(r"\.rpm$"), "application/rpm", 0),
 
-    FileType(re.compile(r"/(pkglist|release|srclist)(\.(\w|-)+)?"
+    FileType(re.compile(r"(^|/)(pkglist|release|srclist)(\.(\w|-)+)?"
                         r"(\.(gz|bz2))?$"), 
              "text/plain", 1),
     )
@@ -374,17 +410,33 @@
     """
     CHUNKSIZE = 16384
     def __init__(self, name, mode='w+b'):
+        log.debug("Creating file: " + name, 'cache')
         self.file = file(name, mode, self.CHUNKSIZE)
+        self.name = name
     def append(self, data):
-        self.seek(0, SEEK_END)
-        self.write(data)
+        self.file.write(data)
     def size(self):
-        return self.tell()
+        return self.file.tell()
     def read_from(self, size=-1, start=None):
         if start != None:
-            self.seek(start, SEEK_SET)
-        data = file.read(self, size)
+            self.file.seek(start, 0) # 0 == SEEK_SET
+        data = self.file.read(size)
+        self.file.seek(0, 2) # 2 == SEEK_END, return to append position
         return data
+    def close(self):
+        log.debug("Closing file: " + self.name, 'cache')
+        self.file.close()
+        self.file = None
+    def close_and_rename(self, new_name):
+        """
+        File was successfully downloaded - close and rename to final destination
+        """
+        self.close()
+        if self.name == new_name:
+            return
+        log.debug("renaming file: %s->%s " % (self.name, new_name), 'cache')
+        os.rename(self.name, new_name)
+        self.name = new_name
 
 class FileVerifier:
     """
@@ -490,24 +542,3 @@
                 self.parent.deferred.callback(None)
             else:
                 self.parent.failed(os.path.basename(self.exe)+ " failed")
-
-def simplify_path(old_path):
-    """
-    change //+ with /
-    change /directory/../ with /
-    More than three ocurrences of /../ together will not be
-    properly handled
-
-    NOTE: os.path.normpath could probably be used here.
-    """
-    path = re.sub(r"//+", "/", old_path)
-    path = re.sub(r"/\./+", "/", path)
-    new_path = re.sub(r"/[^/]+/\.\./", "/", path)
-    while (new_path != path):
-        path = new_path
-        new_path = re.sub(r"/[^/]+/\.\./", "/", path)
-    if (new_path != old_path):
-        log.debug("simplified path from " + old_path + 
-                  " to " + new_path,'simplify_path')
-    return path
-
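
The temporary-file handling now lives in CacheEntry rather than the Fetcher:
init_tempfile() opens a StreamFile beside the final cache path,
download_data_received() appends each block, and download_data_end() renames
the .apDownload file into place. A minimal sketch of that StreamFile
lifecycle, assuming the class above (the path is made up):

    from apt_proxy.cache import StreamFile

    f = StreamFile('/var/cache/apt-proxy/debian/dists/stable/Release.gpg.apDownload')
    f.append(data)    # called once per block received from the backend
    f.close_and_rename('/var/cache/apt-proxy/debian/dists/stable/Release.gpg')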

Modified: people/halls/rework/apt_proxy/fetchers.py
==============================================================================
--- people/halls/rework/apt_proxy/fetchers.py	(original)
+++ people/halls/rework/apt_proxy/fetchers.py	Thu Feb 16 22:00:16 2006
@@ -21,9 +21,12 @@
 network backends
 """
 
-import re
+import re, os, string
 from twisted.web import static, http
-from twisted.internet import protocol
+from twisted.internet import protocol, reactor, defer
+from twisted.python import failure
+
+from misc import log
 
 
 class Fetcher:
@@ -35,16 +38,19 @@
     cacheEntry = None
     fetcher = None   # connection-specific fetcher
 
-    def init_tempfile(self):
-        self.streamFilename = self.cacheEntry.file_path + ".apDownload"
-        self.transfered = StreamFile(self.streamFilename)
+    def __init__(self):
+        self.backendServer = None
+        self.size = None # Size of file notified by fetcher's server
+        self.mtime = None # Mtime of file notified by fetcher's server
+        self.transfered = None # Temporary download file, if one is in use
 
     def start(self, cacheEntry):
         self.cacheEntry = cacheEntry
+        log.debug("fetcher start:" + self.cacheEntry.filename, "fetcher")
         self.backend = cacheEntry.backend
         self.len_received = 0
-        self.deferred = Deferred()
-        return deferred
+        self.deferred = defer.Deferred()
+        self.start_download()
+        return self.deferred
 
     def activateNextBackendServer(self, fetcher):
         """
@@ -66,9 +72,9 @@
 
         log.debug('Trying next backendServer', 'fetcher')
         self.fetcher = self.backendServer.fetcher(self.backendServer)
-        deferred = fetcher.connect()
-        deferred.callback(self.connected)
-        deferred.errback(self.connection_failed)
+        d = self.fetcher.connect()
+        d.addCallback(self.connected)
+        d.addErrback(self.connection_failed)
         #fetcher.apEndTransfer(fetcher_class)
 
         return True
@@ -82,7 +88,7 @@
         Use post_convert and gzip_convert regular expresions of the Fetcher
         to gzip/gunzip file before and after download.
         """
-        log.debug("Downloading: " + self.path, 'CacheEntry')
+        log.debug("Downloading: " + self.cacheEntry.file_path, 'Fetcher')
         #init_tempfile()
         self.activateNextBackendServer(self.fetcher)
 
@@ -90,8 +96,9 @@
         """
         Download was successful
         """
+        log.debug("download complete", "Fetcher")
         self.cacheEntry.download_data_end()
-        self.deferred.callback()
+        self.deferred.callback((True, ""))
 
     def fail_over(self, reason_code, reason_msg):
         """
@@ -99,11 +106,13 @@
         backend
         """
         if not self.activateNextBackendServer(self.fetcher):
-            self.download_failed()
+            self.download_failed(reason_code, reason_msg)
 
     def download_failed(self, reason_code, reason_msg):
         #self.cacheEntry.download_data_end()
-        self.deferred.errback(reason_code, reason_msg)
+        log.debug("download_failed: (%s) %s " %(reason_code, reason_msg), "Fetcher")
+        self.cacheEntry.download_failure(reason_code, reason_msg)
+        self.deferred.callback((False, reason_msg))
 
     def cancel_download(self):
         if self.fetcher and self.fetcher.transport:
@@ -118,25 +127,37 @@
 
     def data_received(self, data):
         """
-        Data has been received from the backend server
+        File data has been received from the backend server
         """
         if self.len_received == 0:
-            self.cacheEntry.download_started(self)
+            self.cacheEntry.init_tempfile()
+            self.cacheEntry.download_started(self, self.size, self.mtime)
         self.len_received = self.len_received + len(data)
 
-        if self.transfered:
-            # save to tempfile (if it in use)
-            self.transfered.append(data)
         self.cacheEntry.download_data_received(data)
 
+    def server_size(self, size):
+        """
+        The server has sent the expected length of the file
+        """
+        self.size = size
+        log.debug("File size: " + str(size), 'Fetcher')
+
+    def server_mtime(self, mtime):
+        """
+        The server has sent the expected length of the file
+        """
+        self.mtime = mtime
+        log.debug("File mtime: " + str(mtime), 'Fetcher');
+
     def transfer_complete(self):
         """
         All data has been transferred
         """
         log.debug("Finished receiving data: " + self.cacheEntry.filename, 'Fetcher');
-        if self.transferred:
-            self.transferred.close()
-            self.transferred = None
+        if self.transfered is not None:
+            self.transfered.close()
+            self.transfered = None
 
             if self.fetcher.server_mtime != None:
                 os.utime(self.local_file, (time.time(), self.fetcher.server_mtime))
@@ -147,13 +168,13 @@
 
         self.cacheEntry.download_data_end()
 
-    def conection_failed(self, reason = None):
+    def connection_failed(self, reason = None):
         """
         A fetcher has failed to connect to the backend server
         """
         msg = '[%s] Connection Failed: %s/%s'%(
             self.backend.name,
-            self.backendServer.path, self.request.backend_uri)
+            self.backendServer.path, self.cacheEntry.path)
 
         if reason:
             msg = '%s (%s)'%(msg, reason.getErrorMessage())
@@ -161,23 +182,34 @@
         log.err(msg)
         self.fail_over(http.SERVICE_UNAVAILABLE, reason)
 
-    def connected():
-        log.debug("Connected to  "+ self.request.backend_uri, 'Fetcher')
-        log.debug('downloading:%s mtime:%s' % (uri, mtime), 'Fetcher')
-        self.fetcher.download(self.cacheEntry.path, self.cacheEntry.file_mtime)
+    def connected(self, result):
+        log.debug("Connected to "+ self.backendServer.uri, 'Fetcher')
+        log.debug('downloading:%s mtime:%s' % (self.cacheEntry.path, self.cacheEntry.file_mtime), 'Fetcher')
+        self.fetcher.download(self, self.cacheEntry.path, self.cacheEntry.file_mtime)
+
+    def disconnect(self):
+        self.fetcher = None # TODO loseConnection?
+
+    def connection_closed(self):
+        """
+        A protocol fetcher's connection has closed - we must reopen the connection
+        next time
+        """
+        log.debug("Connection closed", 'Fetcher')
+        self.fetcher = None
 
-    def file_not_found():
-        log.msg("[%s] file not found: %s" % (self.backendServer.path, self.request.backend_uri))
+    def file_not_found(self):
+        log.msg("(%s) file not found: %s" % (self.backendServer.path, self.cacheEntry.path), 'fetcher')
         # TODO - failover?
-        self.download_failed(reason)
+        self.download_failed(http.NOT_FOUND, "file not found on backend")
 
-    def send_complete_file(filename):
+    def send_complete_file(self, filename):
         """
         Send a complete file (used by FileFetcher)
         """
         self.cacheEntry.transfer_file(filename)
 
-    def up_to_date():
+    def up_to_date(self):
         """
         Fetcher has determined that our cached file is up to date
         so the file is sent from our cache
@@ -194,9 +226,7 @@
 
     def connect(self):
         # We always conect
-        d = deferred()
-        d.runcallbacks()
-        return d
+        return defer.succeed(True)
 
     def download(self, fetcher, uri, mtime):
         """
@@ -209,7 +239,7 @@
         self.cache_mtime = mtime
         self.request_uri = uri
 
-        self.local_file = self.backendServer.uri[len("file:"):] + request_uri
+        self.local_file = self.backendServer.uri[len("file://"):] + '/' + uri
         if not os.path.exists(self.local_file):
             self.parent.file_not_found()
             return
@@ -225,7 +255,11 @@
     """
     def __init__(self, parent):
         self.parent = parent # HttpFetcher
-        self.proxy = parent.backendServer.config.http_proxy
+        self.proxy = self.parent.proxy
+        self.log_headers = None
+        self.fetcher = None
+        self.close_on_completion = False
+
     def connectionMade(self):
         """
         Http connection made - inform parent, which will
@@ -237,7 +271,7 @@
         # Request file from backend
         self.fetcher = fetcher
         backendServer = self.parent.backendServer
-        if not self.proxy.host:
+        if self.proxy is None:
             serverpath = backendServer.path
         else:
             serverpath = "http://" + backendServer.host
@@ -249,7 +283,7 @@
         self.sendCommand("GET", serverpath + "/" + uri)
 
         self.sendHeader('host', backendServer.host)
-        if self.proxy.user:
+        if self.proxy is not None and self.proxy.user is not None:
             self.sendHeader('Proxy-Authorization', "Basic " +
                             encodestring(self.proxy.user + ":" + self.proxy.password))
 
@@ -264,8 +298,11 @@
         log.debug('handleStatus %s - %s' % (code, message), 'http_client')
         self.http_status = int(code)
 
-        self.setResponseCode(self.http_status)
+        #self.setResponseCode(self.http_status)
 
+    def handleResponse(self, buffer):
+        log.debug('handleResponse, %s bytes' % (len(buffer)), 'http_client')
+    
     def handleHeader(self, key, value):
 
         log.debug("Received: " + key + " " + str(value), 'http_client')
@@ -273,16 +310,37 @@
 
         if key == 'last-modified':
             self.server_mtime = http.stringToDatetime(value)
+            self.fetcher.server_mtime(self.server_mtime)
         elif key == 'content-length':
-            self.server_size = http.stringToDatetime(value)
+            self.server_size = int(value)
+            self.fetcher.server_size(self.server_size)
+        elif key == 'connection':
+            if value == "close":
+                log.debug('will close on completion', 'http_client')
+                self.close_on_completion = True
 
     def handleEndHeaders(self):
         if self.http_status == http.NOT_MODIFIED:
             log.debug("NOT_MODIFIED " + str(self.status_code),'http_client')
             self.parent.up_to_date()
+        elif self.http_status == http.NOT_FOUND:
+            log.debug("Not found on backend server",'http_client')
+            self.fetcher.file_not_found()
 
     def rawDataReceived(self, data):
-        self.parent.data_received(data)
+        if self.http_status == http.OK:
+            self.fetcher.data_received(data)
+            #log.debug("Recieved: %s expected: %s" % (self.fetcher.len_received, self.server_size),'http_client')
+            if self.fetcher.len_received >= self.server_size:
+                if self.fetcher.len_received == self.server_size:
+                    log.debug("File transfer complete",'http_client')
+                    self.fetcher.download_complete()
+                    if self.close_on_completion:
+                        self.transport.loseConnection()
+                else:
+                    log.err("File transfer overrun! Expected size:%s Received size:%s" % 
+                            (self.server_size, self.fetcher.len_received), 'http_client')
+                    self.fetcher.download_failed(http.INTERNAL_SERVER_ERROR, "Data overrun")
 
 #     def handleResponse(self, buffer):
 #         if self.length == 0:
@@ -327,16 +385,18 @@
     def __init__(self, backendServer):
         self.backendServer = backendServer
         self.isConnected = False
+        self.instance = None
+        self.connection = None
 
     def connect(self):
-        self.connectCallback = deferred()
-        if not self.proxy.host:
-            host = request.backendServer.host
-            port = request.backendServer.port
+        self.connectCallback = defer.Deferred()
+        self.proxy = self.backendServer.backend.config.http_proxy
+        if self.proxy is None:
+            host = self.backendServer.host
+            port = self.backendServer.port
         else:
             host = self.proxy.host
             port = self.proxy.port
-        reactor.connectTCP(host, port, self, request.backend.config.timeout)
+        reactor.connectTCP(host, port, self, self.backendServer.backend.config.timeout)
         return self.connectCallback
 
     def buildProtocol(self, addr):
@@ -346,12 +406,14 @@
         "Connection was made to HTTP backend (callback from HTTP client)"
         self.connection = connection
         self.isConnected = True
-        self.connectCallback.runcallbacks()
+        self.connectCallback.callback(None)
 
     def clientConnectionFailed(self, connector, reason):
         self.instance.connectionFailed(reason)
     def clientConnectionLost(self, connector, reason):
-        log.debug("XXX clientConnectionLost", "http-client")
+        log.debug("clientConnectionLost", "http-client")
+        if self.connection is not None and self.connection.fetcher is not None:
+            self.connection.fetcher.connection_closed()
 
     def download(self, fetcher, uri, mtime):
         """
@@ -386,7 +448,7 @@
         """
         Establish connection to ftp server specified by backendServer
         """
-        self.connectCallback = deferred()
+        self.connectCallback = defer.Deferred()
         if not self.proxy.host:
             host = self.backendServer.host
             port = self.backendServer.port
@@ -891,34 +953,42 @@
     """
     This class manages a list of files to download and schedules downloads
     """
-    queuedFiles = []
-    activeFile = None
-    fetcher = None
+
+    def __init__(self):
+        self.queue = [] # List of cacheEntry classes waiting
+        self.activeFile = None
+        self.fetcher = None
 
     def addFile(self, cacheEntry):
         """
         Add a file to the queue and start downloading if necessary
         """
-        self.queuedFiles.append(cacheEntry)
+        self.queue.append(cacheEntry)
         if self.activeFile is None:
             self.startNextDownload()
 
     def startNextDownload(self):
-        activeFile = queuedFiles[0]
-        queuedFiles = queuedFiles[1:]
-
-        if self.fetcher is not None:
-            if self.fetcher.Backend != self.activeFile.Backend:
-                self.fetcher.closeConnection()
+        if len(self.queue)>0:
+            log.debug("start next download", 'DownloadQueue')
+            self.activeFile = self.queue[0]
+            self.queue = self.queue[1:]
+    
+            if self.fetcher is not None:
+                if self.fetcher.backend != self.activeFile.backend:
+                    self.fetcher.disconnect()
+                    self.fetcher = Fetcher()
+            else:
                 self.fetcher = Fetcher()
+            d = self.fetcher.start(self.activeFile)
+            d.addCallback(self.downloadFinished)
         else:
-            self.fetcher = Fetcher()
-        deferred = self.fetcher.start(activeFile)
-        deferred.addCallback(self.downloadComplete)
-        deferred.addErrback(self.downloadFailed)
-
-    def downloadCopmlete(self):
-        startNextDownload(self)
-    def downloadFailed(self):
-        pass # TODO
+            log.debug("download queue is empty", 'DownloadQueue')
 
+    def downloadFinished(self, result):
+        success, message = result
+        if success:
+            log.debug("download complete", 'DownloadQueue')
+        else:
+            log.debug("download failed: %s" % (message), 'DownloadQueue')
+        self.activeFile = None
+        self.startNextDownload()
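
Each Backend now owns a single DownloadQueue (see apt_proxy.py above), so
fetches within one backend are serialised: addFile() only starts a download
when none is active, and downloadFinished() dequeues the next entry once the
fetcher's deferred fires. Roughly, with illustrative entries:

    queue = DownloadQueue()
    queue.addFile(entry1)   # nothing active, so this download starts at once
    queue.addFile(entry2)   # queued; starts after entry1's deferred fires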

Modified: people/halls/rework/apt_proxy/misc.py
==============================================================================
--- people/halls/rework/apt_proxy/misc.py	(original)
+++ people/halls/rework/apt_proxy/misc.py	Thu Feb 16 22:00:16 2006
@@ -113,6 +113,7 @@
     def __init__(self, factory, timer):
         self.timer = timer
         self.factory = factory
+        self.callback = None
     def start(self):
         """
         Starts the Recycler if it is not working, it will use
@@ -129,8 +130,12 @@
             for backend in self.factory.backends.values():
                  self.pending.append(backend.base)
             self.stack = []
-            reactor.callLater(self.timer, self.process)
+            self.callback = reactor.callLater(self.timer, self.process)
             self.working = 1
+    def stop(self):
+        if self.callback is not None:
+            self.callback.cancel()
+            self.callback = None
     def pop(self):
         if self.stack:
             (self.cur_dir, self.cur_uri, self.pending) = self.stack.pop()
@@ -144,6 +149,7 @@
         """
         Process the next entry, is called automatically via callLater.
         """
+        self.callback = None
         entry = self.pending.pop()
         uri  = os.path.join(self.cur_uri, entry)
         path = os.path.join(self.cur_dir, entry)
@@ -171,7 +177,7 @@
         if not self.pending:
             self.pop()
         if self.working:
-            reactor.callLater(self.timer, self.process)
+            self.callback = reactor.callLater(self.timer, self.process)
 
 if __name__ == '__main__':
     #Just for testing purposes.
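
The Recycler change uses the same cancellable-callLater pattern as the new
startPeriodic()/stopPeriodic() pair in apt_proxy.py: keep the IDelayedCall
returned by reactor.callLater, clear the reference when it fires, and cancel
it on stop. A generic sketch of the idiom, not code from this file:

    from twisted.internet import reactor

    class PeriodicTask:
        def __init__(self, interval, work):
            self.interval = interval
            self.work = work      # function to run each period
            self.call = None      # pending IDelayedCall, if any
        def start(self):
            if self.call is None:
                self.call = reactor.callLater(self.interval, self._run)
        def _run(self):
            self.call = None      # the pending call has now fired
            self.work()
            self.start()          # reschedule
        def stop(self):
            if self.call is not None:
                self.call.cancel()
                self.call = None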

Modified: people/halls/rework/apt_proxy/packages.py
==============================================================================
--- people/halls/rework/apt_proxy/packages.py	(original)
+++ people/halls/rework/apt_proxy/packages.py	Thu Feb 16 22:00:16 2006
@@ -67,17 +67,18 @@
         except:
             pass
 
-    def update_file(self, uri):
+    def update_file(self, entry):
         """
         Called from apt_proxy.py when files get updated so we can update our
         fake lists/ directory and sources.list.
 
-        @param uri Filename of cached file (without cache_dir prefix)
+        @param entry CacheEntry for cached file
         """
-        if basename(uri)=="Packages" or basename(uri)=="Release":
-            log.msg("REGISTERING PACKAGE:"+uri,'apt_pkg',4)
-            stat_result = os.stat(self.cache_dir+'/'+uri)
-            self.packages[uri] = stat_result
+        if entry.filename=="Packages" or entry.filename=="Release":
+            log.msg("Registering package file: "+entry.cache_path, 'apt_pkg', 4)
+            stat_result = os.stat(entry.file_path)
+            self.packages[entry.cache_path] = stat_result
+
     def get_files(self):
         """
         Get list of files in database.  Each file will be checked that it exists
@@ -85,7 +86,7 @@
         files = self.packages.keys()
         #print self.packages.keys()
         for f in files:
-            if not os.path.exists(self.cache_dir + '/' + f):
+            if not os.path.exists(self.cache_dir + os.sep + f):
                 log.debug("File in packages database has been deleted: "+f, 'apt_pkg')
                 del files[files.index(f)]
                 del self.packages[f]
@@ -170,11 +171,11 @@
         #print "start aptPackages [%s] %s " % (self.backendName, self.cache_dir)
         del self.packages
         #print "Deleted aptPackages [%s] %s " % (self.backendName, self.cache_dir)
-    def file_updated(self, uri):
+    def file_updated(self, entry):
         """
         A file in the backend has changed.  If this affects us, unload our apt database
         """
-        if self.packages.update_file(uri):
+        if self.packages.update_file(entry):
             self.unload()
 
     def __save_stdout(self):
@@ -294,21 +295,21 @@
 
 
 def cleanup(factory):
-    for backend in factory.backends:
+    for backend in factory.backends.values():
         backend.get_packages_db().cleanup()
 
-def get_mirror_path(factory, file):
-    """
-    Look for the path of 'file' in all backends.
-    """
-    info = AptDpkgInfo(file)
-    paths = []
-    for backend in factory.backends:
-        path = backend.get_packages_db().get_mirror_path(info['Package'],
-                                                info['Version'])
-        if path:
-            paths.append('/'+backend.base+'/'+path)
-    return paths
+#def get_mirror_path(factory, file):
+    #"""
+    #Look for the path of 'file' in all backends.
+    #"""
+    #info = AptDpkgInfo(file)
+    #paths = []
+    #for backend in factory.backends:
+        #path = backend.get_packages_db().get_mirror_path(info['Package'],
+                                                #info['Version'])
+        #if path:
+            #paths.append('/'+backend.base+'/'+path)
+    #return paths
 
 def get_mirror_versions(factory, package):
     """

Modified: people/halls/rework/apt_proxy/test/test_apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_apt_proxy.py	(original)
+++ people/halls/rework/apt_proxy/test/test_apt_proxy.py	Thu Feb 16 22:00:16 2006
@@ -68,7 +68,7 @@
 """
 
 class apTestHelper(unittest.TestCase):
-    default_config = "[DEFAULT]\ndebug=all:9 apt:0\n" # Config string to use
+    default_config = "[DEFAULT]\ndebug=all:9 apt:0 memleak:0\n cleanup_freq=off\n" # Config string to use
     def setUp(self):
         self.cache_dir = tempfile.mkdtemp('.aptproxy')
         self.config = self.default_config.replace('[DEFAULT]','[DEFAULT]\ncache_dir=' + self.cache_dir)

Modified: people/halls/rework/apt_proxy/test/test_cache.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_cache.py	(original)
+++ people/halls/rework/apt_proxy/test/test_cache.py	Thu Feb 16 22:00:16 2006
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2005 Chris Halls <halls at debian.org>
+# Copyright (C) 2006 Chris Halls <halls at debian.org>
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of version 2.1 of the GNU Lesser General Public
@@ -14,79 +14,83 @@
 # License along with this library; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
-"""
-Test suite for apt-proxy cache management cache.py
-"""
+"""Unit test for cache.py"""
 
-import shutil, os.path
+import os
+from twisted.trial import unittest
 from StringIO import StringIO
 
-from twisted.internet import reactor, error
-
-from test_apt_proxy import apTestHelper
-from apt_proxy.cache import FileVerifier
 from apt_proxy.apt_proxy_conf import apConfig
-from test_packages import get_test_deb_name
+from apt_proxy.test.test_apt_proxy import apTestHelper
+from apt_proxy.cache import CacheEntry
+from apt_proxy.apt_proxy import Factory
+
+class DummyRequest:
+    def finishCode(self, code, reason):
+        pass
 
-class FileVerifierTest(apTestHelper):
-    """
-    Create a test AptPackages using test data
-    """
+class FactoryVersionFuncsTest(apTestHelper):
     def setUp(self):
+        """
+        Make a configuration with a single backend
+        [files]
+        backends=file:///<path to test packages directory>
+        """
         apTestHelper.setUp(self)
-        self.c = apConfig(StringIO(self.config))
-        self.pkgdir=self.cache_dir+'/packages'
-        self.verifydir=self.cache_dir+'/verify'
-        shutil.copytree('../test_data/packages', self.pkgdir)
-        shutil.copytree('../test_data/verify', self.verifydir)
-        self.done = False
-        self.failure = None
-        self.timeout = reactor.callLater(5, self.failed, "test timeout")
-    
+        packagedir = self.cache_dir+"/packages"
+        filedir = os.path.normpath(os.getcwd()+"/../test_data/packages")
+        config = (self.config +
+                  "[files]\n" +
+                  "backends=file://" + filedir)
+        #print "config: " + config
+        self.c = apConfig(StringIO(config))
+        self.factory = Factory(self.c)
+        self.factory.createBackends()
     def tearDown(self):
-        try:
-            self.timeout.cancel()
-        except (error.AlreadyCancelled, error.AlreadyCalled):
-            pass
+        del(self.factory)
         apTestHelper.tearDown(self)
-    def passed(self, arg):
-        self.done = True
-    def failed(self, reason):
-        self.failure = reason
-        self.done = True
-    def runtest(self, verifier):
-        "Add callbacks and wait until result is received"
-        verifier.verify().addCallback(self.passed).addErrback(self.failed)
-        while not self.done:
-            reactor.iterate(0.1)
-        # we return when the test finishes or times out
-    def testVerifyUnknown(self):
-        v = FileVerifier(self.verifydir+'/nonempty-unknown-file', self.c)
-        self.runtest(v)
-        self.assertEquals(self.failure, None)
-    def testVerifyEmptyUnknown(self):
-        v = FileVerifier(self.verifydir+'/empty-unknown-file', self.c)
-        self.runtest(v)
-        self.assertNotEquals(self.failure, None)
-    def testVerifyGzip(self):
-        v = FileVerifier(self.verifydir+'/gzip.gz', self.c)
-        self.runtest(v)
-        self.assertEquals(self.failure, None)
-    def testVerifyInvalidGzip(self):
-        v = FileVerifier(self.verifydir+'/invalid-gzip.gz', self.c)
-        self.runtest(v)
-        self.assertNotEquals(self.failure, None)
-    def testVerifyBz(self):
-        v = FileVerifier(self.pkgdir+'/Packages.bz2', self.c)
-        self.runtest(v)
-        self.assertEquals(self.failure, None)
-    def testVerifyPackages(self):
-        v = FileVerifier(self.pkgdir+'/Packages', self.c)
-        self.runtest(v)
-        self.assertEquals(self.failure, None)
-    def testVerifyDeb(self):
-        debname = os.path.basename(get_test_deb_name())
-        v = FileVerifier(self.pkgdir+'/'+ debname, self.c)
-        self.runtest(v)
-        self.assertEquals(self.failure, None)
-
+    def testCacheEntryInit(self):
+        backend = self.factory.getBackend("files")
+        entry = CacheEntry(backend, "testdir/testfile.deb")
+        self.assertEquals(entry.backend, backend, "CacheEntry did not initialise backend")
+        self.assertEquals(entry.factory, self.factory, "CacheEntry did not initialise factory")
+        self.assertEquals(entry.path, "testdir/testfile.deb")
+        self.assertEquals(entry.file_path, self.cache_dir+"/files/testdir/testfile.deb")
+        self.assertEquals(entry.filedir, self.cache_dir+"/files/testdir")
+        self.assertEquals(entry.filetype.contype, "application/dpkg")
+        self.assertEquals(entry.filename, "testfile.deb")
+        self.assertEquals(entry.filebase, "testfile")
+        self.assertEquals(entry.fileext, ".deb")
+        self.assertEquals(len(entry.requests), 0)
+    def testCacheEntryAddClient(self):
+        backend = self.factory.getBackend("files")
+        entry = CacheEntry(backend, "testdir/testfile.deb")
+        r = DummyRequest()
+        entry.add_request(r)
+        self.assertEquals(len(entry.requests), 1)
+    def testCacheEntryAddDuplicate(self):
+        backend = self.factory.getBackend("files")
+        entry = CacheEntry(backend, "testdir/testfile.deb")
+        r = DummyRequest()
+        entry.add_request(r)
+        self.assertRaises(RuntimeError, entry.add_request, r)
+    def testCacheEntryRemove(self):
+        backend = self.factory.getBackend("files")
+        entry = CacheEntry(backend, "testdir/testfile.deb")
+        r = DummyRequest()
+        entry.add_request(r)
+        entry.remove_request(r)
+        self.assertEquals(len(entry.requests), 0)
+    def testCacheEntryStartDownload(self):
+        def start_download(entry):
+            # This test function replaces the normal
+            # Backend.start_download so we can see that
+            # it was called without starting the download
+            entry.entry_download_triggered = 1
+        backend = self.factory.getBackend("files")
+        backend.start_download = start_download
+        entry = CacheEntry(backend, "testdir/testfile.deb")
+        r = DummyRequest()
+        entry.add_request(r)
+        # Check that our special function was called
+        self.assertEquals(entry.entry_download_triggered, 1)

Modified: people/halls/rework/apt_proxy/test/test_packages.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_packages.py	(original)
+++ people/halls/rework/apt_proxy/test/test_packages.py	Thu Feb 16 22:00:16 2006
@@ -16,7 +16,8 @@
 
 """Unit test for packages.py"""
 
-from apt_proxy.packages import AptPackages, PackageFileList, get_mirror_versions, AptDpkgInfo
+from apt_proxy import packages 
+#import AptPackages, PackageFileList, get_mirror_versions, AptDpkgInfo, cleanup
 from apt_proxy.apt_proxy import Factory
 from apt_proxy.test.test_apt_proxy import apTestHelper
 from apt_proxy.apt_proxy_conf import apConfig
@@ -24,10 +25,20 @@
 from twisted.trial import unittest
 import shutil, os, re, glob
 
+class DummyCacheEntry:
+    """
+    Class that provides basic CacheEntry information
+    """
+    def __init__(self, cache_dir, backend, file):
+        self.filename = os.path.basename(file)
+        self.path = file
+        self.cache_path = backend + os.sep + file
+        self.file_path = cache_dir + os.sep + self.cache_path
+
 class PackageFileListTest(apTestHelper):
     def setUp(self):
         apTestHelper.setUp(self)
-        self.f = PackageFileList('test', self.cache_dir)
+        self.f = packages.PackageFileList('test', self.cache_dir)
     def tearDown(self):
         del(self.f) # Needed otherwise we'll get a database exception when cache dir is removed
         apTestHelper.tearDown(self)
@@ -35,9 +46,9 @@
         self.assertEqual(self.f.get_files(),[])
     def testAddPackages(self):
         shutil.copytree('../test_data/packages', self.cache_dir+'/packages')
-        self.f.update_file('packages/Packages')
+        self.f.update_file(DummyCacheEntry(self.cache_dir, 'packages','Packages'))
         self.assertEqual(self.f.get_files(),['packages/Packages'])
-        self.f.update_file('packages/Packages.gz') # This file should not be added
+        self.f.update_file(DummyCacheEntry(self.cache_dir, 'packages','Packages.gz')) # This file should not be added
         self.assertNotIn('packages/Packages.gz', self.f.get_files())
 
         # Remove packages file and check that it is removed from database
@@ -47,7 +58,7 @@
 class PackagesCacheTest(apTestHelper):
     def setUp(self):
         apTestHelper.setUp(self)
-        self.p = AptPackages('test', self.cache_dir)
+        self.p = packages.AptPackages('test', self.cache_dir)
     def tearDown(self):
         del(self.p)
         apTestHelper.tearDown(self)
@@ -56,7 +67,7 @@
     def testReLoadEmpty(self):
         self.failIfEqual(self.p.load(),True)
         del(self.p)
-        self.p = AptPackages('test', self.cache_dir)
+        self.p = packages.AptPackages('test', self.cache_dir)
 
 class PackagesTestHelper(apTestHelper):
     """
@@ -64,9 +75,9 @@
     """
     def setUp(self):
         apTestHelper.setUp(self)
-        self.p = AptPackages('test1', self.cache_dir)
+        self.p = packages.AptPackages('test1', self.cache_dir)
         shutil.copytree('../test_data/packages', self.cache_dir+'/packages')
-        self.p.file_updated('packages/Packages')
+        self.p.file_updated(DummyCacheEntry(self.cache_dir, 'packages','Packages'))
         #print "Cache dir:", self.cache_dir, '\n'
     def tearDown(self):
         del(self.p)
@@ -87,7 +98,7 @@
 
 def get_test_deb_info():
     "Return an AptDpkgInfo for our test deb"
-    return AptDpkgInfo(get_test_deb_name())
+    return packages.AptDpkgInfo(get_test_deb_name())
 
 class AptDpkgInfoTest(unittest.TestCase):
     def testGetInfo(self):
@@ -109,19 +120,23 @@
         self.factory = Factory(self.c)
         self.factory.createBackends()
         # Register test package files in db
-        self.factory.getBackend('packages').get_packages_db().file_updated('packages/Packages')
+        entry = DummyCacheEntry(self.cache_dir,'packages','Packages')
+        self.factory.getBackend('packages').get_packages_db().file_updated(entry)
         # Get version of apt used for testing
         self.aptinfo = get_test_deb_info()
         #print self.cache_dir
     def tearDown(self):
+        packages.cleanup(self.factory)
         del(self.factory)
         apTestHelper.tearDown(self)
 
     def testGetAllMirrorVersions(self):
-        aptversions = get_mirror_versions(self.factory, 'apt')
+        aptversions = packages.get_mirror_versions(self.factory, 'apt')
         self.assertEquals(self.aptinfo['Version'], aptversions[0][0])
         testdeb_name = get_test_deb_name().replace('../test_data/','') # strip test data directory
         self.assertEquals(testdeb_name, aptversions[0][1])
-        self.assertEquals([], get_mirror_versions(self.factory, 'unknown'))
+        self.assertEquals([], packages.get_mirror_versions(self.factory, 'unknown'))
     def testGetEmptyMirrorVersions(self):
-        self.assertEquals([], get_mirror_versions(self.factory, 'unknown'))
+        self.assertEquals([], packages.get_mirror_versions(self.factory, 'unknown'))
+    #def testGetMirrorPath(self):
+        #self.assertEquals([], packages.get_mirror_path(self.factory, 'unknown'))


