[apt-proxy-devel] r634 - in trunk: apt_proxy apt_proxy/test

Chris Halls halls at costa.debian.org
Tue Aug 22 10:09:03 UTC 2006


Author: halls
Date: Tue Aug 22 10:08:57 2006
New Revision: 634

Added:
   trunk/apt_proxy/clients.py
Modified:
   trunk/apt_proxy/apt_proxy.py
   trunk/apt_proxy/cache.py
   trunk/apt_proxy/test/test_apt_proxy.py
   trunk/apt_proxy/test/test_requests.py
   trunk/debian/changelog

Log:
* Uncompress Packages.gz and Packages.bz2 on the fly


Modified: trunk/apt_proxy/apt_proxy.py
==============================================================================
--- trunk/apt_proxy/apt_proxy.py	(original)
+++ trunk/apt_proxy/apt_proxy.py	Tue Aug 22 10:08:57 2006
@@ -26,6 +26,7 @@
 import fetchers, cache, packages
 from misc import log, MirrorRecycler
 import twisted_compat
+from clients import HttpRequestClient
 
 #from posixfile import SEEK_SET, SEEK_CUR, SEEK_END
 #since posixfile is considered obsolete I'll define the SEEK_* constants
@@ -207,147 +208,6 @@
         return ('(' + self.backend.base + ') ' + self.scheme + '://' +
                self.host + ':' + str(self.port))
 
-class Request(http.Request):
-    """
-    Each new request from connected clients generates a new instance of this
-    class, and process() is called.
-    """
-    if_modified_since = None
-    local_size = None
-    serve_if_cached = 1
-    apFetcher = None
-    uriIndex = 0             # Index of backend URI
-    backend = None           # Backend for this request
-    backendServer = None     # Current server to be tried
-    cacheEntry = None        # Cache entry for file requested
-    
-    def __init__(self, channel, queued):
-        log.debug("New Request, queued=%s" % (queued),'Request');
-        self.factory=channel.factory
-        http.Request.__init__(self, channel, queued)
-
-    def process(self):
-        """
-        Each new request begins processing here
-        """
-        self.uri = self.clean_path(self.uri)
-
-        if_modified_since = self.getHeader('if-modified-since')
-        if if_modified_since != None:
-            self.if_modified_since = http.stringToDatetime(
-                    if_modified_since)
-
-        if self.uri[0] != '/':
-            log.debug("Request must include at least one '/'")
-            self.finishCode(http.FORBIDDEN, "Request must include at least one '/'")
-            return
-
-        backendName = self.uri[1:].split('/')[0]
-        log.debug("Request: %s %s backend=%s uri=%s"
-                    % (self.method, self.uri, backendName, self.uri),'Request')
-
-        if self.method != 'GET':
-            #we currently only support GET
-            log.debug("abort - method not implemented", 'Request')
-            self.finishCode(http.NOT_IMPLEMENTED)
-            return
-
-        if re.search('/\.\./', self.uri):
-            log.debug("/../ in simplified uri ("+self.uri+")", 'Request')
-            self.finishCode(http.FORBIDDEN)
-            return
-
-        self.backend = self.factory.getBackend(backendName)
-        if self.backend is None:
-            self.finishCode(http.NOT_FOUND, "NON-EXISTENT BACKEND")
-            return None
-
-        log.debug("backend: %s %s" % (self.backend.base, self.backend.uris))
-
-        backend_path = self.uri.split('/',2)[2]
-        self.cacheEntry = self.backend.get_cache_entry(backend_path)
-
-        if not self.cacheEntry.filetype:
-            log.debug("abort - unknown extension for file %s" % (backend_path), 'Request')
-            self.finishCode(http.FORBIDDEN, 'File not found - unknown extension')
-            return
-
-        self.setHeader('content-type', self.cacheEntry.filetype.contype)
-
-        if os.path.isdir(self.cacheEntry.file_path):
-            log.debug("abort - Directory listing not allowed", 'Request')
-            self.finishCode(http.FORBIDDEN, 'Directory listing not permitted')
-            return
-
-        self.cacheEntry.add_request(self)
-
-    def clean_path(self, uri):
-        # Clean up URL given
-        scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri)
-        return os.path.normpath(path)
-
-    def not_modified(self):
-        """
-        File is not modified - send http hit
-        """
-        self.setHeader("content-length", 0)
-        self.finishCode(http.NOT_MODIFIED, 'File is up to date')
-
-    def start_streaming(self, size, mtime):
-        """
-        Prepare client to stream file
-        Return false if streaming is not necessary (i.e. cache hit)
-        """
-        if self.if_modified_since is None or self.if_modified_since < mtime:
-            log.debug("start_streaming size=%s mtime=%s if_modified_since=%s" % (size, mtime, self.if_modified_since) , 'Request')
-            if mtime is not None:
-                self.setHeader('last-modified', http.datetimeToString(mtime))
-            if size is not None:
-                self.setHeader('content-length', size)
-            self.setResponseCode(http.OK, 'Streaming file')
-            return True
-        else:
-            log.debug("file not modified: mtime=%s if_modified_since=%s" % (mtime, self.if_modified_since) , 'Request')
-            self.not_modified()
-            return False
-
-    def finishCode(self, responseCode, message=None):
-        "Finish the request with a status code and no streamed data"
-        log.debug("finishCode: %s, %s" % (responseCode, message), 'Request')
-        self.setResponseCode(responseCode, message)
-        self.write("")
-        self.finish()
-
-    def finish(self):
-        "Finish request after streaming"
-        log.debug("finish. Queued: %s" % (self.queued) , 'Request')
-        http.Request.finish(self)
-
-        if self.cacheEntry:
-            self.cacheEntry.remove_request(self)
-            self.cacheEntry = None
-
-    def connectionLost(self, reason=None):
-        """
-        The connection with the client was lost, remove this request from its
-        Fetcher.
-        """
-        log.debug("connectionLost" , 'Request')
-        if self.cacheEntry:
-            self.cacheEntry.remove_request(self)
-        #self.finish()
-
-    def getFileno(self):
-        """
-        Get identifier which is unique per apt client
-        """
-        try:
-            fileno = self.channel.transport.fileno()
-        except:
-            fileno = -1
-            log.msg("could not get transport's file descriptor", 'Request')
-        return fileno
-
 class Channel(http.HTTPChannel):
     """
     This class encapsulates a channel (an HTTP socket connection with a single
@@ -355,7 +215,7 @@
 
     Each incoming request is passed to a new Request instance.
     """
-    requestFactory = Request
+    requestFactory = HttpRequestClient
     log_headers = None
 
     def headerReceived(self, line):

Modified: trunk/apt_proxy/cache.py
==============================================================================
--- trunk/apt_proxy/cache.py	(original)
+++ trunk/apt_proxy/cache.py	Tue Aug 22 10:08:57 2006
@@ -28,6 +28,8 @@
 import os, re, stat, time, sys
 from misc import log
 
+import clients
+
 class CacheEntry:
     """
     This class manages operations on a file in the cache.  Each physical
@@ -48,6 +50,20 @@
 
     bytesDownloaded = 0
 
+    def stateToString(self, state=None):
+        if state is None:
+            state = self.state
+        for statenum, name in [
+            (self.STATE_NEW, 'New'),
+            (self.STATE_CONNECTING, 'Connecting'),
+            (self.STATE_DOWNLOAD, 'Download'),
+            (self.STATE_SENDFILE, 'Sendfile'),
+            (self.STATE_SENT, 'Sent'),
+            (self.STATE_FAILED, 'Failed')]:
+            if statenum == state:
+                return name
+        return 'Unknown'
+         
     def __init__(self, backend, path):
         """
         Create a new cache entry
@@ -57,6 +73,7 @@
         self.backend = backend
         self.factory = backend.factory
         self.requests = [] # Active client requests for this cache entry
+        self.requested_by_client = False # Did a real client request this file?
         self.streamfile = None
         self.state = self.STATE_NEW
 
@@ -85,6 +102,10 @@
 
         self.fetcher = None
 
+        if self.filetype.decompressor is not None:
+            # File needs to be decompressed
+            self.filetype.decompressor(self)
+            
     def add_request(self, request):
         """
         A new request has been received for this file
@@ -93,14 +114,17 @@
             raise RuntimeError, \
                   'this request is already assigned to this CacheEntry'
         self.requests.append(request)
+        log.debug('Add request: %s total clients: %s state:%s' % (request, len(self.requests), self.stateToString()), 'CacheEntry')
+        if hasattr(request, 'is_real_client'):
+            log.msg("this is a real request:" + self.file_path, "CacheEntry")
+            self.requested_by_client = True
         if(len(self.requests)==1):
             # First request
             self.get()
         else:
             # Subsequent request - client must be brought up to date
-            if self.state == self.STATE_DOWNLOAD:
-                raise RuntimeError, \
-                      'TODO: multiple clients not implemented yet'
+            if self.state in (self.STATE_DOWNLOAD, self.STATE_SENT, self.STATE_SENDFILE):
+                self.send_cached_file(request=request)
 
     def remove_request(self,request):
         """
@@ -115,7 +139,7 @@
             return
 
         log.debug("Last request removed",'cacheEntry')
-        self.backend.entry_done(self)
+        self.requests_done()
 
         # TODO - fixme
         #if (self.factory.config.complete_clientless_downloads == False
@@ -132,27 +156,21 @@
             #self.streamfile = None
             #os.remove(name)
 
-    def start_request_stream(self, request):
+    def requests_done(self):
         """
-        Prepare a request for streaming
+        All requests have been removed
         """
-        log.msg("start_request_stream:" + self.file_path, "CacheEntry")
-        request.startStreaming(self.size, self.mtime)
-
-        if self.streamfile.size() != 0:
-            request.write(self.streamfile.read_from(start=0)) # TODO - is this efficient?
-
-
+        self.backend.entry_done(self)
+        
     def get(self):
         """
         Update current version of file in cache
         """
         if self.state == self.STATE_NEW:
-            if os.path.exists(self.file_path):
-                self.stat_file()
-                if self.check_age():
-                    self.verify()
-                    return
+            self.stat_file()
+            if self.file_size is not None and self.check_age():
+                self.verify()
+                return
 
         self.start_download()
 
@@ -176,13 +194,17 @@
         """
         Read file age
         """
-        stat_tuple = os.stat(self.file_path)
-
-        self.file_mtime = stat_tuple[stat.ST_MTIME]
-        self.file_size = stat_tuple[stat.ST_SIZE]
-        log.debug("Modification time:" + 
-                  time.asctime(time.localtime(self.file_mtime)), 
-                  "CacheEntry")
+        if os.path.exists(self.file_path):
+            stat_tuple = os.stat(self.file_path)
+    
+            self.file_mtime = stat_tuple[stat.ST_MTIME]
+            self.file_size = stat_tuple[stat.ST_SIZE]
+            log.debug("Modification time:" +
+                    time.asctime(time.localtime(self.file_mtime)),
+                    "CacheEntry")
+        else:
+            self.file_mtime = None
+            self.file_size = None
 
     def check_age(self):
         """
@@ -215,13 +237,13 @@
             log.debug("file is ok: "+self.file_path, 'CacheEntry')
             return True
 
-    def send_cached_file(self, unused=None):
+    def send_cached_file(self, unused=None, request=None):
         """
         File is up to date - send complete file from cache to clients
         """
         if self.file_mtime is not None:
             log.msg("sending file from cache:" + self.file_path, "CacheEntry")
-            self.transfer_file(self.file_path)
+            self.transfer_file(self.file_path, request=request)
         else:
             log.msg("sending hits to all clients (%s)" % (self.file_path), "CacheEntry")
             for req in self.requests:
@@ -233,10 +255,17 @@
         """
         self.file_sent()
 
-    def transfer_file(self, filename):
+    def transfer_file(self, filename,request=None):
         """
         Send given file to clients
         """
+        if request is None:
+            # Start all requests
+            requests = self.requests
+        else:
+            # Start one request
+            requests = [request]
+            
         log.msg("transfer_file:" + filename, "CacheEntry")
         try:
             stat_tuple = os.stat(filename)
@@ -360,7 +389,7 @@
         Callback from Fetcher
         File streaming is complete
         """
-        log.msg("download_data_end:" + self.file_path, "CacheEntry")
+        log.msg("download_data_end: %s (%s clients)" % (self.file_path, len(self.requests)), "CacheEntry")
         self.state = self.STATE_SENT
 
         if self.streamfile is not None:
@@ -371,12 +400,15 @@
             if self.file_mtime != None:
                 os.utime(self.file_path, (time.time(), self.file_mtime))
             else:
-                log.debug("no local time: "+self.file_path,'Fetcher')
+                log.debug("no local time: "+self.file_path,'CacheEntry')
                 os.utime(self.file_path, (time.time(), 0))
 
         for req in self.requests:
-            req.finish()
-
+            log.debug('Finish ' + str(req), 'CacheEntry')
+            try:
+                req.finish()
+            except Exception, e:
+                log.err('Error finishing request %s - %s' % (req, e), 'CacheEntry')
         self.file_sent()
 
     def download_failure(self, http_code, reason):
@@ -406,7 +438,8 @@
         self.state = self.STATE_SENT
         self.fetcher = None
         self.backend.file_served(self)
-        self.factory.file_served(self.cache_path)
+        if self.requested_by_client:
+            self.factory.file_served(self.cache_path)
         self.factory.update_times[self.cache_path] = time.time()
         self.state = self.STATE_NEW
 
@@ -430,11 +463,12 @@
     .deb and .dsc are never changed once they are created.
     
     """
-    def __init__ (self, regex, contype, mutable):
+    def __init__ (self, regex, contype, mutable, decompressor=None):
         self.name = regex
         self.regex = re.compile(regex)
         self.contype = contype
         self.mutable = mutable
+        self.decompressor = decompressor # Class to use to decompress file
 
     def check (self, name):
         "Returns true if name is of this filetype"
@@ -459,6 +493,8 @@
     FileType(r"\.txt$", "text/plain", 1),
     FileType(r"\.html$", "text/html", 1),
 
+    FileType(r"(?:^|/)Packages.gz$", "text/plain", 1, decompressor=clients.GzUncompressClient),
+    FileType(r"(?:^|/)Packages.bz2$", "text/plain", 1, decompressor=clients.Bz2UncompressClient),
     FileType(r"(?:^|/)(?:Packages|Release(?:\.gpg)?|Sources|(?:Contents|Translation)-[a-z0-9]+)"
                         r"(?:\.(?:gz|bz2))?$",
              "text/plain", 1),

Added: trunk/apt_proxy/clients.py
==============================================================================
--- (empty file)
+++ trunk/apt_proxy/clients.py	Tue Aug 22 10:08:57 2006
@@ -0,0 +1,270 @@
+#
+# Copyright (C) 2006 Chris Halls <halls at debian.org>
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of version 2.1 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+"""
+A client is created each time a file should be downloaded/streamed.
+clients can be generated by:
+
+apt clients (HttpRequestClient)
+files that should be uncompressed such as Packages.bz2 (UncompressClient)
+
+CacheEntry objects will notify subscribed Clients of changes in state
+"""
+
+import os, re, urlparse, gzip, bz2
+from StringIO import StringIO
+
+from twisted.web import http
+from twisted.internet import reactor
+
+from misc import log
+
+class HttpRequestClient(http.Request):
+    """
+    Request generated from apt clients via http protocol
+
+    Each new request from connected clients generates a new instance of this
+    class, and process() is called.
+    """
+    if_modified_since = None # If-modified-since time requested by client
+    backend = None           # Backend for this request
+    backendServer = None     # Current server to be tried
+    cacheEntry = None        # Cache entry for file requested
+    is_real_client = True       # This class represents a real client, not a postprocessor
+    def __init__(self, channel, queued):
+        log.debug("New Request, queued=%s" % (queued),'HttpRequestClient');
+        self.factory=channel.factory
+        http.Request.__init__(self, channel, queued)
+
+    def process(self):
+        """
+        Each new request begins processing here
+        """
+        self.uri = self.clean_path(self.uri)
+
+        if_modified_since = self.getHeader('if-modified-since')
+        if if_modified_since != None:
+            self.if_modified_since = http.stringToDatetime(
+                    if_modified_since)
+
+        if self.uri[0] != '/':
+            log.debug("Request must include at least one '/'")
+            self.finishCode(http.FORBIDDEN, "Request must include at least one '/'")
+            return
+
+        backendName = self.uri[1:].split('/')[0]
+        log.debug("Request: %s %s backend=%s uri=%s"
+                    % (self.method, self.uri, backendName, self.uri),'HttpRequestClient')
+
+        if self.method != 'GET':
+            #we currently only support GET
+            log.debug("abort - method not implemented", 'HttpRequestClient')
+            self.finishCode(http.NOT_IMPLEMENTED)
+            return
+
+        if re.search('/\.\./', self.uri):
+            log.debug("/../ in simplified uri ("+self.uri+")", 'HttpRequestClient')
+            self.finishCode(http.FORBIDDEN)
+            return
+
+        self.backend = self.factory.getBackend(backendName)
+        if self.backend is None:
+            self.finishCode(http.NOT_FOUND, "NON-EXISTENT BACKEND")
+            return None
+
+        log.debug("backend: %s %s" % (self.backend.base, self.backend.uris), 'HttpRequestClient')
+
+        backend_path = self.uri.split('/',2)[2]
+        self.cacheEntry = self.backend.get_cache_entry(backend_path)
+
+        if not self.cacheEntry.filetype:
+            log.debug("abort - unknown extension for file %s" % (backend_path), 'HttpRequestClient')
+            self.finishCode(http.FORBIDDEN, 'File not found - unknown extension')
+            return
+
+        self.setHeader('content-type', self.cacheEntry.filetype.contype)
+
+        if os.path.isdir(self.cacheEntry.file_path):
+            log.debug("abort - Directory listing not allowed", 'HttpRequestClient')
+            self.finishCode(http.FORBIDDEN, 'Directory listing not permitted')
+            return
+
+        self.cacheEntry.add_request(self)
+
+    def clean_path(self, uri):
+        # Clean up URL given
+        scheme, netloc, path, params, query, fragment = urlparse.urlparse(uri)
+        return os.path.normpath(path)
+
+    def not_modified(self):
+        """
+        File is not modified - send http hit
+        """
+        self.setHeader("content-length", 0)
+        self.finishCode(http.NOT_MODIFIED, 'File is up to date')
+
+    def start_streaming(self, size, mtime):
+        """
+        Prepare client to stream file
+        Return false if streaming is not necessary (i.e. cache hit)
+        """
+        if self.if_modified_since is None or self.if_modified_since < mtime:
+            log.debug("start_streaming size=%s mtime=%s if_modified_since=%s" % (size, mtime, self.if_modified_since) , 'HttpRequestClient')
+            if mtime is not None:
+                self.setHeader('last-modified', http.datetimeToString(mtime))
+            if size is not None:
+                self.setHeader('content-length', size)
+            self.setResponseCode(http.OK, 'Streaming file')
+            return True
+        else:
+            log.debug("file not modified: mtime=%s if_modified_since=%s" % (mtime, self.if_modified_since) , 'HttpRequestClient')
+            self.not_modified()
+            return False
+
+    def finishCode(self, responseCode, message=None):
+        "Finish the request with a status code and no streamed data"
+        log.debug("finishCode: %s, %s" % (responseCode, message), 'HttpRequestClient')
+        self.setResponseCode(responseCode, message)
+        self.write("")
+        self.finish()
+
+    def finish(self):
+        "Finish request after streaming"
+        log.debug("finish. fileno:%s uri:%s" % (self.getFileno(), self.uri) , 'HttpRequestClient')
+        #import traceback
+        #traceback.print_stack()
+        http.Request.finish(self)
+
+        if self.cacheEntry:
+            reactor.callLater(0, self.cacheEntry.remove_request, self)
+            self.cacheEntry = None
+
+    def connectionLost(self, reason=None):
+        """
+        The connection with the client was lost, remove this request from its
+        Fetcher.
+        """
+        log.debug("connectionLost" , 'HttpRequestClient')
+        if self.cacheEntry:
+            reactor.callLater(0, self.cacheEntry.remove_request, self)
+            self.cacheEntry = None
+        #self.finish()
+
+    def getFileno(self):
+        """
+        Get identifier which is unique per apt client
+        """
+        try:
+            fileno = self.channel.transport.fileno()
+        except:
+            fileno = -1
+            log.msg("could not get transport's file descriptor", 'HttpRequestClient')
+        return fileno
+
+class UncompressClient:
+    """
+    Request generated from apt clients via http protocol
+
+    Each new request from connected clients generates a new instance of this
+    class, and process() is called.
+    """
+
+    logname = 'UncompressClient' # Name for log messages
+    if_modified_since = None
+        
+    class FilenameError(Exception):
+        def __init__(self, filename, msg):
+            self.filename = filename
+            self.msg = msg
+        def __str__(self):
+            return("Error in filename (%s): %s" % (self.filename, self.msg))
+
+    def __init__(self, compressedCacheEntry):
+        log.debug("New UncompressClient for %s" % (compressedCacheEntry),self.logname);
+        self.source = compressedCacheEntry
+        self.source.add_request(self)
+    
+    def get_dest_filename(self, path):
+        extlen = len(self.ext)
+        if len(path) < extlen:
+            raise self.FilenameError(path, "Filename is too short")
+        if path[-extlen:] != self.ext:
+            raise self.FilenameError(path, "Filename does not end in '%s'" % (self.ext))
+        return path[:-extlen]
+        
+    def not_modified(self):
+        pass
+
+    def getFileno(self):
+        return -1
+
+    def finishCode(self, responseCode, message=None):
+        "Request aborted"
+        self.finish()
+
+    def finish(self):
+        self.dest.download_data_end()
+        if self.source:
+            self.source = None
+
+    def start_streaming(self, size, mtime):
+        backend_path = self.get_dest_filename(self.source.path)
+        self.dest = self.source.backend.get_cache_entry(backend_path)
+        self.dest.stat_file()
+        if self.dest.file_mtime is not None and mtime < self.dest.file_mtime:
+            log.debug("Skipping decompression of file (%s mtime=%s), destination file (%s, mtime=%s) is newer" 
+                                % (self.source.path, mtime, self.dest.path, self.dest.file_mtime), self.logname)
+            self.finish()
+        else:
+            log.debug("Decompressing %s -> %s" % (self.source.path, self.dest.path), self.logname)
+            self.dest.init_tempfile() # Open file for streaming
+            self.dest.download_started(None, size, mtime)
+    def write(self, data):
+        log.debug("Decompressing %s bytes" % (len(data)), self.logname)
+        uncompressed = self.uncompress(data)
+        self.dest.download_data_received(uncompressed)
+        
+class GzUncompressClient(UncompressClient):
+    """
+    Uncompress file using gzip (e.g. Packages.gz)
+    """
+    logname = 'GzUncompressClient'
+    ext = '.gz'
+    
+    def __init__(self, compressedCacheEntry):
+        self.string = StringIO()
+        self.unzipper = gzip.GzipFile(compresslevel=0, fileobj = self.string)
+        UncompressClient.__init__(self, compressedCacheEntry)
+    def uncompress(self, data):
+        buflen = len(data)
+        self.string.write(data)
+        self.string.seek(-buflen, 1)
+        buf = self.unzipper.read()
+        return buf
+        
+class Bz2UncompressClient(UncompressClient):
+    """
+    Uncompress file using bzip2 (e.g. Packages.bz2)
+    """
+    logname = 'Bz2UncompressClient'
+    ext = '.bz2'
+    
+    def __init__(self, compressedCacheEntry):
+        self.decompressor = bz2.BZ2Decompressor()
+        UncompressClient.__init__(self, compressedCacheEntry)
+    def uncompress(self, data):
+        return self.decompressor.decompress(data)
+        
\ No newline at end of file

Modified: trunk/apt_proxy/test/test_apt_proxy.py
==============================================================================
--- trunk/apt_proxy/test/test_apt_proxy.py	(original)
+++ trunk/apt_proxy/test/test_apt_proxy.py	Tue Aug 22 10:08:57 2006
@@ -23,7 +23,8 @@
 from StringIO import StringIO
 
 from apt_proxy.apt_proxy_conf import apConfig
-from apt_proxy.apt_proxy import Factory, Request
+from apt_proxy.apt_proxy import Factory
+from apt_proxy.clients import HttpRequestClient
 from apt_proxy.misc import log
 
 config1="""
@@ -323,7 +324,7 @@
         class DummyChannel:
             factory = None
             transport = None
-        self.req = Request(DummyChannel(), None)
+        self.req = HttpRequestClient(DummyChannel(), None)
     def testSimplifyPath(self):
         self.assertEquals(self.req.clean_path('/foo/bar/../baz'), '/foo/baz')
     def testRemoveHost(self):

Modified: trunk/apt_proxy/test/test_requests.py
==============================================================================
--- trunk/apt_proxy/test/test_requests.py	(original)
+++ trunk/apt_proxy/test/test_requests.py	Tue Aug 22 10:08:57 2006
@@ -68,7 +68,7 @@
             "Pass incoming connection to our request"
             return self.request
         def clientConnectionLost(self, connector, reason):
-            log.debug('Lost connection.  Reason:'+ str(reason))
+            log.debug('Lost connection.  Reason:'+ str(reason), 'requestFactory')
         def clientConnectionFailed(self, connector, reason):
             log.err('Connection failed. Reason:', reason, 'requestFactory')
             self.request.failed(reason)
@@ -178,6 +178,7 @@
                     (received_len, self.nextTest.expectedSize), 'uriRequester')
             raise self.SizeError(received_len, self.nextTest.expectedSize)
         else:
+            log.debug("test passed", 'uriRequester')
             self.passed()
 
     def passed(self):
@@ -307,13 +308,21 @@
         """
         return self.downloadFiles(file)
 
-    def getFilePaths(self, file):
+    def calcFilePaths(self, file):
         """
         Given a filename, generate real filename and request path
         """
         filename = '/' + self.backendName + file
         sourcepath = self.testfilesdir+file
         destpath = self.cache_dir + filename
+        return filename, sourcepath, destpath
+
+    def getFilePaths(self, file):
+        """
+        Given a filename, generate real filename and request path.
+        Then check that file does not exist
+        """
+        filename, sourcepath, destpath = self.calcFilePaths(file)
         # File should not be in cache
         self.assertRaises(OSError, os.stat, destpath)
         return filename, sourcepath, destpath
@@ -332,6 +341,7 @@
         def checkPath(x):
             # Check that files were really placed in cache
             for f in self.filepaths:
+                log.debug("downloadFiles checkPath: %s" % (f))
                 os.stat(f)
         d.addCallback(checkPath)
         return d
@@ -340,9 +350,23 @@
         return self.downloadFile().addCallback(self.PackagesFile2)
     def PackagesFile2(self, x):
        # Check access time database was updated
-        self.assertApproximates(self.factory.access_times[self.filename], time.time(), 6)
+        filename = self.calcFilePaths(self.packagesTestFile)[0]
+        self.assertApproximates(self.factory.access_times[filename], time.time(), 6)
     testPackagesFile.timeout = 2
 
+    def testPackagesGzFile(self):
+        return self.downloadFile(self.packagesTestFile+'.gz').addCallback(self.PackagesUncompressed)
+    testPackagesGzFile.timeout = 2
+    def PackagesUncompressed(self, x):
+        # Check that Packages file was registered
+        filename = self.calcFilePaths(self.packagesTestFile)[0][1:] # Remove leading '/' from path
+        backend = self.factory.getBackend(self.backendName)
+        self.assertEquals(backend.get_packages_db().packages.get_files(), [filename])
+
+    def testPackagesBz2File(self):
+        return self.downloadFile(self.packagesTestFile+'.bz2').addCallback(self.PackagesUncompressed)
+    testPackagesBz2File.timeout = 2
+
     def testNotModifiedGreater(self):
         "Check http not modified is sent for new file"
         d = self.downloadFile()
@@ -554,6 +578,7 @@
         BackendTestBase.setUp(self, uri)
     def tearDown(self):
         self.rsyncserver.stop()
+        reactor.iterate(0.1)
         BackendTestBase.tearDown(self)
     def testTempFile(self):
         "rysnc Tempfile is detected"

Modified: trunk/debian/changelog
==============================================================================
--- trunk/debian/changelog	(original)
+++ trunk/debian/changelog	Tue Aug 22 10:08:57 2006
@@ -1,3 +1,10 @@
+apt-proxy (1.9.36~svn) unstable; urgency=low
+
+  * Uncompress Packages.gz and Packages.bz2 on the fly, and
+    update databases from these files (Closes: #TODO)
+
+ -- Chris Halls <halls at debian.org>  Tue, 22 Aug 2006 11:07:26 +0100
+
 apt-proxy (1.9.35) unstable; urgency=low
 
   * http_proxy option:



More information about the apt-proxy-devel mailing list