[apt-proxy-devel] r647 - trunk/apt_proxy

Chris Halls halls at alioth.debian.org
Tue Apr 17 21:52:07 UTC 2007


Author: halls
Date: Tue Apr 17 21:52:07 2007
New Revision: 647

Modified:
   trunk/apt_proxy/fetchers.py
Log:
* Remove unused GzipFetcher
* Make log messages more consistent
* Fix connect(): wrap the throttling factory (not self) in TimeoutFactory so
  read-limit throttling and the connection timeout are applied together


Modified: trunk/apt_proxy/fetchers.py
==============================================================================
--- trunk/apt_proxy/fetchers.py	(original)
+++ trunk/apt_proxy/fetchers.py	Tue Apr 17 21:52:07 2007
@@ -334,7 +334,7 @@
     def download_complete(self):
         if self.finished: 
             return
-        log.debug("File transfer complete",'http_client')
+        log.debug("File transfer complete",'HttpFetcher')
         self.finished = True
         #if self.close_on_completion:
             #self.fetcher.disconnect()
@@ -345,29 +345,29 @@
 
     def handleStatus(self, version, code, message):
         __pychecker__ = 'unusednames=version,message'
-        log.debug('handleStatus %s - %s' % (code, message), 'http_client')
+        log.debug('handleStatus %s - %s' % (code, message), 'HttpFetcher')
         self.http_status = int(code)
 
         #self.setResponseCode(self.http_status)
 
     def handleResponse(self, buffer):
-        #log.debug('handleResponse, %s bytes' % (len(buffer)), 'http_client')
-        log.debug('handleResponse status=%s' % (self.http_status), 'http_client')
+        #log.debug('handleResponse, %s bytes' % (len(buffer)), 'HttpFetcher')
+        log.debug('handleResponse status=%s' % (self.http_status), 'HttpFetcher')
         if self.http_status == http.NOT_MODIFIED:
-            log.debug("Backend server reported file is not modified: " + self.uri,'http_client')
+            log.debug("Backend server reported file is not modified: " + self.uri,'HttpFetcher')
             self.fetcher.up_to_date()
         elif self.http_status == http.NOT_FOUND:
-            log.debug("Not found on backend server",'http_client')
+            log.debug("Not found on backend server",'HttpFetcher')
             self.fetcher.file_not_found()
         elif self.http_status == http.OK:
             self.download_complete()
         else:
-            log.debug("Unknown status code: %s" % (self.http_status),'http_client')
+            log.debug("Unknown status code: %s" % (self.http_status),'HttpFetcher')
             self.fetcher.fetcher_internal_error("Unknown status code: %s" % (self.http_status))
 
     def handleHeader(self, key, value):
 
-        log.debug("Received: " + key + " " + str(value), 'http_client')
+        log.debug("Received: " + key + " " + str(value), 'HttpFetcher')
         key = string.lower(key)
 
         if key == 'last-modified':
@@ -378,26 +378,26 @@
             self.fetcher.server_size(self.server_size)
         elif key == 'connection':
             if value == "close":
-                log.debug('will close on completion', 'http_client')
+                log.debug('will close on completion', 'HttpFetcher')
                 self.close_on_completion = True
             elif value == "keep-alive":
-                log.debug('will not close on completion', 'http_client')
+                log.debug('will not close on completion', 'HttpFetcher')
                 self.close_on_completion = False
 
     #def handleEndHeaders(self):
         #if self.http_status == http.NOT_MODIFIED:
-            #log.debug("Backend server reported file is not modified: " + self.uri,'http_client')
+            #log.debug("Backend server reported file is not modified: " + self.uri,'HttpFetcher')
             #self.fetcher.up_to_date()
         #elif self.http_status == http.NOT_FOUND:
-            #log.debug("Not found on backend server",'http_client')
+            #log.debug("Not found on backend server",'HttpFetcher')
             #self.fetcher.file_not_found()
         #else:
-            #log.debug("Unknown status code: %s" % (self.http_status),'http_client')
+            #log.debug("Unknown status code: %s" % (self.http_status),'HttpFetcher')
 
     def rawDataReceived(self, data):
         if self.http_status == http.OK:
             self.fetcher.data_received(data)
-            #log.debug("Recieved: %s expected: %s" % (self.fetcher.len_received, self.server_size),'http_client')
+            #log.debug("Recieved: %s expected: %s" % (self.fetcher.len_received, self.server_size),'HttpFetcher')
             if self.server_size is not None:
                 if self.fetcher.len_received >= self.server_size:
                     if self.fetcher.len_received == self.server_size:
@@ -405,7 +405,7 @@
                         #self.download_complete()
                     else:
                         log.err("File transfer overrun! Expected size:%s Received size:%s" % 
-                                (self.server_size, self.fetcher.len_received), 'http_client')
+                                (self.server_size, self.fetcher.len_received), 'HttpFetcher')
                         self.parent.fetcher_internal_error("Data overrun")
 
 #     def handleResponse(self, buffer):
@@ -429,23 +429,23 @@
 
     def sendCommand(self, command, path):
         "log the line and handle it to the base class."
-        log.debug(command + ":" + path,'http_client')
+        log.debug(command + ":" + path,'HttpFetcher')
         http.HTTPClient.sendCommand(self, command, path)
 
     def endHeaders(self):
         "log and handle to the base class."
         if self.log_headers != None:
-            log.debug(" Headers: " + self.log_headers, 'http_client')
+            log.debug(" Headers: " + self.log_headers, 'HttpFetcher')
             self.log_headers = None;
         http.HTTPClient.endHeaders(self)
 
     def sendHeader(self, name, value):
         "log and handle to the base class."
-        log.debug(name + " sendHeader:" + value,'http_client')
+        log.debug(name + " sendHeader:" + value,'HttpFetcher')
         http.HTTPClient.sendHeader(self, name, value)
         
     def disconnect(self):
-        log.debug("DISCONNECT:",'http_client')
+        log.debug("DISCONNECT:",'HttpFetcher')
         import traceback
         traceback.print_stack()
 
@@ -477,7 +477,7 @@
             factory = policies.ThrottlingFactory(self, readLimit=self.read_limit)
         self.timeout = self.backendServer.backend.config.timeout
         if self.timeout:
-            factory = policies.TimeoutFactory(self, timeoutPeriod=self.timeout)
+            factory = policies.TimeoutFactory(factory, timeoutPeriod=self.timeout)
         reactor.connectTCP(host, port, factory, self.backendServer.backend.config.timeout)
         return self.connectCallback
 
@@ -492,10 +492,10 @@
 
     def clientConnectionFailed(self, connector, reason):
         #self.instance.connectionFailed(reason)
-        log.debug("clientConnectionFailed reason: %s" % (reason), "http-client")
+        log.debug("clientConnectionFailed reason: %s" % (reason), "HttpFetcher")
         self.connectCallback.errback(reason)
     def clientConnectionLost(self, connector, reason):
-        log.debug("clientConnectionLost reason=%s" %(reason), "http-client")
+        log.debug("clientConnectionLost reason=%s" %(reason), "HttpFetcher")
         if self.connection is not None and self.connection.fetcher is not None:
             self.connection.fetcher.connection_closed(self)
 
@@ -690,7 +690,7 @@
             self.ftpclient.quit()
             self.ftpclient.transport.loseConnection()
             self.ftpclient = None
-
+            
     def connectionLost(self, reason=None):
         """
         Maybe we should do some recovery here, I don't know, but the Deferred
@@ -698,122 +698,6 @@
         """
         log.debug("lost connection: %s"%(reason),'ftp_client')
 
-class GzipFetcher(Fetcher, protocol.ProcessProtocol):
-    """
-    This is a fake Fetcher, it uses the real Fetcher from the request's
-    backend via LoopbackRequest to get the data and gzip's or gunzip's as
-    needed.
-
-    NOTE: We use the serve_cached=0 parameter to Request.fetch so if
-    it is cached it doesn't get uselessly read, we just get it from the cache.
-    """
-    post_convert = re.compile(r"^Should not match anything$")
-    gzip_convert = post_convert
-
-    exe = '/bin/gzip'
-    def activate(self, request, postconverting=0):
-        log.debug("FetcherGzip request:" + str(request.uri) + " postconvert:" + str(postconverting), 'gzip')
-        Fetcher.activate(self, request)
-        if not request.apFetcher:
-            return
-
-        self.args = (self.exe, '-c', '-9', '-n')
-        if(log.isEnabled('gzip',9)):
-            self.args += ('-v',)
-
-        if request.uri[-3:] == '.gz':
-            host_uri = request.uri[:-3]
-        else:
-            host_uri = request.uri+'.gz'
-            self.args += ('-d',)
-        self.host_file = self.factory.config.cache_dir + host_uri
-        self.args += (self.host_file,)
-
-        running = self.factory.runningFetchers
-        if not postconverting or running.has_key(host_uri):
-            #Make sure that the file is there
-            loop = LoopbackRequest(request, self.host_transfer_done)
-            loop.uri = host_uri
-            loop.local_file = self.host_file
-            loop.process()
-            self.loop_req = loop
-            loop.serve_if_cached=0
-            if running.has_key(host_uri):
-                #the file is on it's way, wait for it.
-                running[host_uri].insert_request(loop)
-            else:
-                #we are not postconverting, so we need to fetch the host file.
-                loop.fetch(serve_cached=0)
-        else:
-            #The file should be there already.
-            self.loop_req = None
-            self.host_transfer_done()
-
-    def host_transfer_done(self):
-        """
-        Called by our LoopbackRequest when the real Fetcher calls
-        finish() on it.
-
-        If everything went well, check mtimes and only do the work if needed.
-
-        If posible arrange things so the target file gets the same mtime as
-        the host file.
-        """
-        log.debug('transfer done', 'gzip')
-        if self.loop_req and self.loop_req.code != http.OK:
-            self.setResponseCode(self.loop_req.code,
-                                 self.loop_req.code_message)
-            self.apDataReceived("")
-            self.apDataEnd("")
-            return
-
-        if os.path.exists(self.host_file):
-            self.local_mtime = os.stat(self.host_file)[stat.ST_MTIME]
-        old_mtime = None
-        if os.path.exists(self.local_file):
-            old_mtime = os.stat(self.local_file)[stat.ST_MTIME]
-        if self.local_mtime == old_mtime:
-            self.apEndCached()
-        else:
-            log.debug("Starting process: " + self.exe + " " + str(self.args), 'gzip')
-            self.process = reactor.spawnProcess(self, self.exe, self.args)
-
-    def outReceived(self, data):
-        self.setResponseCode(http.OK)
-        self.apDataReceived(data)
-
-    def errReceived(self, data):
-        log.debug('gzip: ' + data,'gzip')
-
-    def loseConnection(self):
-        """
-        This is a bad workaround Process.loseConnection not doing it's
-        job right.
-        The problem only happends when we try to finish the process
-        while decompresing.
-        """
-        if hasattr(self, 'process') and self.process.pid:
-            try:
-                os.kill(self.process.pid, signal.SIGTERM)
-                self.process.connectionLost()
-            except exceptions.OSError, Error:
-                import errno
-                (Errno, Errstr) = Error
-                if Errno != errno.ESRCH:
-                    log.debug('Passing OSError exception '+Errstr)
-                    raise 
-                else:
-                    log.debug('Threw away exception OSError no such process')
-
-    def processEnded(self, reason=None):
-        __pychecker__ = 'unusednames=reason'
-        log.debug("Status: %d" %(self.process.status),'gzip')
-        if self.process.status != 0:
-            self.setResponseCode(http.NOT_FOUND)
-
-        self.apDataReceived("")
-        self.apDataEnd(self.transfered)
-
 class RsyncFetcher(protocol.ProcessProtocol):
     """
     Fetch a file using the rsync protocol



More information about the apt-proxy-devel mailing list