[Apt-proxy-devel] Re: apt-proxy and file:// backends
Michael Vogt
mvo@debian.org
Fri, 17 Jun 2005 23:21:32 +0200
--OaZoDhBhXzo6bW1J
Content-Type: text/plain; charset=us-ascii
Content-Disposition: inline
Hi,
attached is a patch that makes file:// URIs in the backends
possible. It is based on the very helpful comments from Chris and
Otavio.
On Fri, Jun 17, 2005 at 07:48:09PM +0100, Chris Halls wrote:
[..]
> So maybe it's best for now to do the following:
>
> - Keep the bits of the patch making file:// URLs be recognised
> - Rename FetcherFile to FetcherCachedFile to make it obvious that it isn't a fetcher for a
> file:// backend
> - Make a new FetcherFile that serves the file using basic.FileSender().beginFileTransfer like
> FetcherFile does, but using the backend uri specified instead of self.local_file
>
> Can we discuss this on apt-proxy-devel so it is archived?
I attached a new version of the patch. It probably needs some more
love, but I'm tired for today and my understanding of the apt-proxy
code is somewhat limited. It works for me(tm) but
comments/improvements are more than welcome (and it still contains a
ton of debug statements that need to be removed).
Cheers,
Michael
--
Linux is not The Answer. Yes is the answer. Linux is The Question. - Neo
--OaZoDhBhXzo6bW1J
Content-Type: text/plain; charset=us-ascii
Content-Disposition: attachment; filename="apt-proxy-local-files.diff"
Index: apt_proxy/apt_proxy_conf.py
===================================================================
--- apt_proxy/apt_proxy_conf.py (revision 525)
+++ apt_proxy/apt_proxy_conf.py (working copy)
@@ -129,7 +129,7 @@
if server[-1] == '/':
log.msg ("Removing unnecessary '/' at the end of %s"%(server))
server = server[0:-1]
- if urlparse.urlparse(server)[0] in ['http', 'ftp', 'rsync']:
+ if urlparse.urlparse(server)[0] in ['http', 'ftp', 'rsync', 'file']:
uris.append(server)
else:
log.msg ("WARNING: Wrong server '%s' found in backend '%s'. It was skiped." % (server, name))
Index: apt_proxy/apt_proxy.py
===================================================================
--- apt_proxy/apt_proxy.py (revision 525)
+++ apt_proxy/apt_proxy.py (working copy)
@@ -433,7 +433,7 @@
so serve the file from the disk cache
"""
self.setResponseCode(http.OK)
- self.apEndTransfer(FetcherFile)
+ self.apEndTransfer(FetcherCachedFile)
def apEndTransfer(self, fetcher_class):
"""
@@ -452,7 +452,7 @@
self.apEnd() # Remove requests from this fetcher
fetcher = None
for req in requests:
- if (fetcher_class != FetcherFile or req.serve_if_cached):
+ if (fetcher_class != FetcherCachedFile or req.serve_if_cached):
running = req.factory.runningFetchers
if (running.has_key(req.uri)):
#If we have an active Fetcher just use that
@@ -535,6 +535,49 @@
return self.request
+class FetcherFile(Fetcher):
+
+ def activate(self, request):
+ Fetcher.activate(self, request)
+ log.debug("FetcherFile.activate(): uri='%s' server='%s'" % (request.uri, request.backendServer.uri))
+ if not request.apFetcher:
+ log.debug("no request.apFetcher")
+ return
+
+ self.factory.file_served(request.uri)
+
+ # start the transfer
+ self.local_file = request.backendServer.uri[len("file:"):]+ request.uri
+ if not os.path.exists(self.local_file):
+ log.debug("not found: %s" % self.local_file)
+ request.setResponseCode(http.NOT_FOUND)
+ request.write("")
+ request.finish()
+ self.remove_request(request)
+ Fetcher.apEnd(self)
+ return
+ self.local_size = os.stat(self.local_file)[stat.ST_SIZE]
+
+ log.debug("Serving local file: " + self.local_file + " size:" + str(self.local_size), 'FetcherCachedFile')
+ file = open(self.local_file,'rb')
+ fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
+
+ request.setHeader("Content-Length", self.local_size)
+ #request.setHeader("Last-modified",
+ # http.datetimeToString(request.local_mtime))
+ basic.FileSender().beginFileTransfer(file, request) \
+ .addBoth(self.file_transfer_complete, request) \
+ .addBoth(lambda r: file.close())
+
+ # A file transfer has completed
+ def file_transfer_complete(self, result, request):
+ log.debug("transfer complete", 'FetcherCachedFile')
+ request.finish()
+ # Remove this client from request list
+ self.remove_request(request)
+ if len(self.requests) == 0:
+ Fetcher.apEnd(self)
+
class FetcherHttp(Fetcher, http.HTTPClient):
forward_headers = [
@@ -1088,7 +1131,7 @@
-class FetcherFile(Fetcher):
+class FetcherCachedFile(Fetcher):
"""
Sends the cached file or tells the client that the file was not
'modified-since' if appropriate.
@@ -1115,6 +1158,7 @@
self.remove_request(request)
def insert_request(self, request):
+ log.debug("FetcherCachedFile.insert_request()")
if not request.serve_if_cached:
request.finish()
return
@@ -1124,15 +1168,18 @@
self.start_transfer(request)
def activate(self, request):
+ log.debug("FetcherCachedFile.actiavte() %s %s" % (request, request.uriIndex))
Fetcher.activate(self, request)
if not request.apFetcher:
+ log.debug("no request.apFetcher")
return
self.factory.file_served(request.uri)
self.size = request.local_size
-
+ log.debug("local_size: %s" % request.local_size)
self.start_transfer(request)
def start_transfer(self, request):
+ log.debug("FetcherCachedFile.start_transfer() %s (%s)" % (request,self.local_file))
self.if_modified(request)
if len(self.requests) == 0:
@@ -1141,7 +1188,7 @@
return
if self.size:
- log.debug("Serving from cache: " + self.local_file + " size:" + str(self.size), 'FetcherFile')
+ log.debug("Serving from cache: " + self.local_file + " size:" + str(self.size), 'FetcherCachedFile')
file = open(self.local_file,'rb')
fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
@@ -1153,13 +1200,13 @@
.addBoth(lambda r: file.close())
# .addBoth(lambda r: request.transport.loseConnection())
else:
- log.debug("Zero length file! " + self.local_file, 'FetcherFile')
+ log.debug("Zero length file! " + self.local_file, 'FetcherCachedFile')
self.file_transfer_complete(None, request)
request.finish()
# A file transfer has completed
def file_transfer_complete(self, result, request):
- log.debug("transfer complete", 'FetcherFile')
+ log.debug("transfer complete", 'FetcherCachedFile')
request.finish()
# Remove this client from request list
self.remove_request(request)
@@ -1252,18 +1299,20 @@
'http' : FetcherHttp,
'ftp' : FetcherFtp,
'rsync': FetcherRsync,
+ 'file' : FetcherFile,
}
ports = {
'http' : 80,
'ftp' : 21,
'rsync': 873,
+ 'file' : 0,
}
def __init__(self, backend, uri):
self.backend = backend
self.uri = uri
log.debug("Created new BackendServer: " + uri)
-
+
# hack because urlparse doesn't support rsync
if uri[0:5] == 'rsync':
uri = 'http'+uri[5:]
@@ -1357,7 +1406,8 @@
self.backend = Backend(self.uri[1:].split('/')[0], self.factory,
("http://" + self.uri[1:].split('/')[0],))
self.backend_uri = self.backend.get_path(self.uri)
-
+
+ log.debug("backend: %s %s" % (self.backend.base, self.backend.uris))
self.backendServer = self.backend.get_first_server()
self.filetype = findFileType(self.uri)
@@ -1373,6 +1423,7 @@
self.finishCode(http.FORBIDDEN)
return
+ log.debug("now running fetch")
self.fetch()
def fetch(self, serve_cached=1):
@@ -1395,6 +1446,9 @@
integrity check or may be outdated.
"""
__pychecker__ = 'unusednames=result'
+
+ log.debug("fetch_real()")
+
if len(dummyFetcher.requests)==0:
#The request's are gone, the clients probably closed the
#conection
@@ -1403,12 +1457,15 @@
dummyFetcher.apEnd()
return
+
req = dummyFetcher.request
+
+ log.debug("cached: %s" % cached)
if cached:
msg = ("Using cached copy of %s"
%(dummyFetcher.request.local_file))
- fetcher_class = FetcherFile
+ fetcher_class = FetcherCachedFile
else:
msg = ("Consulting server about %s"
%(dummyFetcher.request.local_file))
@@ -1443,12 +1500,14 @@
running[self.uri].insert_request(self)
return running[self.uri]
else:
+ log.debug("no active fetcher for: "+self.uri)
#we make a FetcherDummy instance to hold other requests for the
#same file while the check is in process. We will transfer all
#the requests to a real fetcher when the check is done.
dummyFetcher = FetcherDummy(self)
#Standard Deferred practice
d = self.check_cached()
+ log.debug("after self.check_cached()")
d.addCallbacks(fetch_real, fetch_real,
(dummyFetcher, 1, running,), None,
(dummyFetcher, 0, running,), None)
@@ -1534,6 +1593,7 @@
deferred.callback(None)
log.debug("check_cached: "+self.local_file, 'file_ok')
+ log.debug("check_cached: "+self.local_file)
deferred = defer.Deferred()
if os.path.exists(self.local_file):
verifier = FileVerifier(self)
--OaZoDhBhXzo6bW1J--