[apt-proxy-devel] r593 - in people/halls/rework/apt_proxy: . test
Chris Halls
halls at costa.debian.org
Mon Feb 20 09:39:22 UTC 2006
Author: halls
Date: Mon Feb 20 09:39:20 2006
New Revision: 593
Added:
people/halls/rework/apt_proxy/test/test_requests.py
Modified:
people/halls/rework/apt_proxy/apt_proxy.py
people/halls/rework/apt_proxy/cache.py
people/halls/rework/apt_proxy/fetchers.py
people/halls/rework/apt_proxy/test/test_apt_proxy.py
people/halls/rework/apt_proxy/test/test_cache.py
Log:
* Add test_requests with end-to-end client request tests (needs internet)
* Add more tests for CacheEntry and fetchers
* Requests for http backends now work well enough to return files to clients,
most of the time :)
Modified: people/halls/rework/apt_proxy/apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy.py (original)
+++ people/halls/rework/apt_proxy/apt_proxy.py Mon Feb 20 09:39:20 2006
@@ -58,8 +58,10 @@
self.factory = factory
self.config = config # apBackendConfig configuration information
self.base = config.name # Name of backend
- self.uris=[]
+ self.uris = [] # Sequence of BackendServers, in order of preference
self.queue = fetchers.DownloadQueue()
+ self.entries = {} # Hash of active cache entries
+ self.packages = None # Packages database for this backend
log.debug("Created Backend: " + self.base)
for uri in config.backends:
@@ -91,8 +93,10 @@
a new object is created if it does not already exist
"""
if self.entries.has_key(path):
+ log.debug("Cache entry exists: %s, %s entries" %(path,len(self.entries)))
return self.entries[path]
else:
+ log.debug("New Cache entry: "+path)
e = cache.CacheEntry(self, path)
self.entries[path] = e
return e
@@ -282,7 +286,7 @@
def finish(self):
"Finish request after streaming"
- log.debug("finish" , 'Fetcher')
+ log.debug("finish" , 'Request')
http.Request.finish(self)
if self.factory.config.disable_pipelining:
if hasattr(self.transport, 'loseConnection'):
@@ -358,7 +362,15 @@
self.packages: all versions of a certain package name.
"""
- databases=('update_times', 'access_times', 'packages')
+
+
+
+ def __init__ (self, config):
+ self.runningFetchers = {}
+ self.backends = {}
+ self.config = config
+ self.periodicCallback = None
+ self.databases = databaseManager(self)
def periodic(self):
"Called periodically as configured mainly to do mirror maintanace."
@@ -376,63 +388,15 @@
self.periodicCallback.cancel()
self.periodicCallback = None
def __del__(self):
- for f in self.databases:
- try:
- if hasattr(self, f):
- getattr(self, f).close()
- except Exception:
- pass
- def __init__ (self, config):
- self.runningFetchers = {}
- self.backends = []
- self.config = config
- self.periodicCallback = None
+ pass
+ #self.closeDatabases()
def __getattr__ (self, name):
- def open_shelve(dbname):
- from bsddb3 import db,dbshelve
-
- shelve = dbshelve.DBShelf()
- db_dir = self.config.cache_dir+'/'+status_dir+'/db'
- if not os.path.exists(db_dir):
- os.makedirs(db_dir)
-
- filename = db_dir + '/' + dbname + '.db'
- if os.path.exists(filename):
- try:
- log.debug('Verifying database: ' + filename)
- shelve.verify(filename)
- except:
- os.rename(filename, filename+'.error')
- log.msg(filename+' could not be opened, moved to '+filename+'.error','db', 1)
- log.msg('Recreating '+ filename,'db', 1)
- try:
- log.debug('Opening database ' + filename)
- shelve = dbshelve.open(filename)
-
- # Handle upgrade to new format included on 1.9.20.
- except db.DBInvalidArgError:
- log.msg('Upgrading from previous database format: %s' % filename + '.previous')
- import bsddb.dbshelve
- os.rename(filename, filename + '.previous')
- previous_shelve = bsddb.dbshelve.open(filename + '.previous')
- shelve = dbshelve.open(filename)
-
- for k in previous_shelve.keys():
- shelve[k] = previous_shelve[k]
- log.msg('Upgrade complete')
-
- return shelve
-
- if name == 'update_times':
- self.update_times = open_shelve('update')
- return self.update_times
- elif name == 'access_times':
- self.access_times = open_shelve('access')
- return self.access_times
- elif name == 'packages':
- self.packages = open_shelve('packages')
- return self.packages
+ # Auto open database if requested
+ if name in self.databases.table_names:
+ db = self.databases.get(name)
+ setattr(self, name, db)
+ return db
else:
raise AttributeError(name)
@@ -599,15 +563,25 @@
self.packages[package] = packages
self.dumpdbs()
+ def closeDatabases(self):
+ log.msg('---------closeDBS----------')
+ for db in self.databases.table_names:
+ log.debug("hasattr" + db, 'db')
+ if getattr(self.databases, db) is not None:
+ log.debug("closing " + db, 'db')
+ getattr(self,db).close()
+ delattr(self,db)
+ setattr(self.databases, db, None)
+
def stopFactory(self):
+ log.msg('---------stop----------')
import packages
- self.dumpdbs()
- self.update_times.close()
- self.access_times.close()
- self.packages.close()
+ # self.dumpdbs()
+ self.backends = {}
packages.cleanup(self)
self.recycler.stop()
self.stopPeriodic()
+ #self.closeDatabases()
def dumpdbs (self):
def dump_update(key, value):
@@ -648,3 +622,56 @@
def debug(self, message):
log.debug(message)
+
+class databaseManager:
+ update_times = None
+ access_times = None
+ packages = None
+ table_names=['update_times', 'access_times', 'packages']
+ database_files=['update', 'access', 'packages']
+
+ def __init__(self, factory):
+ self.factory = factory
+
+ def get(self, name):
+ idx = self.table_names.index(name)
+ db = getattr(self,name)
+ if db is None:
+ db = self.open_shelve(self.database_files[idx])
+ setattr(self, name, db)
+ return db
+
+ def open_shelve(self, dbname):
+ from bsddb3 import db,dbshelve
+
+ shelve = dbshelve.DBShelf()
+ db_dir = self.factory.config.cache_dir+'/'+status_dir+'/db'
+ if not os.path.exists(db_dir):
+ os.makedirs(db_dir)
+
+ filename = db_dir + '/' + dbname + '.db'
+ if os.path.exists(filename):
+ try:
+ log.debug('Verifying database: ' + filename)
+ shelve.verify(filename)
+ except:
+ os.rename(filename, filename+'.error')
+ log.msg(filename+' could not be opened, moved to '+filename+'.error','db', 1)
+ log.msg('Recreating '+ filename,'db', 1)
+ try:
+ log.debug('Opening database ' + filename)
+ shelve = dbshelve.open(filename)
+
+ # Handle upgrade to new format included on 1.9.20.
+ except db.DBInvalidArgError:
+ log.msg('Upgrading from previous database format: %s' % filename + '.previous')
+ import bsddb.dbshelve
+ os.rename(filename, filename + '.previous')
+ previous_shelve = bsddb.dbshelve.open(filename + '.previous')
+ shelve = dbshelve.open(filename)
+
+ for k in previous_shelve.keys():
+ shelve[k] = previous_shelve[k]
+ log.msg('Upgrade complete')
+
+ return shelve
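To summarise the apt_proxy.py refactoring: the three status databases (update_times, access_times, packages) are no longer opened inline inside __getattr__; a databaseManager owns one handle per table and the factory opens each table lazily on first attribute access (the Backend now also keeps its active cache entries in a dict). A minimal sketch of that lazy-opening pattern, using the stdlib shelve module in place of bsddb3 so it runs standalone; the directory layout here is illustrative:

# Sketch of the lazy-opening pattern, assuming stdlib shelve instead of
# the bsddb3 shelf used by apt-proxy; names mirror databaseManager above.
import os
import shelve

class DatabaseManager:
    table_names = ['update_times', 'access_times', 'packages']
    database_files = ['update', 'access', 'packages']

    def __init__(self, db_dir):
        self.db_dir = db_dir
        for name in self.table_names:
            setattr(self, name, None)  # not opened yet

    def get(self, name):
        idx = self.table_names.index(name)
        db = getattr(self, name)
        if db is None:
            if not os.path.exists(self.db_dir):
                os.makedirs(self.db_dir)
            path = os.path.join(self.db_dir, self.database_files[idx] + '.db')
            db = shelve.open(path)
            setattr(self, name, db)
        return db

class Factory(object):
    def __init__(self, db_dir):
        self.databases = DatabaseManager(db_dir)

    def __getattr__(self, name):
        # Only reached when normal attribute lookup fails: open the table
        # once, then cache it as a plain attribute for later accesses.
        if name in DatabaseManager.table_names:
            db = self.databases.get(name)
            setattr(self, name, db)
            return db
        raise AttributeError(name)

# factory = Factory('/tmp/ap-db')
# factory.update_times['debian/dists/stable/Release'] = 1140428360

The point of routing through __getattr__ is that the hook only fires once per table; after the first access the handle is an ordinary attribute.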
Modified: people/halls/rework/apt_proxy/cache.py
==============================================================================
--- people/halls/rework/apt_proxy/cache.py (original)
+++ people/halls/rework/apt_proxy/cache.py Mon Feb 20 09:39:20 2006
@@ -25,7 +25,7 @@
from twisted.internet import protocol, defer, reactor
from twisted.web import http
from twisted.protocols import basic
-import os, re, stat
+import os, re, stat, time, sys
from misc import log
class CacheEntry:
@@ -142,6 +142,7 @@
"""
if self.state == self.STATE_NEW:
if os.path.exists(self.file_path):
+ self.stat_file()
if self.check_age():
self.verify()
return
@@ -154,29 +155,37 @@
return a Deferred to be triggered when we find out.
"""
log.debug("check_cached: "+self.path, 'CacheEntry')
- verifier = FileVerifier(self, self.path, self.factory.config)
+ verifier = FileVerifier(self.file_path, self.factory.config)
d = verifier.verify()
d.addCallback(self.send_cached_file)
d.addErrback(self.start_download)
- def check_age(self):
+ def stat_file(self):
"""
- Read file age and check if file should be updated / refreshed
+ Read file age
"""
- stat_tuple = os.stat(self.path)
+ stat_tuple = os.stat(self.file_path)
self.file_mtime = stat_tuple[stat.ST_MTIME]
self.file_size = stat_tuple[stat.ST_SIZE]
log.debug("Modification time:" +
time.asctime(time.localtime(self.file_mtime)),
- "file_ok")
+ "CacheEntry")
+
+ def check_age(self):
+ """
+ Read file age and check if file should be updated / refreshed
+
+ @return True if file is still valid, False if file is out of date
+ """
+
update_times = self.factory.update_times
- if update_times.has_key(self.uri):
- last_access = update_times[self.uri]
+ if update_times.has_key(self.cache_path):
+ last_access = update_times[self.cache_path]
log.debug("last_access from db: " +
time.asctime(time.localtime(last_access)),
- "file_ok")
+ "CacheEntry")
else:
last_access = self.file_mtime
@@ -186,53 +195,56 @@
if not self.filetype.mutable:
log.debug("file is immutable: "+self.file_path, 'CacheEntry')
- deferred.callback(None)
+ return True
elif last_access < min_time:
log.debug("file is too old: "+self.file_path, 'CacheEntry')
- update_times[self.uri] = cur_time # TODO: Is this right?
- deferred.errback()
+ return False
else:
log.debug("file is ok: "+self.file_path, 'CacheEntry')
- deferred.callback(None)
+ return True
- def send_cached_file(self):
+ def send_cached_file(self, unused=None):
"""
File is up to date - send complete file from cache to clients
"""
log.msg("sending file from cache:" + self.file_path, "CacheEntry")
- self.transfer_file(self.path)
+ self.transfer_file(self.file_path)
def end_send_cached(self):
"""
Processing continues here when the file has been sent from the cache
"""
- pass
+ self.file_sent()
def transfer_file(self, filename):
"""
Send given file to clients
"""
- log.msg("transfer_file:" + self.file_path, "CacheEntry")
- stat_tuple = os.stat(filename)
- self.file_mtime = stat_tuple[stat.ST_MTIME]
- self.file_size = stat_tuple[stat.ST_SIZE]
- size = os.stat(filename)[stat.ST_SIZE]
-
- self.state = self.STATE_SENDFILE
- if size > 0:
- log.debug("Sending file to clients:%s size:%s" % (filename, size), 'CacheEntry')
- self.streamfile = open(filename,'rb')
- #fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
+ log.msg("transfer_file:" + filename, "CacheEntry")
+ try:
+ stat_tuple = os.stat(filename)
+ self.file_mtime = stat_tuple[stat.ST_MTIME]
+ self.file_size = stat_tuple[stat.ST_SIZE]
+ size = os.stat(filename)[stat.ST_SIZE]
- for request in self.requests:
- if request.start_streaming(self.file_size, self.file_mtime):
- basic.FileSender().beginFileTransfer(self.streamfile, request) \
- .addBoth(self.file_transfer_complete, request, filename)
- else:
- log.debug("Sending empty file to clients:%s" % (filename), 'CacheEntry')
- for request in self.requests:
- if request.start_streaming(self.file_size, self.file_mtime):
- request.finish()
+ self.state = self.STATE_SENDFILE
+ if size > 0:
+ log.debug("Sending file to clients:%s size:%s" % (filename, size), 'CacheEntry')
+ self.streamfile = open(filename,'rb')
+ #fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
+
+ for request in self.requests:
+ if request.start_streaming(self.file_size, self.file_mtime):
+ basic.FileSender().beginFileTransfer(self.streamfile, request) \
+ .addBoth(self.file_transfer_complete, request, filename)
+ else:
+ log.debug("Sending empty file to clients:%s" % (filename), 'CacheEntry')
+ for request in self.requests:
+ if request.start_streaming(self.file_size, self.file_mtime):
+ request.finish()
+ except Exception, e:
+ log.debug("Unexpected error: %s" % (e), 'CacheEntry')
+ raise
def file_transfer_complete(self, result, request, filename):
log.debug("transfer complete: " + filename, 'CacheEntry')
@@ -248,7 +260,7 @@
if(not os.path.exists(self.filedir)):
os.makedirs(self.filedir)
- def start_download(self):
+ def start_download(self, parm=None):
"""
Start file transfer from backend server
"""
@@ -339,6 +351,8 @@
self.state = self.STATE_SENT
self.backend.file_served(self)
self.factory.file_served(self.file_path)
+ self.factory.update_times[self.cache_path] = time.time()
+ self.state = self.STATE_NEW
def init_tempfile(self):
self.create_directory()
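Taken together, the cache.py changes split the old check_age() into stat_file() (reads mtime and size) plus a check_age() that now answers synchronously with a boolean instead of firing a deferred, and transfer_file() wraps the stat/stream sequence in a try/except. The freshness rule, condensed into a standalone function; min_refresh_delay is the real apt-proxy config knob, and min_time is assumed to be "now minus min_refresh_delay" (computed in context not shown in the hunk):

# Condensed version of the freshness decision made by check_age().
import time

def check_age(mutable, last_access, min_refresh_delay):
    """Return True if the cached file is still valid, False if out of date."""
    cur_time = time.time()
    min_time = cur_time - min_refresh_delay
    if not mutable:
        return True       # .deb contents never change once published
    elif last_access < min_time:
        return False      # Packages/Release older than the refresh delay
    else:
        return True

# A Packages-style (mutable) file last checked an hour ago, with a
# 30-minute refresh delay, needs refetching:
print(check_age(True, time.time() - 3600, 1800))  # False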
Modified: people/halls/rework/apt_proxy/fetchers.py
==============================================================================
--- people/halls/rework/apt_proxy/fetchers.py (original)
+++ people/halls/rework/apt_proxy/fetchers.py Mon Feb 20 09:39:20 2006
@@ -166,7 +166,7 @@
os.utime(self.local_file, (time.time(), 0))
self.cacheEntry.rename_file(self.streamFilename)
- self.cacheEntry.download_data_end()
+ self.download_complete()
def connection_failed(self, reason = None):
"""
@@ -215,6 +215,7 @@
so the file is sent from our cache
"""
self.cacheEntry.send_cached_file()
+ self.deferred.callback((True, ""))
class FileFetcher:
"""
@@ -270,6 +271,7 @@
def download(self, fetcher, uri, mtime):
# Request file from backend
self.fetcher = fetcher
+ self.uri = uri
backendServer = self.parent.backendServer
if self.proxy is None:
serverpath = backendServer.path
@@ -321,8 +323,8 @@
def handleEndHeaders(self):
if self.http_status == http.NOT_MODIFIED:
- log.debug("NOT_MODIFIED " + str(self.status_code),'http_client')
- self.parent.up_to_date()
+ log.debug("Backend server reported file is not modified: " + self.uri,'http_client')
+ self.fetcher.up_to_date()
elif self.http_status == http.NOT_FOUND:
log.debug("Not found on backend server",'http_client')
self.fetcher.file_not_found()
@@ -966,7 +968,9 @@
self.queue.append(cacheEntry)
if self.activeFile is None:
self.startNextDownload()
-
+ else:
+ log.debug("queue file " + cacheEntry.cache_path, 'DownloadQueue')
+
def startNextDownload(self):
if len(self.queue)>0:
log.debug("start next download", 'DownloadQueue')
@@ -985,7 +989,7 @@
log.debug("download queue is empty", 'DownloadQueue')
def downloadFinished(self, result):
- success, messaage = result
+ success, message = result
if success:
log.debug("download complete", 'DownloadQueue')
else:
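For context, the DownloadQueue hunks above only add logging and fix a typo; the underlying behaviour is that each queue runs one download at a time and later entries wait their turn. Roughly, as a sketch rather than the real class (startNextDownload and downloadFinished appear in the diff; addFile is a guessed method name):

# Sketch of DownloadQueue's one-at-a-time serialisation behaviour.
class DownloadQueue:
    def __init__(self):
        self.queue = []
        self.activeFile = None

    def addFile(self, cacheEntry):
        self.queue.append(cacheEntry)
        if self.activeFile is None:
            self.startNextDownload()
        # else: the entry waits in self.queue until the active one finishes

    def startNextDownload(self):
        if len(self.queue) > 0:
            self.activeFile = self.queue.pop(0)
            # the real code starts a fetcher here, which eventually calls
            # downloadFinished((success, message))
        else:
            self.activeFile = None

    def downloadFinished(self, result):
        success, message = result
        self.activeFile = None
        self.startNextDownload()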
Modified: people/halls/rework/apt_proxy/test/test_apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_apt_proxy.py (original)
+++ people/halls/rework/apt_proxy/test/test_apt_proxy.py Mon Feb 20 09:39:20 2006
@@ -23,6 +23,7 @@
from apt_proxy.apt_proxy_conf import apConfig
from apt_proxy.apt_proxy import Factory
+from apt_proxy.misc import log
config1="""
[DEFAULT]
@@ -73,8 +74,10 @@
self.cache_dir = tempfile.mkdtemp('.aptproxy')
self.config = self.default_config.replace('[DEFAULT]','[DEFAULT]\ncache_dir=' + self.cache_dir)
def tearDown(self):
+ log.debug('Removing temporary directory: ' + self.cache_dir)
shutil.rmtree(self.cache_dir)
-
+ self.assertRaises(OSError, os.stat, self.cache_dir)
+
class FactoryInitTest(apTestHelper):
def setUp(self):
self.default_config = config1
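The strengthened tearDown relies on os.stat raising OSError for a missing path, so assertRaises doubles as an "assert the tree is really gone" check. The idiom in isolation, as a self-contained trial test:

# os.stat raises OSError for a missing path; assertRaises verifies removal.
import os, shutil, tempfile
from twisted.trial import unittest

class RemovalExample(unittest.TestCase):
    def test_tree_removed(self):
        d = tempfile.mkdtemp('.aptproxy')
        shutil.rmtree(d)
        self.assertRaises(OSError, os.stat, d)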
Modified: people/halls/rework/apt_proxy/test/test_cache.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_cache.py (original)
+++ people/halls/rework/apt_proxy/test/test_cache.py Mon Feb 20 09:39:20 2006
@@ -16,20 +16,27 @@
"""Unit test for cache.py"""
-import os
+import os, time, shutil
from twisted.trial import unittest
+from twisted.internet import reactor
from StringIO import StringIO
from apt_proxy.apt_proxy_conf import apConfig
from apt_proxy.test.test_apt_proxy import apTestHelper
from apt_proxy.cache import CacheEntry
from apt_proxy.apt_proxy import Factory
+from apt_proxy.misc import log
class DummyRequest:
+ def __init__(self):
+ self.finished = False
+ self.streamed = 0
def finishCode(self, code, reason):
- pass
+ self.finished = True
+ def start_streaming(self, file_size, file_mtime):
+ self.streamed = self.streamed + 1
-class FactoryVersionFuncsTest(apTestHelper):
+class CacheEntryTest(apTestHelper):
def setUp(self):
"""
Make a configuration with a single backend
@@ -46,13 +53,17 @@
self.c = apConfig(StringIO(config))
self.factory = Factory(self.c)
self.factory.createBackends()
+ self.backend = self.factory.getBackend("files")
+ self.entry = CacheEntry(self.backend, "testdir/testfile.deb")
+ self.request = DummyRequest()
+
def tearDown(self):
del(self.factory)
apTestHelper.tearDown(self)
- def testCacheEntryInit(self):
- backend = self.factory.getBackend("files")
- entry = CacheEntry(backend, "testdir/testfile.deb")
- self.assertEquals(entry.backend, backend, "CacheEntry did not initialise backend")
+
+ def testInit(self):
+ entry = self.entry
+ self.assertEquals(entry.backend, self.backend, "CacheEntry did not initialise backend")
self.assertEquals(entry.factory, self.factory, "CacheEntry did not initialise factory")
self.assertEquals(entry.path, "testdir/testfile.deb")
self.assertEquals(entry.file_path, self.cache_dir+"/files/testdir/testfile.deb")
@@ -62,35 +73,112 @@
self.assertEquals(entry.filebase, "testfile")
self.assertEquals(entry.fileext, ".deb")
self.assertEquals(len(entry.requests), 0)
- def testCacheEntryAddClient(self):
- backend = self.factory.getBackend("files")
- entry = CacheEntry(backend, "testdir/testfile.deb")
- r = DummyRequest()
- entry.add_request(r)
- self.assertEquals(len(entry.requests), 1)
- def testCacheEntryAddDuplicate(self):
- backend = self.factory.getBackend("files")
- entry = CacheEntry(backend, "testdir/testfile.deb")
- r = DummyRequest()
- entry.add_request(r)
- self.assertRaises(RuntimeError, entry.add_request, r)
- def testCacheEntryRemove(self):
- backend = self.factory.getBackend("files")
- entry = CacheEntry(backend, "testdir/testfile.deb")
- r = DummyRequest()
- entry.add_request(r)
- entry.remove_request(r)
- self.assertEquals(len(entry.requests), 0)
- def testCacheEntryStartDownload(self):
+
+ def testAddClient(self):
+ self.entry.add_request(self.request)
+ self.assertEquals(len(self.entry.requests), 1)
+
+ def testAddDuplicate(self):
+ self.entry.add_request(self.request)
+ self.assertRaises(RuntimeError, self.entry.add_request, self.request)
+
+ def testRemove(self):
+ self.entry.add_request(self.request)
+ self.entry.remove_request(self.request)
+ self.assertEquals(len(self.entry.requests), 0)
+
+ def testStartDownload(self):
def start_download(entry):
# This test function replaces the normal
# Backend.start_download so we can see that
# it was called without starting the download
- entry.entry_download_triggered = 1
- backend = self.factory.getBackend("files")
- backend.start_download = start_download
- entry = CacheEntry(backend, "testdir/testfile.deb")
- r = DummyRequest()
- entry.add_request(r)
+ entry.entry_download_triggered = True
+ self.backend.start_download = start_download
+ self.entry.add_request(self.request)
# Check that our special function was called
- self.assertEquals(entry.entry_download_triggered, 1)
+ self.assertTrue(self.entry.entry_download_triggered)
+
+ def testCachedFile(self):
+ """
+ Create a valid text file and check that CacheEntry starts
+ to stream the file
+ """
+ def start_download(entry):
+ # This test function replaces the normal
+ # Backend.start_download so we can see that
+ # it was called without starting the download
+ entry.test_download = True
+ self.backend.start_download = start_download
+ entry = CacheEntry(self.backend, "testdir/test.txt")
+ entry.test_download = False
+ entry.create_directory()
+ f = open(entry.file_path, 'w')
+ f.write('12345')
+ f.close()
+ entry.add_request(self.request)
+ while not entry.test_download and not self.request.streamed:
+ #print "iterate.."
+ reactor.iterate(0.1)
+ # Check that our special function was not called
+ self.assertFalse(entry.test_download)
+ self.assertTrue(self.request.streamed)
+
+ def testVerifyFail(self):
+ """
+ Create a bogus .deb and check that CacheEntry starts
+ a download
+ """
+ def start_download(entry):
+ # This test function replaces the normal
+ # Backend.start_download so we can see that
+ # it was called without starting the download
+ entry.test_download = True
+ self.backend.start_download = start_download
+ entry = CacheEntry(self.backend, "testdir/test.deb")
+ entry.test_download = False
+ entry.create_directory()
+ f = open(entry.file_path, 'w')
+ f.write('this is not a real .deb')
+ f.close()
+ entry.add_request(self.request)
+ while not entry.test_download and not self.request.streamed:
+ #print "iterate.."
+ reactor.iterate(0.1)
+ # Check that our special function was not called
+ self.assertTrue(entry.test_download)
+ self.assertFalse(self.request.streamed)
+
+ def testCheckAgeImmutable(self):
+ # testfile.deb is immutable
+ self.entry.file_mtime = 0
+ self.assertTrue(self.entry.check_age())
+
+ self.entry.file_mtime = time.time()+1000
+ self.assertTrue(self.entry.check_age())
+
+ def testCheckAgeMutable(self):
+ # pretend that testfile.deb is mutable, i.e.
+ # it will be updated like Packages, Release
+ self.entry.filetype.mutable = True
+ self.entry.file_mtime = 0
+ self.assertFalse(self.entry.check_age())
+
+ self.entry.file_mtime = time.time()+1000
+ self.assertTrue(self.entry.check_age())
+
+ def testCreateDirectory(self):
+ dirname = self.cache_dir+"/files/testdir"
+ self.assertRaises(OSError, os.stat, dirname) # Raises OSError because the directory does not exist yet
+ self.entry.create_directory()
+ os.stat(dirname) # Raises OSError if the directory was not created
+
+ def testStatFile(self):
+ filename = self.cache_dir+"/files/testdir/testfile.deb"
+ self.entry.create_directory()
+ f = open(filename, 'w')
+ f.write('12345')
+ f.close()
+ close_time = time.time()
+ self.entry.stat_file()
+ self.assertApproximates(self.entry.file_mtime, close_time, 1)
+ self.assertEquals(self.entry.file_size, 5)
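Several of the new tests drive the event loop by hand, calling reactor.iterate until a flag set by the code under test flips. The same pattern factored into a helper (hypothetical; the tests above inline it):

# Busy-wait helper matching the loops in testCachedFile/testVerifyFail.
# reactor.iterate() runs one reactor slice; the 0.1s delay matches the tests.
from twisted.internet import reactor

def wait_until(condition, iterations=50):
    """Spin the reactor until condition() is true or we give up."""
    for _ in range(iterations):
        if condition():
            return True
        reactor.iterate(0.1)
    return False

# e.g. wait_until(lambda: entry.test_download or request.streamed)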
Added: people/halls/rework/apt_proxy/test/test_requests.py
==============================================================================
--- (empty file)
+++ people/halls/rework/apt_proxy/test/test_requests.py Mon Feb 20 09:39:20 2006
@@ -0,0 +1,204 @@
+#
+# Copyright (C) 2006 Chris Halls <halls at debian.org>
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of version 2.1 of the GNU Lesser General Public
+# License as published by the Free Software Foundation.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+"""This module tests the client protocol itself"""
+
+import os
+from twisted.trial import unittest
+from twisted.internet import protocol, reactor
+from twisted.web import http
+from StringIO import StringIO
+
+from apt_proxy.apt_proxy_conf import apConfig
+from apt_proxy.test.test_apt_proxy import apTestHelper
+from apt_proxy.cache import CacheEntry
+from apt_proxy.apt_proxy import Factory
+from apt_proxy.misc import log
+
+class uriRequester(http.HTTPClient):
+ """
+ Helper class to request files and parse responses
+ """
+ def __init__(self, factory):
+ self.factory = factory
+ def connectionMade(self):
+ """
+ Http connection made
+ """
+ # print "connection made! requesting:", self.factory.filename
+ self.sendCommand("GET", self.factory.filename)
+ self.sendHeader('host', self.factory.host)
+ self.endHeaders()
+
+ #def handleStatus(self, version, code, message):
+ def handleStatus(self, version, code, message):
+ log.debug('handleStatus: (%s) %s - %s, expected:%s' %
+ (version, code, message, self.factory.expectedResponse), 'uriRequesterTest')
+ self.http_status = int(code)
+
+
+ #def dataReceived(self, data):
+ # print "Data received: "+data
+ def handleResponse(self, buffer):
+ log.debug('data received: %s bytes' % (len(buffer)), 'uriRequesterTest')
+ self.received_len = len(buffer)
+ if self.http_status != self.factory.expectedResponse:
+ self.factory.failed()
+ else:
+ self.factory.passed()
+
+class requestFactory(protocol.ClientFactory):
+ """
+ Helper factory to connect to apt-proxy and send
+ HTTP requests using uriRequester
+ """
+ def __init__(self, filename, host, expectedResponse):
+ self.filename = filename
+ self.host = host
+ self.testDone = False
+ self.testPassed = False
+ self.timedOut = False
+ self.expectedResponse = expectedResponse
+ self.timeout = reactor.callLater(5, self.timeout)
+ #def startedConnecting(self, connector):
+ # print 'Started to connect.'
+ def buildProtocol(self, addr):
+ p = uriRequester(self)
+ p.factory = self
+ self.protocol = p
+ return p
+ def clientConnectionLost(self, connector, reason):
+ log.debug('Lost connection. Reason:'+ str(reason))
+ self.testDone = True
+ self.cancelTimeout()
+
+ def cancelTimeout(self):
+ if self.timeout is not None:
+ self.timeout.cancel()
+ self.timeout = None
+
+ def clientConnectionFailed(self, connector, reason):
+ print 'Connection failed. Reason:', reason
+ self.failed()
+
+ def timeout(self):
+ # print 'Test timeout'
+ self.timeout = None
+ self.testDone = True
+ self.timedOut = True
+ def passed(self):
+ self.testDone = True
+ self.testPassed = True
+ self.cancelTimeout()
+ def failed(self):
+ self.testDone = True
+ self.cancelTimeout()
+
+class TestRequestHelper(apTestHelper):
+ def setUp(self, config):
+ apTestHelper.setUp(self)
+ config = self.config + '\n' + config
+ self.c = apConfig(StringIO(config))
+ self.factory = Factory(self.c)
+ self.factory.configurationChanged()
+ self.port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
+
+ def tearDown(self):
+ self.port.stopListening()
+ self.factory.stopFactory()
+ del(self.factory)
+ apTestHelper.tearDown(self)
+ self.assertRaises(OSError, os.stat, self.cache_dir)
+
+ def doRequest(self, file, responseCode):
+ portno = self.port.getHost().port
+ clientFactory = requestFactory(file, "localhost:%s"% (portno), responseCode)
+ connection = reactor.connectTCP("localhost", portno, clientFactory)
+
+ while clientFactory.testDone == False:
+ #print "iterate.."
+ reactor.iterate(0.1)
+ self.assertNotEquals(clientFactory.timedOut, True)
+ self.assertEquals(clientFactory.testPassed, True)
+
+ connection.disconnect()
+
+class FileBackendTest(TestRequestHelper):
+ def setUp(self):
+ """
+ Make a configuration with a single backend
+ [files]
+ backends=file:///<path to test packages directory>
+ """
+ filedir = os.path.normpath(os.getcwd()+"/../test_data/packages")
+ config = ("dynamic_backends=off\n"+
+ "[files]\n" +
+ "backends=file://" + filedir)
+ #print "config: " + config
+ TestRequestHelper.setUp(self, config)
+
+ def testNotFound(self):
+ self.doRequest('/files/test.gz', http.NOT_FOUND)
+ def testPackagesFile(self):
+ self.doRequest('/files/Packages.gz', http.OK)
+ def testForbidden(self):
+ self.doRequest('/notbackend/Release', http.NOT_FOUND)
+
+class DebianHttpBackendTest(TestRequestHelper):
+ def setUp(self):
+ """
+ Make a configuration with a single backend
+ [debian]
+ backends=http://ftp.debian.org/debian
+ """
+ config = ("""
+dynamic_backends=off
+[debian]
+backends=http://ftp.debian.org/debian
+ """)
+ #print "config: " + config
+ TestRequestHelper.setUp(self, config)
+
+ def testNotFound(self):
+ self.doRequest('/debian/NotHere.gz', http.NOT_FOUND)
+ def testReleaseFile(self):
+ file = '/debian/dists/stable/Release.gpg'
+ filepath = self.cache_dir + file
+
+ # File should not be in cache
+ self.assertRaises(OSError, os.stat, filepath)
+ self.doRequest(file, http.OK)
+
+ # Check that file was really placed in cache
+ os.stat(filepath)
+
+ def testCached(self):
+ backend = 'debian'
+ file = '/' + backend + '/dists/stable/Release.gpg'
+ filepath = self.cache_dir + file
+
+ self.assertRaises(OSError, os.stat, filepath) # File is not in cache
+
+ self.doRequest(file, http.OK)
+ os.stat(filepath) # File is in cache
+
+ self.doRequest(file, http.OK)
+
+ #b = self.factory.getBackend(backend)
+ # TODO
+ log.debug("Downloading second copy", 'DebianHttpBackendTest')
+ self.factory.config.min_refresh_delay = 0
+ self.doRequest(file, http.OK)
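Putting the new module together: doRequest binds the apt-proxy factory to an ephemeral local port, connects a requestFactory client, and polls the reactor until the request passes, fails, or times out. The same round trip as a standalone function, a sketch reusing the classes defined above:

# Miniature of the round trip doRequest() performs; assumes requestFactory
# from this module is in scope.
from twisted.internet import reactor

def fetch_once(apt_proxy_factory, path, expected_code):
    port = reactor.listenTCP(0, apt_proxy_factory, interface="127.0.0.1")
    portno = port.getHost().port
    client = requestFactory(path, "localhost:%s" % portno, expected_code)
    connection = reactor.connectTCP("localhost", portno, client)
    while not client.testDone:
        reactor.iterate(0.1)
    connection.disconnect()
    port.stopListening()
    return client.testPassed and not client.timedOut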