[apt-proxy-devel] r596 - in people/halls/rework: . apt_proxy
apt_proxy/test
Chris Halls
halls at costa.debian.org
Thu Mar 2 12:04:22 UTC 2006
Author: halls
Date: Thu Mar 2 12:04:21 2006
New Revision: 596
Modified:
people/halls/rework/apt_proxy/apt_proxy.py
people/halls/rework/apt_proxy/apt_proxy_conf.py
people/halls/rework/apt_proxy/cache.py
people/halls/rework/apt_proxy/fetchers.py
people/halls/rework/apt_proxy/test/test_apt_proxy.py
people/halls/rework/apt_proxy/test/test_cache.py
people/halls/rework/apt_proxy/test/test_packages.py
people/halls/rework/apt_proxy/test/test_requests.py
people/halls/rework/runtests
Log:
* More bug fixes for HTTP requests
* Add read bandwidth limiting
* More unit tests
Modified: people/halls/rework/apt_proxy/apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy.py (original)
+++ people/halls/rework/apt_proxy/apt_proxy.py Thu Mar 2 12:04:21 2006
@@ -102,6 +102,8 @@
return e
def entry_done(self, entry):
"A cache entry is finished and clients are disconnected"
+ #if self.entries.has_key(path):
+
def get_packages_db(self):
"Return packages parser object for the backend, creating one if necessary"
@@ -371,6 +373,11 @@
self.config = config
self.periodicCallback = None
self.databases = databaseManager(self)
+ self.recycler = None
+
+ def __del__(self):
+ pass
+ #self.closeDatabases()
def periodic(self):
"Called periodically as configured mainly to do mirror maintanace."
@@ -379,17 +386,16 @@
self.clean_old_files()
self.recycler.start()
log.debug("Periodic cleaning done")
- startPeriodic()
+ self.startPeriodic()
+
def startPeriodic(self):
if (self.config.cleanup_freq != None and self.periodicCallback is None):
self.periodicCallback = reactor.callLater(self.config.cleanup_freq, self.periodic)
+
def stopPeriodic(self):
if self.periodicCallback is not None:
self.periodicCallback.cancel()
self.periodicCallback = None
- def __del__(self):
- pass
- #self.closeDatabases()
def __getattr__ (self, name):
# Auto open database if requested
@@ -475,6 +481,8 @@
from packages import AptDpkgInfo, get_mirror_versions
for uri in packages[:]:
if not os.path.exists(cache_dir +'/'+ uri):
+ log.debug("clean_versions: file %s no longer exists"%(uri),
+ 'versions')
packages.remove(uri)
else:
try:
@@ -483,17 +491,17 @@
package_name = info['Package']
except SystemError:
log.msg("Found problems with %s, aborted cleaning"%(uri),
- 'max_versions')
+ 'versions')
return
- if len(info):
+ if len(cached_packages) > 0:
import apt_pkg
cached_packages.sort(reverse_compare)
- log.debug(str(cached_packages), 'max_versions')
+ log.debug(str(cached_packages), 'versions')
current_packages = get_mirror_versions(self, package_name)
current_packages.sort(reverse_compare)
- log.debug("Current Versions: " + str(current_packages), 'max_versions')
+ log.debug("Current Versions: " + str(current_packages), 'versions')
version_count = 0
@@ -515,6 +523,7 @@
if version_count > self.config.max_versions:
log.msg("Deleting " + cache_dir +'/'+ cached_packages[0][1], 'max_versions')
os.unlink(cache_dir +'/'+ cached_packages[0][1])
+ packages.remove(cached_packages[0][1])
del cached_packages[0]
def clean_old_files(self):
@@ -546,27 +555,30 @@
log.debug("old_file: non-existent "+file)
del self.update_times[file]
- def file_served(self, uri):
- "Update the databases, this file has just been served."
- self.access_times[uri]=time.time()
- if re.search("\.deb$", uri):
- package = re.sub("^.*/", "", uri)
+ def file_served(self, cache_path):
+ """
+ Update the databases, this file has just been served.
+ @param cache_path: path of file within cache e.g. debian/dists/stable/Release.gpg
+ """
+ log.debug("File served: %s" % (cache_path))
+ #path = os.sep + cache_path # Backwards compat
+ path = cache_path
+ self.access_times[path]=time.time()
+ if re.search("\.deb$", path):
+ package = re.sub("^.*/", "", path)
package = re.sub("_.*$", "", package)
if not self.packages.has_key(package):
- packages = [uri]
- self.packages[package] = packages
+ packages = [path]
else:
packages = self.packages[package]
- if not uri in packages:
- packages.append(uri)
+ if not path in packages:
+ packages.append(path)
self.clean_versions(packages)
- self.packages[package] = packages
+ self.packages[package] = packages
self.dumpdbs()
def closeDatabases(self):
- log.msg('---------closeDBS----------')
for db in self.databases.table_names:
- log.debug("hasattr" + db, 'db')
if getattr(self.databases, db) is not None:
log.debug("closing " + db, 'db')
getattr(self,db).close()
@@ -574,12 +586,13 @@
setattr(self.databases, db, None)
def stopFactory(self):
- log.msg('---------stop----------')
import packages
# self.dumpdbs()
self.backends = {}
packages.cleanup(self)
- self.recycler.stop()
+ if self.recycler is not None:
+ self.recycler.stop()
+ self.recycler = None
self.stopPeriodic()
#self.closeDatabases()
Modified: people/halls/rework/apt_proxy/apt_proxy_conf.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy_conf.py (original)
+++ people/halls/rework/apt_proxy/apt_proxy_conf.py Thu Mar 2 12:04:21 2006
@@ -98,7 +98,8 @@
['passive_ftp', 'on', 'boolean'],
['dynamic_backends', 'on', 'boolean'],
['http_proxy', None , 'proxyspec'],
- ['username', 'aptproxy', 'string']
+ ['username', 'aptproxy', 'string'],
+ ['read_limit', 0, '*int']
]
"""
@@ -112,7 +113,8 @@
['timeout', None, 'time'],
['passive_ftp', None, 'boolean'],
['backends', '', 'stringlist'],
- ['http_proxy', None , 'proxyspec']
+ ['http_proxy', None , 'proxyspec'],
+ ['read_limit', None, '*int']
]
DEFAULT_CONFIG_FILE = ['/etc/apt-proxy/apt-proxy-v2.conf',
Modified: people/halls/rework/apt_proxy/cache.py
==============================================================================
--- people/halls/rework/apt_proxy/cache.py (original)
+++ people/halls/rework/apt_proxy/cache.py Thu Mar 2 12:04:21 2006
@@ -44,7 +44,7 @@
STATE_SENDFILE = 4 # File is being sent from cache
STATE_SENT = 5 # Post download processing / waiting for clients to complete
- state = STATE_NEW
+
bytesDownloaded = 0
@@ -58,6 +58,7 @@
self.factory = backend.factory
self.requests = [] # Active client requests for this cache entry
self.streamfile = None
+ self.state = self.STATE_NEW
# Path of file within backend e.g. 'dists/stable/Release.gpg'
self.path = path
@@ -158,7 +159,12 @@
verifier = FileVerifier(self.file_path, self.factory.config)
d = verifier.verify()
d.addCallback(self.send_cached_file)
- d.addErrback(self.start_download)
+ d.addErrback(self.verify_failed)
+
+ def verify_failed(self, parm=None):
+ self.file_mtime = None
+ self.file_size = None
+ self.start_download()
def stat_file(self):
"""
@@ -223,9 +229,8 @@
log.msg("transfer_file:" + filename, "CacheEntry")
try:
stat_tuple = os.stat(filename)
- self.file_mtime = stat_tuple[stat.ST_MTIME]
- self.file_size = stat_tuple[stat.ST_SIZE]
- size = os.stat(filename)[stat.ST_SIZE]
+ mtime = stat_tuple[stat.ST_MTIME]
+ size = stat_tuple[stat.ST_SIZE]
self.state = self.STATE_SENDFILE
if size > 0:
@@ -234,13 +239,13 @@
#fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
for request in self.requests:
- if request.start_streaming(self.file_size, self.file_mtime):
+ if request.start_streaming(size, mtime):
basic.FileSender().beginFileTransfer(self.streamfile, request) \
.addBoth(self.file_transfer_complete, request, filename)
else:
log.debug("Sending empty file to clients:%s" % (filename), 'CacheEntry')
for request in self.requests:
- if request.start_streaming(self.file_size, self.file_mtime):
+ if request.start_streaming(size, mtime):
request.finish()
except Exception, e:
log.debug("Unexpected error: %s" % (e), 'CacheEntry')
@@ -260,7 +265,7 @@
if(not os.path.exists(self.filedir)):
os.makedirs(self.filedir)
- def start_download(self, parm=None):
+ def start_download(self):
"""
Start file transfer from backend server
"""
@@ -276,6 +281,7 @@
self.state = self.STATE_DOWNLOAD
self.create_directory()
self.fetcher = fetcher
+ self.file_mtime = mtime
"""
Use post_convert and gzip_convert regular expresions of the Fetcher
@@ -328,6 +334,12 @@
self.streamfile.close_and_rename(self.file_path)
self.streamfile = None
+ if self.file_mtime != None:
+ os.utime(self.file_path, (time.time(), self.file_mtime))
+ else:
+ log.debug("no local time: "+self.file_path,'Fetcher')
+ os.utime(self.file_path, (time.time(), 0))
+
for req in self.requests:
req.finish()
@@ -349,6 +361,7 @@
log.msg("file_sent:" + self.file_path, "CacheEntry")
self.state = self.STATE_SENT
+ self.fetcher = None
self.backend.file_served(self)
self.factory.file_served(self.file_path)
self.factory.update_times[self.cache_path] = time.time()
Modified: people/halls/rework/apt_proxy/fetchers.py
==============================================================================
--- people/halls/rework/apt_proxy/fetchers.py (original)
+++ people/halls/rework/apt_proxy/fetchers.py Thu Mar 2 12:04:21 2006
@@ -25,6 +25,7 @@
from twisted.web import static, http
from twisted.internet import protocol, reactor, defer
from twisted.python import failure
+from twisted.protocols import policies
from misc import log
@@ -155,17 +156,6 @@
All data has been transferred
"""
log.debug("Finished receiving data: " + self.cacheEntry.filename, 'Fetcher');
- if self.transfered is not None:
- self.transfered.close()
- self.transfered = None
-
- if self.fetcher.server_mtime != None:
- os.utime(self.local_file, (time.time(), self.fetcher.server_mtime))
- else:
- log.debug("no local time: "+self.local_file,'Fetcher')
- os.utime(self.local_file, (time.time(), 0))
- self.cacheEntry.rename_file(self.streamFilename)
-
self.download_complete()
def connection_failed(self, reason = None):
@@ -260,6 +250,8 @@
self.log_headers = None
self.fetcher = None
self.close_on_completion = False
+ self.server_mtime = None
+ self.server_size = None
def connectionMade(self):
"""
@@ -333,16 +325,17 @@
if self.http_status == http.OK:
self.fetcher.data_received(data)
#log.debug("Recieved: %s expected: %s" % (self.fetcher.len_received, self.server_size),'http_client')
- if self.fetcher.len_received >= self.server_size:
- if self.fetcher.len_received == self.server_size:
- log.debug("File transfer complete",'http_client')
- self.fetcher.download_complete()
- if self.close_on_completion:
- self.transport.loseConnection()
- else:
- log.err("File transfer overrun! Expected size:%s Received size:%s" %
- (self.server_size, self.fetcher.len_received), 'http_client')
- self.parent.download_failure(http.INTERNAL_SERVER_ERROR, "Data overrun")
+ if self.server_size is not None:
+ if self.fetcher.len_received >= self.server_size:
+ if self.fetcher.len_received == self.server_size:
+ log.debug("File transfer complete",'http_client')
+ self.fetcher.download_complete()
+ if self.close_on_completion:
+ self.transport.loseConnection()
+ else:
+ log.err("File transfer overrun! Expected size:%s Received size:%s" %
+ (self.server_size, self.fetcher.len_received), 'http_client')
+ self.parent.download_failure(http.INTERNAL_SERVER_ERROR, "Data overrun")
# def handleResponse(self, buffer):
# if self.length == 0:
@@ -398,7 +391,13 @@
else:
host = self.proxy.host
port = self.proxy.port
- reactor.connectTCP(host, port, self, self.backendServer.backend.config.timeout)
+ self.read_limit = self.backendServer.backend.config.read_limit
+ if self.read_limit is None:
+ factory = self
+ else:
+ # Limit download rate
+ factory = policies.ThrottlingFactory(self, readLimit=self.read_limit)
+ reactor.connectTCP(host, port, factory, self.backendServer.backend.config.timeout)
return self.connectCallback
def buildProtocol(self, addr):
@@ -411,7 +410,9 @@
self.connectCallback.callback(None)
def clientConnectionFailed(self, connector, reason):
- self.instance.connectionFailed(reason)
+ #self.instance.connectionFailed(reason)
+ log.debug("clientConnectionFailed reason: %s" % (reason), "http-client")
+ self.connectCallback.errback(reason)
def clientConnectionLost(self, connector, reason):
log.debug("clientConnectionLost", "http-client")
if self.connection is not None and self.connection.fetcher is not None:
Modified: people/halls/rework/apt_proxy/test/test_apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_apt_proxy.py (original)
+++ people/halls/rework/apt_proxy/test/test_apt_proxy.py Thu Mar 2 12:04:21 2006
@@ -31,6 +31,7 @@
port=9999
address=
cleanup_freq=off
+max_versions=off
[backend1]
backends = http://a.b.c/d
@@ -78,6 +79,26 @@
shutil.rmtree(self.cache_dir)
self.assertRaises(OSError, os.stat, self.cache_dir)
+class FactoryTestHelper(apTestHelper):
+ """
+ Set up a cache dir and a factory
+ """
+ def setUp(self, config):
+ """
+ Set up a factory using the additional config given
+ """
+ apTestHelper.setUp(self)
+ config = self.config + '\n' + config
+ self.apConfig = apConfig(StringIO(config))
+ self.factory = Factory(self.apConfig)
+ self.factory.configurationChanged()
+
+ def tearDown(self):
+ self.factory.stopFactory()
+ del(self.factory)
+ apTestHelper.tearDown(self)
+ self.assertRaises(OSError, os.stat, self.cache_dir)
+
class FactoryInitTest(apTestHelper):
def setUp(self):
self.default_config = config1
@@ -104,7 +125,14 @@
shutil.rmtree(self.cache_dir)
def testFactoryStart(self):
factory = Factory(self.c)
+ self.assertEquals(factory.recycler, None)
factory.startFactory
+ self.assertEquals(factory.recycler, None)
+ def testPeriodicOff(self):
+ "Verify periodic callback is off"
+ factory = Factory(self.c)
+ factory.startFactory
+ self.assertEquals(factory.periodicCallback, None)
class ConfigChangeTest(unittest.TestCase):
def setUp(self):
@@ -138,3 +166,98 @@
self.loadNewConfig()
self.assertEquals(self.factory.backends.keys(), ['backend2', 'backend3', 'backend4', 'backend5'])
self.assertEquals(self.factory.backends['backend3'].uris[0].host, 'l.m.n')
+
+class FactoryFnsTest(FactoryTestHelper):
+ """
+ Set up a cache dir and a factory
+ """
+ def setUp(self):
+ """
+ Set up a factory using the additional config given
+ """
+ FactoryTestHelper.setUp(self, config1.replace("cleanup_freq=off", "cleanup_freq=1h"))
+
+ def testPeriodicControl(self):
+ "Start & stop periodic callback"
+ self.assertNotEquals(self.factory.periodicCallback, None)
+ self.factory.stopPeriodic()
+ self.assertEquals(self.factory.periodicCallback, None)
+ self.factory.startPeriodic()
+ self.assertNotEquals(self.factory.periodicCallback, None)
+ self.factory.stopPeriodic()
+ self.assertEquals(self.factory.periodicCallback, None)
+ def testPeriodic(self):
+ "Run periodic cleaning"
+ self.factory.startFactory() # Start recycler
+ self.factory.stopPeriodic() # Stop periodic callback
+ self.factory.periodic() # And trigger it manually
+ self.assertNotEquals(self.factory.periodicCallback, None)
+ self.factory.stopPeriodic() # Cancel new callback
+ self.assertEquals(self.factory.periodicCallback, None)
+
+
+ def testDumpDbs(self):
+ "Test that factory.dumpdbs() runs to completion"
+ self.factory.dumpdbs()
+
+class FactoryVersionsTest(FactoryTestHelper):
+ """
+ Set up a cache dir and a factory
+ """
+ def setUp(self):
+ """
+ Set up a factory using the additional config given
+ """
+ FactoryTestHelper.setUp(self, config1.replace("max_versions=off", "max_versions=2"))
+
+ def testFirstFileServed(self):
+ "Add non-.deb to databases"
+ file = 'debian/dists/stable/Release.gpg'
+ path = os.sep + file
+ self.failIf(self.factory.access_times.has_key(path))
+ self.factory.file_served(path)
+ self.failUnless(self.factory.access_times.has_key(path))
+ # This is not a versioned file
+ self.failIf(self.factory.packages.has_key(path))
+
+ def testDebServed1(self):
+ "Add new .deb to databases"
+ file = 'debian/nonexistent_1.0.deb'
+ path = os.sep + file
+ packagename = 'nonexistent'
+ self.failIf(self.factory.access_times.has_key(path))
+ self.failIf(self.factory.packages.has_key(packagename))
+ self.factory.file_served(path)
+ self.failUnless(self.factory.access_times.has_key(path))
+ # This is not a versioned file
+ self.failUnless(self.factory.packages.has_key(packagename))
+ pkgs = self.factory.packages[packagename]
+ self.assertEquals(len(pkgs), 1)
+
+ def testDebServed2(self):
+ "Add two .debs to databases"
+ file1 = 'debian/nonexistent_1.0.deb'
+ file2 = file1.replace('1.0', '1.1')
+ packagename = 'nonexistent'
+ self.factory.file_served(file1)
+ self.factory.file_served(file2)
+ self.failUnless(self.factory.packages.has_key(packagename))
+ pkgs = self.factory.packages[packagename]
+ self.assertEquals(len(pkgs), 2)
+
+ def testDebServed3(self):
+ "Test max_versions algorithm"
+ files = []
+ versions = ['0.0.1', '0.0.2', '0.0.3']
+ packagename = 'apt'
+ os.mkdir(self.cache_dir + os.sep + 'backend1')
+ for ver in versions:
+ package_filename='apt_'+ver+'_test.deb'
+ file = 'backend1'+os.sep+package_filename
+ shutil.copy2('../test_data/packages/'+package_filename, self.cache_dir + os.sep + file)
+ self.factory.file_served(file)
+ files.append(file)
+ pkgs = self.factory.packages[packagename]
+ # Max versions should have deleted one file
+ self.assertEquals(len(pkgs), 2)
+
Modified: people/halls/rework/apt_proxy/test/test_cache.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_cache.py (original)
+++ people/halls/rework/apt_proxy/test/test_cache.py Thu Mar 2 12:04:21 2006
@@ -96,7 +96,7 @@
self.backend.start_download = start_download
self.entry.add_request(self.request)
# Check that our special function was called
- self.assertTrue(self.entry.entry_download_triggered)
+ self.failUnless(self.entry.entry_download_triggered)
def testCachedFile(self):
"""
@@ -119,8 +119,8 @@
#print "iterate.."
reactor.iterate(0.1)
# Check that our special function was not called
- self.assertFalse(entry.test_download)
- self.assertTrue(self.request.streamed)
+ self.failIf(entry.test_download)
+ self.failUnless(self.request.streamed)
def testVerifyFail(self):
"""
@@ -144,26 +144,28 @@
#print "iterate.."
reactor.iterate(0.1)
# Check that our special function was not called
- self.assertTrue(entry.test_download)
- self.assertFalse(self.request.streamed)
+ self.failUnless(entry.test_download)
+ self.failIf(self.request.streamed)
+ self.assertEquals(entry.file_mtime, None)
+ self.assertEquals(entry.file_size, None)
def testCheckAgeImmutable(self):
# testfile.deb is immutable
self.entry.file_mtime = 0
- self.assertTrue(self.entry.check_age())
+ self.failUnless(self.entry.check_age())
self.entry.file_mtime = time.time()+1000
- self.assertTrue(self.entry.check_age())
+ self.failUnless(self.entry.check_age())
def testCheckAgeMmutable(self):
# pretend that testfile.deb is immutable, i.e.
# it will be updated like Packages, Release
self.entry.filetype.mutable = True
self.entry.file_mtime = 0
- self.assertFalse(self.entry.check_age())
+ self.failIf(self.entry.check_age())
self.entry.file_mtime = time.time()+1000
- self.assertTrue(self.entry.check_age())
+ self.failUnless(self.entry.check_age())
def testCreateDirectory(self):
dirname = self.cache_dir+"/files/testdir"
Modified: people/halls/rework/apt_proxy/test/test_packages.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_packages.py (original)
+++ people/halls/rework/apt_proxy/test/test_packages.py Thu Mar 2 12:04:21 2006
@@ -94,7 +94,7 @@
def get_test_deb_name():
"Return filename of test deb file"
debs = glob.glob('../test_data/packages/apt_*_*.deb')
- return debs[0]
+ return debs[-1]
def get_test_deb_info():
"Return an AptDpkgInfo for our test deb"
Modified: people/halls/rework/apt_proxy/test/test_requests.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_requests.py (original)
+++ people/halls/rework/apt_proxy/test/test_requests.py Thu Mar 2 12:04:21 2006
@@ -34,11 +34,13 @@
"""
def __init__(self, factory):
self.factory = factory
+ self.http_status = None
+
def connectionMade(self):
"""
Http connection made
"""
- # print "connection made! requesting:", self.factory.filename
+ log.debug("connection made to test apt-proxy server. requesting:" + self.factory.filename, 'uriRequesterTest')
self.sendCommand("GET", self.factory.filename)
self.sendHeader('host', self.factory.host)
self.endHeaders()
@@ -50,8 +52,8 @@
self.http_status = int(code)
- #def dataReceived(self, data):
- # print "Data received: "+data
+ def dataReceived(self, data):
+ log.debug("data received, len: %s" % (len(data)), 'uriRequesterTest')
def handleResponse(self, buffer):
log.debug('data received: %s bytes' % (len(buffer)), 'uriRequesterTest')
self.received_len = len(buffer)
@@ -72,7 +74,7 @@
self.testPassed = False
self.timedOut = False
self.expectedResponse = expectedResponse
- self.timeout = reactor.callLater(5, self.timeout)
+ self.timeout = reactor.callLater(30, self.timeout)
#def startedConnecting(self, connector):
# print 'Started to connect.'
def buildProtocol(self, addr):
@@ -96,6 +98,7 @@
def timeout(self):
# print 'Test timeout'
+ log.debug("Test timeout", 'uriRequesterTest')
self.timeout = None
self.testDone = True
self.timedOut = True
@@ -125,8 +128,9 @@
def doRequest(self, file, responseCode):
portno = self.port.getHost().port
- clientFactory = requestFactory(file, "localhost:%s"% (portno), responseCode)
- connection = reactor.connectTCP("localhost", portno, clientFactory)
+ log.debug("Starting test connection to 127.0.0.1:%s, file:%s:" %(portno, file), 'uriRequesterTest')
+ clientFactory = requestFactory(file, "127.0.0.1:%s"% (portno), responseCode)
+ connection = reactor.connectTCP("127.0.0.1", portno, clientFactory)
while clientFactory.testDone == False:
#print "iterate.."
@@ -153,7 +157,13 @@
def testNotFound(self):
self.doRequest('/files/test.gz', http.NOT_FOUND)
def testPackagesFile(self):
+ import twisted
+ twisted.internet.base.DelayedCall.debug = True
self.doRequest('/files/Packages.gz', http.OK)
+ backend = self.factory.getBackend('files')
+ # Check that request was deleted from backend
+ self.assertEquals(len(backend.entries), 0)
+
def testForbidden(self):
self.doRequest('/notbackend/Release', http.NOT_FOUND)
@@ -202,3 +212,21 @@
log.debug("Downloading second copy", 'DebianHttpBackendTest')
self.factory.config.min_refresh_delay = 0
self.doRequest(file, http.OK)
+
+ def testTimeout(self):
+ pass
+ testTimeout.todo = True
+
+ def setFileTime(self):
+ "cache file modification time is set to same time as server time"
+ pass
+ setFileTime.todo = True
+
+ def doubleDownload(self):
+ "download, delete from cache, re-request file"
+ pass
+ doubleDownload.todo = True
+
+ # More TODO tests:
+ # - file mtime is same as server mtime
+ # - correct file path is entered in databases after download
\ No newline at end of file
Modified: people/halls/rework/runtests
==============================================================================
--- people/halls/rework/runtests (original)
+++ people/halls/rework/runtests Thu Mar 2 12:04:21 2006
@@ -3,10 +3,21 @@
set -e
testfiles="packages/Packages packages/Packages.gz packages/Packages.bz2
- verify/invalid-gzip.gz"
+ verify/invalid-gzip.gz packages/apt_0.0.1_test.deb packages/apt_0.0.2_test.deb packages/apt_0.0.3_test.deb"
rootdir="$(cd $(dirname $0); pwd)"
testdir="$rootdir/test_data"
+make_pkg()
+{
+ dir=$1
+ version=$2
+ pkgdir=${dir}_${version}_test
+ cp -ra $dir $pkgdir
+ sed "s/^Version: .*/Version: ${version}/" < $dir/DEBIAN/control > $pkgdir/DEBIAN/control
+ dpkg --build $pkgdir
+ rm -r $pkgdir
+}
+
make_test_data()
{
mkdir -p $testdir
@@ -24,10 +35,17 @@
fi
echo "Creating test data"
+ [ ! -d $testdir/packages ] || rm -r $testdir/packages
mkdir -p $testdir/packages
cd $testdir/packages
- echo Creating apt package from system
- fakeroot -u dpkg-repack apt
+ echo Creating apt packages from system
+ fakeroot -u dpkg-repack --generate apt
+ mv dpkg-repack* apt
+ make_pkg apt 0.0.1
+ make_pkg apt 0.0.2
+ make_pkg apt 0.0.3
+ rm -r apt
+
echo Creating Packages file for package
dpkg-scanpackages . /dev/null | tee Packages | gzip -c > Packages.gz
bzip2 -c < Packages > Packages.bz2
@@ -46,4 +64,5 @@
if [ $# -eq 0 ]; then
set -- apt_proxy.test
fi
-PYTHONPATH="`pwd`" trial --verbose --logfile `pwd`/unitttests.log $@
+set -x
+PYTHONPATH="`pwd`" trial --logfile `pwd`/unitttests.log $@
More information about the apt-proxy-devel
mailing list