[apt-proxy-devel] r598 - in people/halls/rework: . apt_proxy apt_proxy/test
Chris Halls
halls at costa.debian.org
Mon Mar 20 14:40:09 UTC 2006
Author: halls
Date: Mon Mar 20 14:40:08 2006
New Revision: 598
Modified:
people/halls/rework/apt_proxy/apt_proxy.py
people/halls/rework/apt_proxy/fetchers.py
people/halls/rework/apt_proxy/test/test_apt_proxy.py
people/halls/rework/apt_proxy/test/test_requests.py
people/halls/rework/runtests
Log:
* Queues now close their fetcher when no new requests have been received for a certain time
* Updates to tests
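For reference, the idle-close behaviour uses the usual Twisted pattern: schedule a DelayedCall with reactor.callLater() when the queue drains, and cancel it when new work arrives. A minimal sketch of that pattern (illustrative only, not the apt-proxy code in the diff below; the fetcher object and its disconnect() method are stand-ins):

from twisted.internet import reactor

class IdleQueue:
    closeTimeout = 5   # seconds of inactivity before the connection is closed

    def __init__(self, fetcher):
        self.queue = []
        self.fetcher = fetcher    # stand-in for the backend fetcher
        self.timeoutCB = None     # pending DelayedCall, if any

    def addFile(self, entry):
        # New work arrived: cancel a pending close if one is scheduled
        if self.timeoutCB is not None and self.timeoutCB.active():
            self.timeoutCB.cancel()
            self.timeoutCB = None
        self.queue.append(entry)

    def queueEmpty(self):
        # Queue drained: close the fetcher after closeTimeout seconds of idleness
        self.timeoutCB = reactor.callLater(self.closeTimeout, self.closeFetcher)

    def closeFetcher(self):
        self.timeoutCB = None
        if self.fetcher is not None:
            self.fetcher.disconnect()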
Modified: people/halls/rework/apt_proxy/apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/apt_proxy.py (original)
+++ people/halls/rework/apt_proxy/apt_proxy.py Mon Mar 20 14:40:08 2006
@@ -55,6 +55,7 @@
name = None
def __init__(self, factory, config):
+ log.debug("Creating Backend: " + config.name)
self.factory = factory
self.config = config # apBackendConfig configuration information
self.base = config.name # Name of backend
@@ -63,7 +64,6 @@
self.entries = {} # Hash of active cache entries
self.packages = None # Packages database for this backend
- log.debug("Created Backend: " + self.base)
for uri in config.backends:
self.addURI(uri)
@@ -593,8 +593,13 @@
setattr(self.databases, db, None)
def stopFactory(self):
+ log.debug('Main factory stop', 'factory')
import packages
# self.dumpdbs()
+ for b in self.backends.values():
+ log.debug('backend: %s queue: %s' %(b, b.queue))
+ del b.queue
+ b.queue = None
self.backends = {}
packages.cleanup(self)
if self.recycler is not None:
Modified: people/halls/rework/apt_proxy/fetchers.py
==============================================================================
--- people/halls/rework/apt_proxy/fetchers.py (original)
+++ people/halls/rework/apt_proxy/fetchers.py Mon Mar 20 14:40:08 2006
@@ -977,28 +977,46 @@
"""
This class manages a list of files to download and schedules downloads
"""
-
+ closeTimeout = 5 # Time before closing fetcher connections after the last download (seconds)
def __init__(self):
+ log.debug("-------new queue: %s" % (self), 'DownloadQueue')
+ #import traceback
+ #traceback.print_stack()
self.queue = [] # List of cacheEntry classes waiting
self.activeFile = None
self.fetcher = None
+ self.timeoutCB = None
def addFile(self, cacheEntry):
"""
Add a file to the queue and start downloading if necessary
+ @param cacheEntry Cache entry of file to download
+ @return Deferred that is triggered when file has been downloaded
"""
+ if len(self.queue) == 0 and self.timeoutCB is not None:
+ log.debug("cancel timeout: %s, %s" % (self, self.timeoutCB), 'DownloadQueue')
+ self.timeoutCB.cancel()
self.queue.append(cacheEntry)
if self.activeFile is None:
self.startNextDownload()
else:
log.debug("queue file " + cacheEntry.cache_path, 'DownloadQueue')
-
+
+ def downloadFinished(self, result):
+ success, message = result
+ if success:
+ log.debug("download complete", 'DownloadQueue')
+ else:
+ log.debug("download failed: %s" % (message), 'DownloadQueue')
+ self.activeFile = None
+ self.startNextDownload()
+
def startNextDownload(self):
if len(self.queue)>0:
log.debug("start next download", 'DownloadQueue')
self.activeFile = self.queue[0]
self.queue = self.queue[1:]
-
+
if self.fetcher is not None:
if self.fetcher.backendServer.backend != self.activeFile.backend:
log.debug("old:%s new:%s" %(self.fetcher.backendServer.backend,self.activeFile.backend)
@@ -1014,13 +1032,28 @@
d = self.fetcher.start(self.activeFile)
d.addCallback(self.downloadFinished)
else:
+ #twisted.internet.base.DelayedCall.debug = True
log.debug("download queue is empty", 'DownloadQueue')
+ if self.closeTimeout:
+ self.timeoutCB = reactor.callLater(self.closeTimeout, self.closeFetcher)
+ log.debug("create timeout: %s, %s" % (self, self.timeoutCB), 'DownloadQueue')
+ print "ACTIVE" + str(self.timeoutCB)
+ #timeoutCB.cancel()
- def downloadFinished(self, result):
- success, message = result
- if success:
- log.debug("download complete", 'DownloadQueue')
- else:
- log.debug("download failed: %s" % (message), 'DownloadQueue')
- self.activeFile = None
- self.startNextDownload()
+ def closeFetcher(self):
+ "Close active fetcher - called after queue has been empty for closeTimeout seconds"
+
+ log.debug("closing fetcher", 'DownloadQueue')
+
+ self.timeoutCB = None
+ if self.fetcher is not None:
+ log.debug("queue is idle - closing fetcher", 'DownloadQueue')
+ self.fetcher.disconnect()
+
+ def stop(self):
+ log.debug("stop queue: %s, %s" % (self, self.timeoutCB), 'DownloadQueue')
+ if self.timeoutCB is not None:
+ log.debug("cancel timeout %s" (dir(self.timeoutCB)), 'DownloadQueue')
+ print dir(self.timeoutCB)
+ #self.timeoutCB.cancel()
+ self.closeFetcher()
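A side note on the commented-out cancel() in stop() above: DelayedCall.cancel() raises AlreadyCalled or AlreadyCancelled if the call has already fired or been cancelled, so the usual idiom is to guard it with active(). A possible shape for stop(), sketched here as a suggestion rather than as part of this commit:

    def stop(self):
        # Cancel the pending close only if it is still scheduled;
        # cancel() raises AlreadyCalled/AlreadyCancelled otherwise.
        if self.timeoutCB is not None and self.timeoutCB.active():
            self.timeoutCB.cancel()
        self.timeoutCB = None
        self.closeFetcher()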
Modified: people/halls/rework/apt_proxy/test/test_apt_proxy.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_apt_proxy.py (original)
+++ people/halls/rework/apt_proxy/test/test_apt_proxy.py Mon Mar 20 14:40:08 2006
@@ -254,7 +254,7 @@
for ver in versions:
package_filename='apt_'+ver+'_test.deb'
file = 'backend1'+os.sep+package_filename
- shutil.copy2('../test_data/packages/'+package_filename, self.cache_dir + os.sep + file)
+ shutil.copy2('../test_data/apt/'+package_filename, self.cache_dir + os.sep + file)
self.factory.file_served(file)
files.append(file)
pkgs = self.factory.packages[packagename]
Modified: people/halls/rework/apt_proxy/test/test_requests.py
==============================================================================
--- people/halls/rework/apt_proxy/test/test_requests.py (original)
+++ people/halls/rework/apt_proxy/test/test_requests.py Mon Mar 20 14:40:08 2006
@@ -123,7 +123,7 @@
log.debug("config:\n" + config, 'TestRequestHelper')
self.c = apConfig(StringIO(config))
self.factory = Factory(self.c)
- self.factory.configurationChanged()
+ #self.factory.configurationChanged()
self.port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
def tearDown(self):
@@ -236,7 +236,7 @@
self.downloadFile()
# Check access time database was updated
- self.assertApproximates(self.factory.access_times[self.filename], time.time(), 2)
+ self.assertApproximates(self.factory.access_times[self.filename], time.time(), 6)
def testNotModifiedGreater(self):
"Check http not modified is sent for new file"
Modified: people/halls/rework/runtests
==============================================================================
--- people/halls/rework/runtests (original)
+++ people/halls/rework/runtests Mon Mar 20 14:40:08 2006
@@ -3,7 +3,9 @@
set -e
testfiles="packages/Packages packages/Packages.gz packages/Packages.bz2
- verify/invalid-gzip.gz packages/apt_0.0.1_test.deb packages/apt_0.0.2_test.deb packages/apt_0.0.3_test.deb"
+ verify/invalid-gzip.gz
+ packages/apt_0.0.1_test.deb
+ apt/apt_0.0.1_test.deb apt/apt_0.0.2_test.deb apt/apt_0.0.3_test.deb"
rootdir="$(cd $(dirname $0); pwd)"
testdir="$rootdir/test_data"
@@ -35,9 +37,9 @@
fi
echo "Creating test data"
- [ ! -d $testdir/packages ] || rm -r $testdir/packages
- mkdir -p $testdir/packages
- cd $testdir/packages
+ [ ! -d $testdir/apt ] || rm -r $testdir/apt
+ mkdir -p $testdir/apt
+ cd $testdir/apt
echo Creating apt packages from system
fakeroot -u dpkg-repack --generate apt
mv dpkg-repack* apt
@@ -46,9 +48,19 @@
make_pkg apt 0.0.3
rm -r apt
+ echo Creating Packages file for apt directory
+ dpkg-scanpackages . /dev/null | tee Packages | gzip -c > Packages.gz
+ bzip2 -c < Packages > Packages.bz2
+ cd ..
+
+ [ ! -d $testdir/packages ] || rm -r $testdir/packages
+ mkdir -p $testdir/packages
+ cd $testdir/packages
+ cp ../apt/apt_0.0.1_test.deb .
echo Creating Packages file for package
dpkg-scanpackages . /dev/null | tee Packages | gzip -c > Packages.gz
bzip2 -c < Packages > Packages.bz2
+ cd ..
mkdir -p $testdir/verify
cd $testdir/verify
@@ -64,5 +76,6 @@
if [ $# -eq 0 ]; then
set -- apt_proxy.test
fi
+rm -f `pwd`/unittests.log
set -x
-PYTHONPATH="`pwd`" trial --logfile `pwd`/unitttests.log $@
+PYTHONPATH="`pwd`" trial --logfile `pwd`/unittests.log $@