r258 - in /debtorrent/trunk: ./ DebTorrent/BT1/AptListener.py DebTorrent/BT1/track.py DebTorrent/HTTPCache.py DebTorrent/HTTPHandler.py DebTorrent/SocketHandler.py DebTorrent/launchmanycore.py TODO test.py
camrdale-guest at users.alioth.debian.org
Thu Aug 16 17:29:38 UTC 2007
Author: camrdale-guest
Date: Thu Aug 16 17:29:38 2007
New Revision: 258
URL: http://svn.debian.org/wsvn/debtorrent/?sc=1&rev=258
Log:
Merged revisions 202-242,244-257 via svnmerge from
svn+ssh://camrdale-guest@svn.debian.org/svn/debtorrent/debtorrent/branches/http1.1
........
r202 | camrdale-guest | 2007-08-05 22:11:35 -0700 (Sun, 05 Aug 2007) | 1 line
Switch the AptListener to queue requests by file name and then connection to allow for multiple requests per HTTP connection.
........
r203 | camrdale-guest | 2007-08-06 16:17:57 -0700 (Mon, 06 Aug 2007) | 1 line
Upgrade the HTTP server to support HTTP/1.1 connections, including persistent connections and pipelining.
........
r247 | camrdale-guest | 2007-08-14 18:23:35 -0700 (Tue, 14 Aug 2007) | 1 line
Introduce protocol tracking in anticipation of new protocols.
........
r249 | camrdale-guest | 2007-08-14 20:14:09 -0700 (Tue, 14 Aug 2007) | 1 line
Add support for the DEBTORRENT protocol.
........
r251 | camrdale-guest | 2007-08-15 00:02:30 -0700 (Wed, 15 Aug 2007) | 1 line
Return DEBTORRENT requests in any order.
........
r257 | camrdale-guest | 2007-08-16 02:59:54 -0700 (Thu, 16 Aug 2007) | 1 line
Update the HTTP cache so it does not spawn too many request threads at a time.
........
Modified:
debtorrent/trunk/ (props changed)
debtorrent/trunk/DebTorrent/BT1/AptListener.py
debtorrent/trunk/DebTorrent/BT1/track.py
debtorrent/trunk/DebTorrent/HTTPCache.py
debtorrent/trunk/DebTorrent/HTTPHandler.py
debtorrent/trunk/DebTorrent/SocketHandler.py
debtorrent/trunk/DebTorrent/launchmanycore.py
debtorrent/trunk/TODO
debtorrent/trunk/test.py
Propchange: debtorrent/trunk/
------------------------------------------------------------------------------
--- svnmerge-integrated (original)
+++ svnmerge-integrated Thu Aug 16 17:29:38 2007
@@ -1,1 +1,1 @@
-/debtorrent/branches/http1.1:1-200 /debtorrent/branches/unique:1-204
+/debtorrent/branches/http1.1:1-257 /debtorrent/branches/unique:1-204
Modified: debtorrent/trunk/DebTorrent/BT1/AptListener.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/DebTorrent/BT1/AptListener.py?rev=258&op=diff
==============================================================================
--- debtorrent/trunk/DebTorrent/BT1/AptListener.py (original)
+++ debtorrent/trunk/DebTorrent/BT1/AptListener.py Thu Aug 16 17:29:38 2007
@@ -93,11 +93,13 @@
are lists of L{DebTorrent.HTTPHandler.HTTPConnection} objects which are the
requests that are pending for that path.
@type request_queue: C{dictionary}
- @ivar request_queue: the pending HTTP get requests that are waiting for download.
- Keys are L{DebTorrent.HTTPHandler.HTTPConnection} objects, values are
- (L{DebTorrent.download_bt1.BT1Download}, C{int}, C{list} of C{int}, C{float})
- which are the torrent downloader, file index, list of pieces needed, and
- the time of the original request.
+ @ivar request_queue: the pending HTTP package requests that are waiting for download.
+ Keys are the file names (including mirror) requested, values are dictionaries
+ with keys of L{DebTorrent.HTTPHandler.HTTPConnection} objects and values of
+ (L{DebTorrent.download_bt1.BT1Download}, C{int},
+ L{DebTorrent.HTTPHandler.HTTPRequest}, C{list} of C{int}, C{float})
+ which are the torrent downloader, file index, HTTP request object to answer,
+ list of pieces needed, and the time of the original request.
"""
@@ -148,26 +150,34 @@
self.request_queue = {}
rawserver.add_task(self.process_queue, 1)
- def enqueue_request(self, connection, downloader, file_num, pieces_needed):
+ def enqueue_request(self, connection, file, downloader, file_num, httpreq, pieces_needed):
"""Add a new download request to the queue of those waiting for pieces.
@type connection: L{DebTorrent.HTTPHandler.HTTPConnection}
@param connection: the connection the request came in on
+ @type file: C{string}
+ @param file: the file to download, starting with the mirror name
@type downloader: L{DebTorrent.download_bt1.BT1Download}
@param downloader: the torrent download that has the file
@type file_num: C{int}
@param file_num: the index of the file in the torrent
+ @type httpreq: L{DebTorrent.HTTPHandler.HTTPRequest}
+ @param httpreq: the HTTP request object to answer (for queueing)
@type pieces_needed: C{list} of C{int}
@param pieces_needed: the list of pieces in the torrent that still
need to download
"""
- assert not self.request_queue.has_key(connection)
-
- logger.info('queueing request as file '+str(file_num)+' needs pieces: '+str(pieces_needed))
-
- self.request_queue[connection] = (downloader, file_num, pieces_needed, clock())
+ # Get the file's queue and check it for this connection
+ queue = self.request_queue.setdefault(file, {})
+ if connection in queue:
+ logger.error('Received multiple requests for the same file on one connection')
+ return
+
+ logger.info('queueing request as file '+file+' needs pieces: '+str(pieces_needed))
+
+ queue[connection] = (downloader, file_num, httpreq, pieces_needed, clock())
def process_queue(self):
"""Process the queue of waiting requests."""
@@ -176,29 +186,32 @@
self.rawserver.add_task(self.process_queue, 1)
closed_conns = []
- for c, v in self.request_queue.items():
- # Check for a closed connection
- if c.closed:
- closed_conns.append(c)
- logger.warning('connection closed while request queued for file '+str(v[1]))
- continue
-
- # Remove the downloaded pieces from the list of needed ones
- for piece in list(v[2]):
- if v[0].storagewrapper.do_I_have(piece):
- logger.debug('queued request for file '+str(v[1])+' got piece '+str(piece))
- v[2].remove(piece)
+ for file, queue in self.request_queue.items():
+ for c, v in queue.items():
+ # Check for a closed connection
+ if c.closed:
+ closed_conns.append((file, c))
+ logger.warning('connection closed while request queued for file '+file)
+ continue
- # If no more pieces are needed, return the answer and remove the request
- if not v[2]:
- logger.info('queued request for file '+str(v[1])+' is complete')
- del self.request_queue[c]
- v[0].storagewrapper.set_file_readonly(v[1])
- self.answer_package(c, v[0], v[1])
-
- # Remove closed connections from the queue
- for c in closed_conns:
- del self.request_queue[c]
+ # Remove the downloaded pieces from the list of needed ones
+ for piece in list(v[3]):
+ if v[0].storagewrapper.do_I_have(piece):
+ logger.debug('queued request for file '+file+' got piece '+str(piece))
+ v[3].remove(piece)
+
+ # If no more pieces are needed, return the answer and remove the request
+ if not v[3]:
+ logger.info('queued request for file '+file+' is complete')
+ closed_conns.append((file, c))
+ v[0].storagewrapper.set_file_readonly(v[1])
+ self.answer_package(c, file, v[0], v[1], v[2])
+
+ # Remove closed/finished connections from the queue
+ for (file, c) in closed_conns:
+ self.request_queue[file].pop(c)
+ if not self.request_queue[file]:
+ self.request_queue.pop(file)
def get_infopage(self):
@@ -307,7 +320,7 @@
return (200, 'OK', {'Server': VERSION, 'Content-Type': 'text/html; charset=iso-8859-1'}, """<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n<html><head><title>Meow</title>\n</head>\n<body style="color: rgb(255, 255, 255); background-color: rgb(0, 0, 0);">\n<div><big style="font-weight: bold;"><big><big><span style="font-family: arial,helvetica,sans-serif;">I IZ TAKIN BRAKE</span></big></big></big><br></div>\n<pre><b><tt> .-o=o-.<br> , /=o=o=o=\ .--.<br> _|\|=o=O=o=O=| \<br> __.' a`\=o=o=o=(`\ /<br> '. a 4/`|.-""'`\ \ ;'`) .---.<br> \ .' / .--' |_.' / .-._)<br> `) _.' / /`-.__.' /<br> `'-.____; /'-.___.-'<br> `\"""`</tt></b></pre>\n<div><big style="font-weight: bold;"><big><big><span style="font-family: arial,helvetica,sans-serif;">FRM GETIN UR PACKAGES</span></big></big></big><br></div>\n</body>\n</html>""")
- def get_cached(self, connection, path, headers):
+ def get_cached(self, connection, path, headers, httpreq):
"""Proxy the (possibly cached) download of a file from a mirror.
@type connection: L{DebTorrent.HTTPHandler.HTTPConnection}
@@ -316,6 +329,8 @@
@param path: the path of the file to download, starting with the mirror name
@type headers: C{dictionary}
@param headers: the headers from the request
+ @type httpreq: L{DebTorrent.HTTPHandler.HTTPRequest}
+ @param httpreq: the HTTP request object to answer (for queueing)
@rtype: (C{int}, C{string}, C{dictionary}, C{string})
@return: the HTTP status code, status message, headers, and downloaded file
(or None if the file is being downloaded)
@@ -335,10 +350,10 @@
if r[0] not in (200, 304):
# Get Debs from the debtorrent download, others are straight download
if path[-1][-4:] == '.deb':
- return self.get_package(connection, path)
+ return self.get_package(connection, path, httpreq)
else:
# Save the connection info and start downloading the file
- self.cache_waiting.setdefault('/'.join(path), []).append(connection)
+ self.cache_waiting.setdefault('/'.join(path), []).append((connection, httpreq))
self.Cache.download_get(path, self.get_cached_callback)
return None
@@ -383,21 +398,23 @@
if r[0] == 200 and path[-1] in ('Packages', 'Packages.gz', 'Packages.bz2'):
self.got_Packages(path, r[3])
- for connection in connections:
+ for (connection, httpreq) in connections:
# Check to make sure the requester is still waiting
if connection.closed:
logger.warning('Retrieved the file, but the requester is gone: '+'/'.join(path))
continue
- connection.answer(r)
-
- def get_package(self, connection, path):
+ connection.answer(r, httpreq)
+
+ def get_package(self, connection, path, httpreq):
"""Download a package file from a torrent.
@type connection: L{DebTorrent.HTTPHandler.HTTPConnection}
@param connection: the connection the request came in on
@type path: C{list} of C{string}
@param path: the path of the file to download, starting with the mirror name
+ @type httpreq: L{DebTorrent.HTTPHandler.HTTPRequest}
+ @param httpreq: the HTTP request object to answer (for queueing)
@rtype: (C{int}, C{string}, C{dictionary}, C{string})
@return: the HTTP status code, status message, headers, and package data
(or None if the package is to be downloaded)
@@ -419,7 +436,9 @@
if not d.storagewrapper.do_I_have(piece):
pieces_needed.append(piece)
elif not pieces_needed:
- data = data + d.storagewrapper.get_piece(piece, 0, -1).getarray().tostring()
+ piecebuf = d.storagewrapper.get_piece(piece, 0, -1)
+ data += piecebuf.getarray().tostring()
+ piecebuf.release()
if not pieces_needed:
return (200, 'OK', {'Server': VERSION, 'Content-Type': 'text/plain'}, data)
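
[Editor's note: the small change above matters for memory use: the piece buffer is now explicitly released after its contents are copied out, instead of being left for the garbage collector. A hedged sketch of the pattern; the storagewrapper argument is whatever object provides get_piece(), as in the diff.]

    def read_piece(storagewrapper, piece):
        # Copy the piece data out, then release the reusable buffer so the
        # pool can recycle it instead of keeping one buffer alive per read.
        piecebuf = storagewrapper.get_piece(piece, 0, -1)
        try:
            return piecebuf.getarray().tostring()
        finally:
            piecebuf.release()
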
@@ -436,20 +455,24 @@
d.fileselector.set_priority(f, 1)
# Add the connection to the list of those needing responses
- self.enqueue_request(connection, d, f, pieces_needed)
+ self.enqueue_request(connection, '/'.join(path), d, f, httpreq, pieces_needed)
return None
- def answer_package(self, connection, d, f):
+ def answer_package(self, connection, file, d, f, httpreq):
"""Send the newly downloaded package file to the requester.
@type connection: L{DebTorrent.HTTPHandler.HTTPConnection}
@param connection: the connection the request came in on
+ @type file: C{string}
+ @param file: the file to download, starting with the mirror name
@type d: L{DebTorrent.download_bt1.BT1Download}
@param d: the torrent download that has the file
@type f: C{int}
@param f: the index of the file in the torrent
+ @type httpreq: L{DebTorrent.HTTPHandler.HTTPRequest}
+ @param httpreq: the HTTP request object to answer (for queueing)
"""
@@ -465,15 +488,17 @@
if not d.storagewrapper.do_I_have(piece):
pieces_needed.append(piece)
elif not pieces_needed:
- data = data + d.storagewrapper.get_piece(piece, 0, -1).getarray().tostring()
+ piecebuf = d.storagewrapper.get_piece(piece, 0, -1)
+ data += piecebuf.getarray().tostring()
+ piecebuf.release()
if not pieces_needed:
- connection.answer((200, 'OK', {'Server': VERSION, 'Content-Type': 'text/plain'}, data))
+ connection.answer((200, 'OK', {'Server': VERSION, 'Content-Type': 'text/plain'}, data), httpreq)
return
# Something strange has happened, requeue it
logger.warning('requeuing request for file '+str(f)+' as it still needs pieces: '+str(pieces_needed))
- self.enqueue_request(connection, d, f, pieces_needed)
+ self.enqueue_request(connection, file, d, f, httpreq, pieces_needed)
def got_Packages(self, path, data):
@@ -596,7 +621,7 @@
response)
- def get(self, connection, path, headers):
+ def get(self, connection, path, headers, httpreq):
"""Respond to a GET request.
Process a GET request from APT/browser/other. Process the request,
@@ -609,6 +634,8 @@
@param path: the URL being requested
@type headers: C{dictionary}
@param headers: the headers from the request
+ @type httpreq: L{DebTorrent.HTTPHandler.HTTPRequest}
+ @param httpreq: the HTTP request object to answer (for queueing)
@rtype: (C{int}, C{string}, C{dictionary}, C{string})
@return: the HTTP status code, status message, headers, and message body
@@ -680,7 +707,7 @@
if 'Packages.diff' in path:
return (404, 'Not Found', {'Server': VERSION, 'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
- return self.get_cached(connection, path, headers)
+ return self.get_cached(connection, path, headers, httpreq)
except ValueError, e:
logger.exception('Bad request from: '+ip)
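
[Editor's note: for reference, a simplified, hypothetical model of the new process_queue() pass over that structure: closed connections are dropped, downloaded pieces are removed from each request's needed list, completed requests are answered, and empty per-file queues are cleaned up. have_piece() and answer() stand in for the storagewrapper check and the answer_package() call in the real code.]

    def process_queue(request_queue, have_piece, answer):
        finished = []
        for filename, per_file in request_queue.items():
            for conn, (dl, f, httpreq, needed, start) in per_file.items():
                if conn.closed:
                    finished.append((filename, conn))
                    continue
                needed[:] = [p for p in needed if not have_piece(dl, p)]
                if not needed:
                    finished.append((filename, conn))
                    answer(conn, filename, dl, f, httpreq)
        # Remove finished/closed entries, and drop files with no waiters left
        for filename, conn in finished:
            request_queue[filename].pop(conn, None)
            if not request_queue[filename]:
                request_queue.pop(filename)
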
Modified: debtorrent/trunk/DebTorrent/BT1/track.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/DebTorrent/BT1/track.py?rev=258&op=diff
==============================================================================
--- debtorrent/trunk/DebTorrent/BT1/track.py (original)
+++ debtorrent/trunk/DebTorrent/BT1/track.py Thu Aug 16 17:29:38 2007
@@ -1126,7 +1126,7 @@
return data
- def get(self, connection, path, headers):
+ def get(self, connection, path, headers, httpreq):
"""Respond to a GET request to the tracker.
Process a GET request from a peer/tracker/browser. Process the request,
@@ -1139,6 +1139,8 @@
@param path: the URL being requested
@type headers: C{dictionary}
@param headers: the headers from the request
+ @type httpreq: L{DebTorrent.HTTPHandler.HTTPRequest}
+ @param httpreq: not used since HTTP 1.1 is not used by the tracker
@rtype: (C{int}, C{string}, C{dictionary}, C{string})
@return: the HTTP status code, status message, headers, and message body
Modified: debtorrent/trunk/DebTorrent/HTTPCache.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/DebTorrent/HTTPCache.py?rev=258&op=diff
==============================================================================
--- debtorrent/trunk/DebTorrent/HTTPCache.py (original)
+++ debtorrent/trunk/DebTorrent/HTTPCache.py Thu Aug 16 17:29:38 2007
@@ -13,12 +13,16 @@
@var VERSION: the UserAgent identifier sent to all sites
@type alas: C{string}
@var alas: the message to send when the data is not found
+@type TIMEOUT: C{float}
+@var TIMEOUT: the number of seconds after which an idle connection is closed
"""
-from httplib import HTTPConnection
+from httplib import HTTPConnection, BadStatusLine
+from socket import gaierror
from threading import Thread
from DebTorrent.__init__ import product_name,version_short
+from clock import clock
from os.path import join, split, getmtime, getsize, exists
from os import utime, makedirs, listdir
from time import strftime, strptime, gmtime
@@ -35,41 +39,24 @@
time_format = '%a, %d %b %Y %H:%M:%S'
VERSION = product_name+'/'+version_short
alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n'
+TIMEOUT = 60.0
class CacheRequest:
- """Download a file needed for the HTTP download cache.
-
- @type handler: L{HTTPCache}
- @ivar handler: the cache manager for the download
+ """A new request to send to the server for the cache.
+
@type path: C{list} of C{string}
@ivar path: the server and path to download
- @type server: C{string}
- @ivar server: the webserver address and port to connect to
- @type url: C{string}
- @ivar url: the URL to request from the site
@type func: C{method}
@ivar func: the method to call when the download completes
- @type connection: C{HTTPConnection}
- @ivar connection: the connection to the HTTP server
- @type headers: C{dictionary}
- @ivar headres: the HTTP headers to send in the request, and the headers
- returned by the response
- @type active: C{boolean}
- @ivar active: whether there is a download underway
- @type received_data: C{string}
- @ivar received_data: the data returned from the server
- @type connection_status: C{int}
- @ivar connection_status: the status code returned by the server
- @type connection_response: C{string}
- @ivar connection_response: the status message returned by the server
+ @type response: (C{int}, C{string}, C{dictionary}, C{string})
+ @ivar response: the HTTP status code, status message, headers, and
+ downloaded data
"""
- def __init__(self, handler, path, func):
+ def __init__(self, path, func):
"""Initialize the instance.
- @type handler: L{HTTPCache}
- @param handler: the cache manager for the download
@type path: C{list} of C{string}
@param path: the server and path to download
@type func: C{method}
@@ -77,24 +64,124 @@
"""
+ self.path = path
+ self.func = func
+ self.response = None
+
+ def save_response(self, r):
+ """Save a returned response from the server.
+
+ @type r: C{httplib.HTTPResponse}
+ @param r: the response from the server
+
+ """
+
+ self.response = (r.status, r.reason, dict(r.getheaders()), r.read())
+
+ def error(self, error_msg):
+ """Save an error response.
+
+ @type error_msg: C{string}
+ @param error_msg: the error that occurred
+
+ """
+
+ self.response = (502, 'Bad Gateway', {},
+ 'error accessing http server: '+error_msg)
+
+class CacheConnection:
+ """Download files needed for the HTTP download cache from a single server.
+
+ @type handler: L{HTTPCache}
+ @ivar handler: the cache manager for the download
+ @type server: C{string}
+ @ivar server: the webserver address and port to connect to
+ @type request: L{CacheRequest}
+ @ivar request: the request currently in progress
+ @type request_queue: C{list} of L{CacheRequest}
+ @ivar request_queue: the waiting requests
+ @type connection: C{HTTPConnection}
+ @ivar connection: the connection to the HTTP server
+ @type url: C{string}
+ @ivar url: the URL to request from the site
+ @type headers: C{dictionary}
+ @ivar headers: the HTTP headers to send in the request
+ @type active: C{boolean}
+ @ivar active: whether there is a download underway
+ @type closed: C{boolean}
+ @ivar closed: whether the connection has been closed
+ @type last_action: C{float}
+ @ivar last_action: the last time an action occurred
+
+ """
+
+ def __init__(self, handler, server):
+ """Initialize the instance.
+
+ @type handler: L{HTTPCache}
+ @param handler: the cache manager for the download
+ @type server: C{string}
+ @param server: the server name to send the requests to
+
+ """
+
self.handler = handler
- self.path = path
- self.server = path[0]
- self.url = '/' + '/'.join(path[1:])
- self.func = func
+ self.server = server
+ self.request = None
+ self.request_queue = []
+ self.headers = {'User-Agent': VERSION}
+ self.active = False
+ self.closed = False
+ self.last_action = clock()
+
try:
self.connection = HTTPConnection(self.server)
except:
- logger.exception('cannot connect to http seed: '+self.server)
+ logger.exception('cannot connect to http server: '+self.server)
+ self.close()
+
+ def queue(self, path, func):
+ """Queue a download for later starting.
+
+ @type path: C{list} of C{string}
+ @param path: the server and path to download
+ @type func: C{method}
+ @param func: the method to call when the download completes
+ @rtype: C{boolean}
+ @return: whether the download was successfully queued
+
+ """
+
+ assert path[0] == self.server
+ if self.closed:
+ return False
+
+ logger.debug('queueing request for '+'/'.join(path))
+ self.request_queue.append(CacheRequest(path, func))
+ self._run_queue()
+
+ return True
+
+ def _run_queue(self):
+ """Start the next element in the queue downloading."""
+
+ # Check if one is already running
+ if self.active or self.closed:
return
- self.headers = {'User-Agent': VERSION}
- self.active = False
+ # If the queue is empty, then we are done
+ if not self.request_queue:
+ self.handler.rawserver.add_task(self.auto_close, int(TIMEOUT)+1)
+ return
+
+ self.active = True
+ self.last_action = clock()
+ self.request = self.request_queue.pop(0)
+ self.url = '/' + '/'.join(self.request.path[1:])
logger.debug('starting thread to download '+self.url)
- rq = Thread(target = self._request, name = 'HTTPCache.CacheRequest._request')
+ rq = Thread(target = self._request, name = 'CacheRequest('+self.server+')')
rq.setDaemon(False)
rq.start()
- self.active = True
def _request(self):
"""Do the request."""
@@ -104,30 +191,77 @@
try:
logger.debug('sending request GET '+self.url+', '+str(self.headers))
- self.connection.request('GET',self.url, None, self.headers)
-
- r = self.connection.getresponse()
+ self.connection.request('GET', self.url, None, self.headers)
+
+ # Check for closed persistent connection due to server timeout
+ try:
+ r = self.connection.getresponse()
+ except BadStatusLine:
+ # Reopen the connection to get a new socket
+ logger.debug('persistent connection closed, attempting to reopen')
+ self.connection.close()
+ self.connection.connect()
+ logger.debug('sending request GET '+self.url+', '+str(self.headers))
+ self.connection.request('GET',self.url, None, self.headers)
+ r = self.connection.getresponse()
logger.debug('got response '+str(r.status)+', '+r.reason+', '+str(r.getheaders()))
- self.connection_status = r.status
- self.connection_response = r.reason
- self.headers = dict(r.getheaders())
- self.received_data = r.read()
+ self.request.save_response(r)
+ except gaierror, e:
+ logger.warning('could not contact http server '+self.server+': '+str(e))
+ self.request.error('could not contact http server '+self.server+': '+str(e))
except Exception, e:
logger.exception('error accessing http server')
- self.connection_status = 500
- self.connection_response = 'Internal Server Error'
- self.headers = {}
- self.received_data = 'error accessing http server: '+str(e)
+ self.request.error(str(e))
+ self.last_action = clock()
self.handler.rawserver.add_task(self.request_finished)
def request_finished(self):
"""Process the completed request."""
+
+ # Save the result
+ request = self.request
+ self.request = None
+
+ # Start the next queued item running
+ self.active = False
+ self._run_queue()
+
+ # Return the result
+ self.handler.download_complete(request.path, request.func,
+ request.response)
+
+ def auto_close(self):
+ """Close the connection if it has been idle."""
+ if (not self.active and not self.closed and not self.request and
+ not self.request_queue and (clock() - self.last_action) >= TIMEOUT):
+ self.close()
+
+ def close(self):
+ """Close the connection."""
+ logger.info('Closing the connection to: '+self.server)
+ self.closed = True
self.connection.close()
- self.active = False
- self.handler.download_complete(self, self.path, self.func,
- (self.connection_status, self.connection_response,
- self.headers, self.received_data))
+
+ # Process the current request
+ if self.request:
+ if not self.request.response:
+ self.request.error('connection closed prematurely')
+ self.handler.download_complete(self.request.path,
+ self.request.func,
+ self.request.response)
+ self.request = None
+
+ # Process any waiting requests
+ for request in self.request_queue:
+ if not request.response:
+ request.error('connection closed prematurely')
+ self.handler.download_complete(request.path, request.func,
+ request.response)
+ del self.request_queue[:]
+
+ # Remove the connection to the server
+ self.handler.remove(self, self.server)
class HTTPCache:
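
[Editor's note: the retry logic in _request() handles the common case where the server silently times out an idle persistent connection. A hedged sketch of that pattern, written against Python 3's http.client for clarity (the original uses the Python 2 httplib module).]

    import http.client

    def get_with_retry(conn, url, headers):
        # If the idle persistent connection was closed by the server, the
        # first response read fails with BadStatusLine; reconnect and resend.
        conn.request('GET', url, None, headers)
        try:
            return conn.getresponse()
        except http.client.BadStatusLine:
            conn.close()
            conn.connect()
            conn.request('GET', url, None, headers)
            return conn.getresponse()
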
@@ -135,8 +269,9 @@
@type rawserver: L{Debtorrent.RawServer.RawServer}
@ivar rawserver: the server
- @type downloads: C{list} of L{CacheRequest}
- @ivar downloads: the list of all current downloads for the cache
+ @type downloads: C{dictionary}
+ @ivar downloads: the current downloads, keys are the server names, values
+ are the L{CacheConnection} objects used to download from the server
@type cachedir: C{string}
@ivar cachedir: the directory to save cache files in
@@ -153,7 +288,7 @@
"""
self.rawserver = rawserver
- self.downloads = []
+ self.downloads = {}
self.cachedir = cachedir
def download_get(self, path, func):
@@ -166,18 +301,35 @@
"""
- logger.info('Starting a downloader for: http://'+'/'.join(path))
- self.downloads.append(CacheRequest(self, path, func))
-
- def download_complete(self, d, path, func, r):
- """Remove a completed download from the list and process the data.
-
- Once a download has been completed, remove the downloader from the
- list and save the downloaded file in the file system. Then return the
- data to the callback function.
-
- @type d: L{CacheRequest}
- @param d: the cache request that is completed
+ if path[0] not in self.downloads:
+ logger.info('Opening a connection to server: '+path[0])
+ self.downloads[path[0]] = CacheConnection(self, path[0])
+
+ if not self.downloads[path[0]].queue(path, func):
+ func(path, (500, 'Internal Server Error',
+ {'Server': VERSION,
+ 'Content-Type': 'text/html; charset=iso-8859-1'},
+ 'Server could not be contacted'))
+
+ def remove(self, d, server):
+ """Remove a completed download connection.
+
+ @type d: L{CacheConnection}
+ @param d: the server connection that is no longer needed
+ @type server: C{string}
+ @param server: the server the connection was to
+
+ """
+
+ assert self.downloads[server] == d
+ del self.downloads[server]
+
+ def download_complete(self, path, func, r):
+ """Process the returned data from a request.
+
+ Once a download has been completed, save the downloaded file in the
+ file system. Then return the data to the callback function.
+
@type path: C{list} of C{string}
@param path: the server and path that was downloaded
@type func: C{method}
@@ -188,7 +340,6 @@
"""
logger.info('download completed for: http://'+'/'.join(path))
- self.downloads.remove(d)
file = self.get_filename(path)
headers = {'Server': VERSION}
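
[Editor's note: taken together, HTTPCache now keeps one CacheConnection per mirror and funnels every request for that mirror through it, one at a time, closing the connection after a minute of idleness. A reduced, hypothetical model of that bookkeeping; threading and the rawserver callbacks are simplified away.]

    import time

    TIMEOUT = 60.0

    class ServerQueue(object):
        """One-per-server request queue: one request in flight, FIFO order."""
        def __init__(self, server):
            self.server = server
            self.queue = []
            self.active = False
            self.closed = False
            self.last_action = time.time()

        def add(self, path, func):
            if self.closed:
                return False
            self.queue.append((path, func))
            return True

        def idle_too_long(self):
            # Candidate for closing once nothing has happened for TIMEOUT seconds
            return (not self.active and not self.queue and
                    time.time() - self.last_action >= TIMEOUT)

    downloads = {}   # server name -> ServerQueue

    def download_get(path, func):
        server = path[0]
        if server not in downloads:
            downloads[server] = ServerQueue(server)
        if not downloads[server].add(path, func):
            func(path, (500, 'Internal Server Error', {},
                        'Server could not be contacted'))
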
Modified: debtorrent/trunk/DebTorrent/HTTPHandler.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/DebTorrent/HTTPHandler.py?rev=258&op=diff
==============================================================================
--- debtorrent/trunk/DebTorrent/HTTPHandler.py (original)
+++ debtorrent/trunk/DebTorrent/HTTPHandler.py Thu Aug 16 17:29:38 2007
@@ -34,6 +34,8 @@
months = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+DEBTORRENT_PROTOCOL = "0.1"
+
def isotime(secs = None):
"""Create an ISO formatted string of the time.
@@ -48,6 +50,89 @@
if secs == None:
secs = time.time()
return time.strftime('%Y-%m-%d %H:%M UTC', time.gmtime(secs))
+
+class HTTPRequest:
+ """A single request on an HTTP connection.
+
+ Handles one of possibly many HTTP GET or HEAD requests from a client using
+ HTTP/1.1.
+
+ @type header: C{string}
+ @ivar header: the first header line received from the request
+ @type command: C{string}
+ @ivar command: the requested command ('GET' or 'HEAD')
+ @type path: C{string}
+ @ivar path: the requested path to get
+ @type encoding: C{string}
+ @ivar encoding: the encoding to use when sending the response
+ @type headers: C{dictionary}
+ @ivar headers: the headers received with the request
+ @type answer: (C{int}, C{string}, C{dictionary}, C{string})
+ @ivar answer: the HTTP status code, status message, headers, and package
+ data, or None if the answer is not yet available
+
+ """
+
+ def __init__(self, header, command, path, encoding, headers):
+ """Initialize the instance.
+
+ @type header: C{string}
+ @param header: the first header line received from the request
+ @type command: C{string}
+ @param command: the requested command ('GET' or 'HEAD')
+ @type path: C{string}
+ @param path: the requested path to get
+ @type encoding: C{string}
+ @param encoding: the encoding to use when sending the response
+ @type headers: C{dictionary}
+ @param headers: the headers received with the request
+
+ """
+
+ self.header = header
+ self.command = command
+ self.path = path
+ self.encoding = encoding
+ self.headers = headers
+ self.answer = None
+
+ def save_answer(self, r):
+ """Save an answer, replacing the old one if it's better.
+
+ @type r: (C{int}, C{string}, C{dictionary}, C{string})
+ @param r: the HTTP status code, status message, headers, and package data
+
+ """
+
+ # Queue the answer
+ if self.answer:
+ logger.error('An answer already exists for this request, keeping the better one')
+ # Better means lower code, or newer response if codes are the same
+ if r[0] <= self.answer[0]:
+ self.answer = r
+ else:
+ self.answer = r
+
+ def has_answer(self):
+ """Determine whether an answer is available for the request.
+
+ @rtype: C{boolean}
+ @return: whether the answer is available yet
+
+ """
+
+ return not not self.answer
+
+ def get_answer(self):
+ """Get the saved answer.
+
+ @rtype: (C{int}, C{string}, C{dictionary}, C{string})
+ @return: the HTTP status code, status message, headers, and package
+ data, or None if the answer is not yet available
+
+ """
+
+ return self.answer
class HTTPConnection:
"""A single connection from an HTTP client.
@@ -60,6 +145,15 @@
@ivar connection: the new connection that was created
@type buf: C{string}
@ivar buf: the buffered data received on the connection
+ @type requests: C{list} of L{HTTPRequest}
+ @ivar requests: the outstanding requests for paths
+ @type protocol: C{string}
+ @ivar protocol: the protocol used to make the request
+ @type version: (C{int}, C{int})
+ @ivar version: the protocol version of the request
+ @type close_connection: C{boolean}
+ @ivar close_connection: whether the connection will be closed after this
+ request
@type closed: C{boolean}
@ivar closed: whether the connection has been closed
@type done: C{boolean}
@@ -72,8 +166,8 @@
@ivar header: the first header line received from the request
@type command: C{string}
@ivar command: the requested command ('GET' or 'HEAD')
- @type pre1: C{boolean}
- @ivar pre1: whether the request is from a pre version 1.0 client
+ @type path: C{string}
+ @ivar path: the requested path to get
@type headers: C{dictionary}
@ivar headers: the headers received with the request
@type encoding: C{string}
@@ -94,9 +188,14 @@
self.handler = handler
self.connection = connection
self.buf = ''
+ self.requests = []
+ self.protocol = ''
+ self.version = None
+ self.close_connection = True
self.closed = False
self.done = False
self.donereading = False
+ self.req_count = 0
self.next_func = self.read_type
def get_ip(self):
@@ -148,23 +247,61 @@
"""
+ self.req_count += 1
self.header = data.strip()
words = data.split()
if len(words) == 3:
- self.command, self.path, garbage = words
- self.pre1 = False
+ # Must be HTTP 1.0 or greater
+ self.command, self.path, version = words
+
+ try:
+ # Extract the protocol from the request
+ self.protocol, base_version_number = version.split('/', 1)
+ except:
+ logger.error("Bad request protocol (%r)", version)
+ return None
+
+ if self.handler.protocol >= "HTTP/1.1":
+ try:
+ # Extract the version number from the request
+ self.protocol, base_version_number = version.split('/', 1)
+ version_number = base_version_number.split(".")
+ if len(version_number) != 2:
+ logger.error("Bad request version (%r)", version)
+ return None
+ self.version = int(version_number[0]), int(version_number[1])
+ except (ValueError, IndexError):
+ logger.error("Bad request version (%r)", version)
+ return None
+
+ # Use persistent connections for DEBTORRENT/HTTP1.1
+ if (self.protocol == "DEBTORRENT" or
+ (self.protocol == "HTTP" and self.version >= (1, 1))):
+ self.close_connection = False
+
+ elif self.protocol != "HTTP":
+ logger.error("Unsupported protocol (%r)", version)
+ return None
+ else:
+ self.version = (1, 0)
+
elif len(words) == 2:
+ # Old HTTP 0.9 connections don't include the version and only support GET
self.command, self.path = words
- self.pre1 = True
+ self.protocol = 'HTTP'
+ self.version = (0, 9)
if self.command != 'GET':
logger.warning('connection closed, improper command: '+self.command)
return None
else:
logger.warning('connection closed, corrupt header line: '+data)
return None
+
if self.command not in ('HEAD', 'GET'):
logger.warning('connection closed, improper command: '+self.command)
return None
+
+ logger.info(str(self.req_count)+': '+self.protocol+' '+self.header)
self.headers = {}
return self.read_header
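
[Editor's note: a condensed, hypothetical version of the read_type() parsing above, showing how the protocol and version are pulled from the request line and how they decide whether the connection stays open. The real method also honours the handler's configured protocol, which is omitted here.]

    def parse_request_line(line):
        words = line.split()
        if len(words) == 3:
            command, path, version = words
            try:
                protocol, number = version.split('/', 1)
                major, minor = number.split('.')
                ver = (int(major), int(minor))
            except ValueError:
                return None                     # bad version string
            if protocol not in ('HTTP', 'DEBTORRENT'):
                return None                     # unsupported protocol
            persistent = protocol == 'DEBTORRENT' or ver >= (1, 1)
            return command, path, protocol, ver, persistent
        if len(words) == 2 and words[0] == 'GET':
            # HTTP/0.9: no version field, GET only, never persistent
            return 'GET', words[1], 'HTTP', (0, 9), False
        return None
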
@@ -179,16 +316,51 @@
"""
data = data.strip()
+
+ # A blank line indicates the headers are done
if data == '':
- self.donereading = True
+ # Get the encoding to use for the answer
if self.headers.get('accept-encoding','').find('gzip') > -1:
self.encoding = 'gzip'
else:
self.encoding = 'identity'
- r = self.handler.getfunc(self, self.path, self.headers)
+
+ # Check for persistent connection headers
+ conntype = self.headers.get('Connection', "").lower()
+ if conntype == 'close':
+ self.close_connection = True
+ elif conntype == 'keep-alive' and self.handler.protocol >= "HTTP/1.1":
+ self.close_connection = False
+
+ # If this is not the last request
+ newrequest = None
+ if not self.close_connection or self.requests:
+ newrequest = HTTPRequest(self.header, self.command, self.path,
+ self.encoding, self.headers)
+ self.requests.append(newrequest)
+
+ # Call the function to process the request
+ r = self.handler.getfunc(self, self.path, self.headers, newrequest)
+
+ # Send the answer if available
if r is not None:
- self.answer(r)
- return None
+ if newrequest:
+ # Multiple requests, so queue it for possible sending
+ self.answer(r, newrequest)
+ else:
+ # It's the only request, so just send it
+ self.send_answer(r, self.header, self.command, self.path,
+ self.encoding, self.headers)
+
+ # Request complete, close or wait for more
+ if self.close_connection:
+ self.donereading = True
+ return None
+ else:
+ self.close_connection = True
+ return self.read_type
+
+ # Process the header line
try:
i = data.index(':')
except ValueError:
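
[Editor's note: the read_header() change layers the Connection header on top of the per-version default, but only honours keep-alive when the server itself is configured for HTTP/1.1. A hedged sketch; header names are assumed to be lower-cased by the time they reach this check.]

    def keep_alive(version, headers, server_protocol='HTTP/1.1'):
        persistent = version >= (1, 1)          # default from the version
        conntype = headers.get('connection', '').lower()
        if conntype == 'close':
            persistent = False
        elif conntype == 'keep-alive' and server_protocol >= 'HTTP/1.1':
            persistent = True
        return persistent

    assert keep_alive((1, 1), {}) is True
    assert keep_alive((1, 1), {'connection': 'close'}) is False
    assert keep_alive((1, 0), {'connection': 'keep-alive'}) is True
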
@@ -198,8 +370,52 @@
logger.debug(data[:i].strip() + ": " + data[i+1:].strip())
return self.read_header
- def answer(self, (responsecode, responsestring, headers, data)):
- """Send a response to the client on the connection and close it.
+ def answer(self, r, httpreq):
+ """Add a response to the queued responses and check if any are ready to send.
+
+ @type r: (C{int}, C{string}, C{dictionary}, C{string})
+ @param r: the HTTP status code, status message, headers, and package data
+ @type httpreq: L{HTTPRequest}
+ @param httpreq: the request the answer is for
+
+ """
+
+ if self.closed:
+ logger.warning('connection closed before answer, dropping data')
+ return
+
+ if not self.requests:
+ if httpreq is None:
+ # There's only one request allowed, so send the answer
+ self.send_answer(r, self.header, self.command, self.path,
+ self.encoding, self.headers)
+ else:
+ logger.error('got answer for unknown request')
+ return
+
+ if httpreq:
+ if httpreq not in self.requests:
+ logger.error('Got an answer for an unknown request')
+ else:
+ if self.protocol == "DEBTORRENT":
+ # DEBTORRENT requests get sent immediately
+ self.requests.remove(httpreq)
+ self.send_answer(r, httpreq.header, httpreq.command,
+ httpreq.path, httpreq.encoding,
+ httpreq.headers)
+ else:
+ httpreq.save_answer(r)
+
+ # Answer all possible requests
+ while self.requests and self.requests[0].has_answer():
+ httpreq = self.requests.pop(0)
+ r = httpreq.get_answer()
+ self.send_answer(r, httpreq.header, httpreq.command, httpreq.path,
+ httpreq.encoding, httpreq.headers)
+
+ def send_answer(self, (responsecode, responsestring, headers, data),
+ header, command, path, encoding, req_headers):
+ """Send out the complete request.
@type responsecode: C{int}
@param responsecode: the response code to send
@@ -209,53 +425,86 @@
@param headers: the headers to send with the response
@type data: C{string}
@param data: the data to send with the response
-
- """
-
- if self.closed:
- logger.warning('connection closed before anwswer, dropping data')
- return
- if self.encoding == 'gzip':
+ @type header: C{string}
+ @param header: the first header line received from the request
+ @type command: C{string}
+ @param command: the requested command ('GET' or 'HEAD')
+ @type path: C{string}
+ @param path: the requested path to get
+ @type encoding: C{string}
+ @param encoding: the encoding to use when sending the response
+ @type req_headers: C{dictionary}
+ @param req_headers: the headers received with the request
+
+ """
+
+ # Encode the response data
+ if encoding == 'gzip':
compressed = StringIO()
gz = GzipFile(fileobj = compressed, mode = 'wb', compresslevel = 9)
gz.write(data)
gz.close()
cdata = compressed.getvalue()
if len(cdata) >= len(data):
- self.encoding = 'identity'
+ encoding = 'identity'
else:
logger.debug('Compressed: '+str(len(cdata))+' Uncompressed: '+str(len(data)))
data = cdata
headers['Content-Encoding'] = 'gzip'
# i'm abusing the identd field here, but this should be ok
- if self.encoding == 'identity':
+ if encoding == 'identity':
ident = '-'
else:
ident = self.encoding
self.handler.write_log( self.connection.get_ip(), ident, '-',
- self.header, responsecode, len(data),
- self.headers.get('referer','-'),
- self.headers.get('user-agent','-') )
- self.done = True
- logger.info('sending response: '+str(responsecode)+' '+responsestring+
+ header, responsecode, len(data),
+ req_headers.get('referer', '-'),
+ req_headers.get('user-agent', '-') )
+
+ logger.info('sending response: '+self.protocol+' '+str(responsecode)+' '+responsestring+
' ('+str(len(data))+' bytes)')
+
r = StringIO()
- r.write('HTTP/1.0 ' + str(responsecode) + ' ' +
- responsestring + '\r\n')
- if not self.pre1:
+
+ # Write the header line
+ if self.protocol == "HTTP":
+ r.write(self.handler.protocol + ' ' + str(responsecode) + ' ' +
+ responsestring + '\r\n')
+ elif self.protocol == "DEBTORRENT":
+ r.write('DEBTORRENT/'+DEBTORRENT_PROTOCOL+' '+path+' '+
+ str(responsecode)+' '+responsestring+'\r\n')
+
+ # Write the individual headers
+ if self.version >= (1, 0) or self.protocol != 'HTTP':
headers['Content-Length'] = len(data)
for key, value in headers.items():
r.write(key + ': ' + str(value) + '\r\n')
r.write('\r\n')
- if self.command != 'HEAD':
+
+ # Don't write the body if only the headers are requested
+ if command != 'HEAD':
r.write(data)
+
self.connection.write(r.getvalue())
- if self.connection.is_flushed():
- self.connection.shutdown(1)
-
+
+ def close(self):
+ """Close the connection and drop all pending requests/answers."""
+ logger.debug('HTTP connection closed')
+ self.closed = True
+ del self.connection
+ self.next_func = None
+ for httpreq in self.requests:
+ if httpreq.has_answer():
+ logger.debug('Connection lost before answer could be sent: '+httpreq.path)
+ del self.requests[:]
+
+
class HTTPHandler:
"""The handler for all new and existing HTTP connections.
+
+ Supports HTTP/1.1 persistent connections with pipelining if the protocol
+ is set to 'HTTP/1.1'.
@type connections: C{dictionary}
@ivar connections: all the existing connections, keys are the connection
@@ -270,10 +519,13 @@
@ivar logfile: the file name to write the logs to
@type log: C{file}
@ivar log: the file to write the logs to
+ @type protocol: C{string}
+ @ivar protocol: the HTTP protocol version to use
"""
- def __init__(self, getfunc, minflush, logfile = None, hupmonitor = None):
+ def __init__(self, getfunc, minflush, logfile = None, hupmonitor = None,
+ protocol = 'HTTP/1.0'):
"""Initialize the instance.
@type getfunc: C{method}
@@ -286,6 +538,9 @@
@type hupmonitor: C{boolean}
@param hupmonitor: whether to reopen the log file on a HUP signal
(optional, default is False)
+ @type protocol: C{string}
+ @param protocol: the HTTP protocol version to use
+ (optional, defaults to HTTP/1.0)
"""
@@ -295,6 +550,7 @@
self.lastflush = clock()
self.logfile = None
self.log = None
+ self.protocol = protocol
if (logfile) and (logfile != '-'):
try:
self.logfile = logfile
@@ -322,6 +578,7 @@
"""
+ logger.debug('new external connection')
self.connections[connection] = HTTPConnection(self, connection)
def connection_flushed(self, connection):
@@ -332,7 +589,9 @@
"""
+ logger.debug('connection flushed')
if self.connections[connection].done:
+ logger.debug('connection shutdown')
connection.shutdown(1)
def connection_lost(self, connection):
@@ -343,10 +602,9 @@
"""
+ logger.debug('connection lost')
ec = self.connections[connection]
- ec.closed = True
- del ec.connection
- del ec.next_func
+ ec.close()
del self.connections[connection]
def data_came_in(self, connection, data):
@@ -361,6 +619,7 @@
c = self.connections[connection]
if not c.data_came_in(data) and not c.closed:
+ logger.debug('closing connection')
c.connection.shutdown(1)
def write_log(self, ip, ident, username, header,
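
[Editor's note: the net effect of answer()/send_answer() is that HTTP/1.1 answers are buffered per request and flushed strictly in the order the requests arrived, as pipelining requires, while DEBTORRENT answers are written as soon as they are ready, since that protocol tags each response with its path. A toy model of that bookkeeping, with send() standing in for send_answer().]

    class Pipeline(object):
        def __init__(self, protocol, send):
            self.protocol = protocol
            self.requests = []        # request objects in arrival order
            self.send = send

        def add_request(self, req):
            req.answer = None
            self.requests.append(req)

        def answer(self, req, response):
            if self.protocol == 'DEBTORRENT':
                # Out-of-order is fine: the response line names its path
                self.requests.remove(req)
                self.send(req, response)
                return
            req.answer = response
            # Flush from the head of the queue while answers are ready
            while self.requests and self.requests[0].answer is not None:
                head = self.requests.pop(0)
                self.send(head, head.answer)
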
Modified: debtorrent/trunk/DebTorrent/SocketHandler.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/DebTorrent/SocketHandler.py?rev=258&op=diff
==============================================================================
--- debtorrent/trunk/DebTorrent/SocketHandler.py (original)
+++ debtorrent/trunk/DebTorrent/SocketHandler.py Thu Aug 16 17:29:38 2007
@@ -92,6 +92,7 @@
self.ip = 'unknown'
else:
self.ip = ip
+ logger.debug('new socket: ' + self.ip)
def get_ip(self, real=False):
"""Get the IP address of the socket.
@@ -115,6 +116,7 @@
def close(self):
"""Close the socket."""
assert self.socket
+ logger.debug('close socket')
self.connected = False
sock = self.socket
self.socket = None
@@ -132,6 +134,7 @@
"""
+ logger.debug('socket shutdown:'+str(val))
self.socket.shutdown(val)
def is_flushed(self):
Modified: debtorrent/trunk/DebTorrent/launchmanycore.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/DebTorrent/launchmanycore.py?rev=258&op=diff
==============================================================================
--- debtorrent/trunk/DebTorrent/launchmanycore.py (original)
+++ debtorrent/trunk/DebTorrent/launchmanycore.py Thu Aug 16 17:29:38 2007
@@ -366,7 +366,8 @@
reuse = True, ipv6_socket_style = config['ipv6_binds_v4'])
self.rawserver.set_handler(HTTPHandler(self.aptlistener.get,
config['min_time_between_log_flushes'],
- logfile, config['hupmonitor']),
+ logfile, config['hupmonitor'],
+ 'HTTP/1.1'),
config['apt_port'])
self.ratelimiter = RateLimiter(self.rawserver.add_task,
Modified: debtorrent/trunk/TODO
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/TODO?rev=258&op=diff
==============================================================================
--- debtorrent/trunk/TODO (original)
+++ debtorrent/trunk/TODO Thu Aug 16 17:29:38 2007
@@ -38,15 +38,6 @@
more efficient by adding callbacks to PiecePicker or StorageWrapper, so that
when a piece comes in and passes the hash check, then the AptListener will
process any queued requests for that piece.
-
-
-HTTPHandler should support HTTP/1.1 and persistent connections/pipelining
-
-Currently HTTPHandler is HTTP/1.0, and so doesn't support persistent
-connections. These would be useful as APT could then pipeline multiple requests
-at a time to DebTorrent for processing. This would require something like the
-AptListener callbacks, as the connections would then have to support multiple
-queued package requests.
Different forms of HTTP Downloading may open too many connections
Modified: debtorrent/trunk/test.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/test.py?rev=258&op=diff
==============================================================================
--- debtorrent/trunk/test.py (original)
+++ debtorrent/trunk/test.py Thu Aug 16 17:29:38 2007
@@ -172,6 +172,84 @@
(1, ['update']),
]),
+ '7': ('Test pipelining of multiple simultaneous downloads.',
+ {1: []},
+ {1: (1, [], {})},
+ [(1, ['update']),
+ (1, ['install', 'aboot-base', 'aap-doc', 'ada-reference-manual',
+ 'aspectj-doc', 'fop-doc', 'jswat-doc', 'asis-doc',
+ 'bison-doc', 'crash-whitepaper', 'doc-iana',
+ 'bash-doc', 'apt-howto-common', 'autotools-dev',
+ 'aptitude-doc-en', 'armagetron-common', 'asr-manpages',
+ 'atomix-data', 'alcovebook-sgml-doc', 'alamin-doc',
+ 'aegis-doc', 'afbackup-common', 'airstrike-common',
+ ]),
+ ]),
+
+ '8': ('Test pipelining of multiple simultaneous downloads with many peers.',
+ {1: []},
+ {1: (1, [], {}),
+ 2: (1, [], {}),
+ 3: (1, [], {}),
+ 4: (1, [], {}),
+ 5: (1, [], {}),
+ 6: (1, [], {})},
+ [(1, ['update']),
+ (1, ['install', 'aboot-base', 'aap-doc', 'ada-reference-manual',
+ 'aspectj-doc', 'fop-doc', 'jswat-doc', 'asis-doc',
+ 'bison-doc', 'crash-whitepaper', 'doc-iana',
+ 'bash-doc', 'apt-howto-common', 'autotools-dev',
+ 'aptitude-doc-en', 'armagetron-common', 'asr-manpages',
+ 'atomix-data', 'alcovebook-sgml-doc', 'alamin-doc',
+ 'aegis-doc', 'afbackup-common', 'airstrike-common',
+ ]),
+ (2, ['update']),
+ (2, ['install', 'aboot-base', 'aap-doc', 'ada-reference-manual',
+ 'aspectj-doc', 'fop-doc', 'jswat-doc', 'asis-doc',
+ 'bison-doc', 'crash-whitepaper', 'doc-iana',
+ 'bash-doc', 'apt-howto-common', 'autotools-dev',
+ 'aptitude-doc-en', 'armagetron-common', 'asr-manpages',
+ 'atomix-data', 'alcovebook-sgml-doc', 'alamin-doc',
+ 'aegis-doc', 'afbackup-common', 'airstrike-common',
+ ]),
+ (3, ['update']),
+ (3, ['install', 'aboot-base', 'aap-doc', 'ada-reference-manual',
+ 'aspectj-doc', 'fop-doc', 'jswat-doc', 'asis-doc',
+ 'bison-doc', 'crash-whitepaper', 'doc-iana',
+ 'bash-doc', 'apt-howto-common', 'autotools-dev',
+ 'aptitude-doc-en', 'armagetron-common', 'asr-manpages',
+ 'atomix-data', 'alcovebook-sgml-doc', 'alamin-doc',
+ 'aegis-doc', 'afbackup-common', 'airstrike-common',
+ ]),
+ (4, ['update']),
+ (4, ['install', 'aboot-base', 'aap-doc', 'ada-reference-manual',
+ 'aspectj-doc', 'fop-doc', 'jswat-doc', 'asis-doc',
+ 'bison-doc', 'crash-whitepaper', 'doc-iana',
+ 'bash-doc', 'apt-howto-common', 'autotools-dev',
+ 'aptitude-doc-en', 'armagetron-common', 'asr-manpages',
+ 'atomix-data', 'alcovebook-sgml-doc', 'alamin-doc',
+ 'aegis-doc', 'afbackup-common', 'airstrike-common',
+ ]),
+ (5, ['update']),
+ (5, ['install', 'aboot-base', 'aap-doc', 'ada-reference-manual',
+ 'aspectj-doc', 'fop-doc', 'jswat-doc', 'asis-doc',
+ 'bison-doc', 'crash-whitepaper', 'doc-iana',
+ 'bash-doc', 'apt-howto-common', 'autotools-dev',
+ 'aptitude-doc-en', 'armagetron-common', 'asr-manpages',
+ 'atomix-data', 'alcovebook-sgml-doc', 'alamin-doc',
+ 'aegis-doc', 'afbackup-common', 'airstrike-common',
+ ]),
+ (6, ['update']),
+ (6, ['install', 'aboot-base', 'aap-doc', 'ada-reference-manual',
+ 'aspectj-doc', 'fop-doc', 'jswat-doc', 'asis-doc',
+ 'bison-doc', 'crash-whitepaper', 'doc-iana',
+ 'bash-doc', 'apt-howto-common', 'autotools-dev',
+ 'aptitude-doc-en', 'armagetron-common', 'asr-manpages',
+ 'atomix-data', 'alcovebook-sgml-doc', 'alamin-doc',
+ 'aegis-doc', 'afbackup-common', 'airstrike-common',
+ ]),
+ ]),
+
}
assert 'all' not in tests
@@ -468,7 +546,7 @@
# Create apt's config files
f = open(join([downloader_dir, 'etc', 'apt', 'sources.list']), 'w')
- f.write('deb http://localhost:' + str(num_down) + '988/' + mirror + '/ stable ' + suites + '\n')
+ f.write('deb debtorrent://localhost:' + str(num_down) + '988/' + mirror + '/ stable ' + suites + '\n')
f.close()
f = open(join([downloader_dir, 'etc', 'apt', 'apt.conf']), 'w')