r348 - /debtorrent/trunk/DebTorrent/download_bt1.py
camrdale-guest at users.alioth.debian.org
camrdale-guest at users.alioth.debian.org
Thu Jan 24 00:44:13 UTC 2008
Author: camrdale-guest
Date: Thu Jan 24 00:44:13 2008
New Revision: 348
URL: http://svn.debian.org/wsvn/debtorrent/?sc=1&rev=348
Log:
Upgrade the saved downloader state to the new format when starting.
Modified:
debtorrent/trunk/DebTorrent/download_bt1.py
Modified: debtorrent/trunk/DebTorrent/download_bt1.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/trunk/DebTorrent/download_bt1.py?rev=348&op=diff
==============================================================================
--- debtorrent/trunk/DebTorrent/download_bt1.py (original)
+++ debtorrent/trunk/DebTorrent/download_bt1.py Thu Jan 24 00:44:13 2008
@@ -842,42 +842,117 @@
self.errorfunc(reason)
- def find_files(self):
- """Search through the save directory to find files which already exist.
-
- @rtype: C{list} of C{boolean}, C{list} of alternating C{string}, C{int}
- @return: a list of which entries in the list L{files} already exist,
- and a list of the file names and priorities for them (see
- L{FileSelector.unpickle})
-
- """
-
- logger.info('Searching %s for already downloaded files' % self.filename)
- found_files = {}
- for root, dirs, files in walk(self.filename):
- for file in files:
- found_files[path.join(root, file)] = 1
-
- if not found_files:
- logger.info('No downloaded files were found')
- return None, []
-
- logger.info('Found %d downloaded files' % len(found_files.keys()))
+ def loadState(self):
+        """Load the saved download state from a previous run.
+
+ @rtype: C{list} of C{boolean}
+ @return: a list of the enabled files in the torrent
+ (first call only, subsequent calls will return None)
+
+ """
+
+ # If it's already loaded, do nothing
+ if self.pickled_data is not None:
+ return
+
+ try:
+ self.pickled_data = self.appdataobj.getTorrentData(self.infohash)
+ except:
+ logger.exception('Could not retrieve the pickled data')
+ self.pickled_data = None
+
+ must_find_files = False
+
+ # Upgrade the saved state
+ if self.pickled_data and self.pickled_data.get('version', 0) < 1:
+ if 'resume data' in self.pickled_data:
+ new_priority = {}
+ if 'priority' in self.pickled_data['resume data']:
+ try:
+ old_priority = self.pickled_data['resume data']['priority']
+ for i in xrange(min(len(old_priority), len(self.files))):
+ if old_priority[i] >= 0:
+ new_priority[self.files[i][0]] = old_priority[i]
+ logger.info('Upgraded the old priority state')
+ except:
+ logger.exception('Previously saved priority state is corrupt, resetting')
+ must_find_files = True
+ else:
+ must_find_files = True
+ self.pickled_data['resume data']['priority'] = new_priority
+ new_files = {}
+ if 'files' in self.pickled_data['resume data']:
+ try:
+ old_files = self.pickled_data['resume data']['files']
+ assert len(old_files) % 3 == 0
+ old_files = [old_files[x:x+3] for x in xrange(0,len(old_files),3)]
+ for i, size, mtime in old_files:
+ if i < len(self.files):
+ new_files[self.files[i][0]] = [size, mtime]
+ logger.info('Upgraded the old saved files state')
+ except:
+ logger.exception('Previously saved file state is corrupt, resetting')
+ self.pickled_data['resume data']['files'] = new_files
+ if 'partial files' in self.pickled_data['resume data']:
+ del self.pickled_data['resume data']['partial files']
+ else:
+ self.pickled_data['resume data'] = {'priority': {}, 'files': {}}
+ must_find_files = True
+ self.pickled_data['stats'] = {'upload': 0L, 'download': 0L}
+ self.pickled_data['version'] = 1
+
+        # Initialize the saved state if it wasn't found
+ if not self.pickled_data:
+ must_find_files = True
+ self.pickled_data['resume data'] = {'priority': {}, 'files': {}}
+ self.pickled_data['stats'] = {'upload': 0L, 'download': 0L}
+ self.pickled_data['version'] = 1
+
+ enabled_files = []
found = 0
- enabled_files = []
- priority = []
- for file, length in self.files:
- if file in found_files:
- found += 1
- priority.append(file)
- priority.append(1)
- enabled_files.append(True)
- else:
- enabled_files.append(False)
-
- logging.info('%d of the found files were also present in the %d files of the torrent' % (found, len(self.files)))
- return enabled_files, priority
-
+ if must_find_files:
+ # No state, so search the download directory for old files to use
+ logger.info('no cached data, manually finding and hash checking old files')
+ self.pickled_data['resume data']['priority'] = {}
+ try:
+ logger.info('Searching %s for already downloaded files' % self.filename)
+ found_files = {}
+ for root, dirs, files in walk(self.filename):
+ for file in files:
+ found_files[path.join(root, file)] = 1
+
+ if found_files:
+ logger.info('Found %d downloaded files' % len(found_files.keys()))
+ priority = {}
+ for file, length in self.files:
+ if file in found_files:
+ found += 1
+ priority[file] = 1
+ enabled_files.append(True)
+ else:
+ enabled_files.append(False)
+ logging.info('%d of the found files were also present in the %d files of the torrent' % (found, len(self.files)))
+ self.pickled_data['resume data']['priority'] = priority
+ else:
+ logger.info('No downloaded files were found')
+
+ except:
+ logger.exception('Error occurred when manually finding the old files')
+ enabled_files = None
+ else:
+ # Have state, so build the list of enabled files
+ d = self.pickled_data['resume data']['priority']
+ for file, length in self.files:
+ if file in d and d[file] >= 0:
+ found += 1
+ enabled_files.append(True)
+ else:
+ enabled_files.append(False)
+ logger.info('Of %d previous files, %d are still valid for this torrent'
+ % (len(d.keys()), found))
+
+ return enabled_files
+
def initFiles(self, old_style = False):
"""Initialize the files for the download.
@@ -895,37 +970,7 @@
if self.doneflag.isSet():
return None
- try:
- self.pickled_data = self.appdataobj.getTorrentData(self.infohash)
- except:
- logger.exception('Could not retrieve the pickled data')
-
- enabled_files = None
- d = None
- if self.pickled_data:
- try:
- d = self.pickled_data['resume data']['priority']
- except:
- logger.exception('pickled data is corrupt')
- if not self.pickled_data or d is None:
- logger.info('no cached data, manually finding and hash checking old files')
- self.pickled_data = None
- try:
- enabled_files, priority = self.find_files()
- if priority:
- self.pickled_data = {'resume data': {'priority': priority}}
- except:
- logger.exception('Error occurred when manually finding the old files')
- else:
- enabled_files = []
- found = 0
- for file, length in self.files:
- if file in d:
- found += 1
- enabled_files.append(True)
- else:
- enabled_files.append(False)
- logger.info('Of %d previous files, %d are still valid for this torrent' % (len(d)/2, found))
+ enabled_files = self.loadState()
try:
try:
@@ -958,10 +1003,7 @@
self.storage, self.storagewrapper,
self.rawserver.add_task, self.picker,
self._failed)
- if self.pickled_data:
- data = self.pickled_data.get('resume data')
- if data:
- self.fileselector.unpickle(data)
+ self.fileselector.unpickle(self.pickled_data.get('resume data', {}))
self.checking = True
if old_style:
@@ -1107,15 +1149,11 @@
if self.storagewrapper.do_I_have(i):
self.picker.complete(i)
- total_up = 0L
- total_down = 0L
- if self.pickled_data:
- try:
- total_up = long(self.pickled_data['stats']['upload'])
- total_down = long(self.pickled_data['stats']['download'])
- logger.info('Initializing measures with previously downloaded %d, uploaded %d' % (total_down, total_up))
- except:
- logger.exception('Pickled stats from previous run are corrupt')
+ # Just to be sure
+ self.loadState()
+
+ total_up = long(self.pickled_data['stats']['upload'])
+ total_down = long(self.pickled_data['stats']['download'])
self.upmeasure = Measure(self.config['max_rate_period'],
self.config['upload_rate_fudge'], saved_total = total_up)
self.downmeasure = Measure(self.config['max_rate_period'], saved_total = total_down)
@@ -1278,6 +1316,7 @@
self.storage.close()
self.rerequest_stopped()
if self.fileselector and self.started:
+ torrentdata['version'] = 1
torrentdata['stats'] = {'upload': self.upmeasure.get_total(),
'download': self.downmeasure.get_total()}
if not self.failed:
More information about the Debtorrent-commits
mailing list