r209 - /debtorrent/branches/unique/uniquely.py

camrdale-guest at users.alioth.debian.org
Thu Aug 9 07:22:24 UTC 2007


Author: camrdale-guest
Date: Thu Aug  9 07:22:24 2007
New Revision: 209

URL: http://svn.debian.org/wsvn/debtorrent/?sc=1&rev=209
Log:
Add initial uniquely script to manage unique piece numbers (not yet working).

Added:
    debtorrent/branches/unique/uniquely.py   (with props)

Added: debtorrent/branches/unique/uniquely.py
URL: http://svn.debian.org/wsvn/debtorrent/debtorrent/branches/unique/uniquely.py?rev=209&op=file
==============================================================================
--- debtorrent/branches/unique/uniquely.py (added)
+++ debtorrent/branches/unique/uniquely.py Thu Aug  9 07:22:24 2007
@@ -1,0 +1,184 @@
+#!/usr/bin/env python
+
+import bsddb, sha, binascii
+import os, sys
+import gzip
+from StringIO import StringIO
+from math import ceil, log
+
+# Some default values
+default_piecesize = 512*1024
+extension = ".gz"
+default_hash_fields = ["Codename", "Suite", "Component", "Architecture",
+                       "PieceSize", "OriginalDate"]
+header_order = ["Torrent", "Infohash", "OriginalDate", "Date", "PieceSize",
+                "Codename", "Suite", "Component", "Architecture",
+                "TorrentHashFields"]
+
+#cache_file = sys.argv[1]
+#cache = bsddb.btopen(cache_file, "w")
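+
+# --- Illustrative sketch, not part of this revision ----------------------
+# get_old() and get_new() are called below but are not defined anywhere in
+# r209 (the log notes the script is not yet working).  This placeholder is
+# only a guess at the intended interface: read a previously written
+# ..._Packages-torrent.gz file, and return its piece assignments as
+# {piece number: file name} together with its headers.
+def get_old(torrent_file):
+    """Read an old torrent file, returning ({piece number: file name}, headers)."""
+    old_files = {}
+    headers = {}
+    if not os.path.exists(torrent_file):
+        return old_files, headers
+    f = gzip.open(torrent_file, 'r')
+    in_pieces = False
+    for line in f:
+        line = line.rstrip()
+        if line == "PieceNumbers:":
+            in_pieces = True
+        elif in_pieces:
+            p = line.split()
+            if len(p) == 2:
+                old_files[long(p[0])] = p[1]
+        elif ": " in line:
+            header, value = line.split(": ", 1)
+            headers[header] = value
+    f.close()
+    return old_files, headers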
+
+# The only input is the Release file to process
+releasefile = sys.argv[1]
+print "Processing: %s" % releasefile
+
+# Initialize the Release file variables
+origin = ""
+label = ""
+suite = ""
+codename = ""
+date = ""
+components = []
+archs = []
+read_files = False
+packages = []
+packages_sha1 = {}
+packages_size = {}
+
+f = open(releasefile, 'r')
+
+for line in f:
+    line = line.rstrip()
+    
+    # Read the various headers from the file
+    if line[:7] == "Origin:":
+        origin = line[8:]
+    if line[:6] == "Label:":
+        label = line[7:]
+    if line[:6] == "Suite:":
+        suite = line[7:]
+    if line[:9] == "Codename:":
+        codename = line[10:]
+    if line[:5] == "Date:":
+        date = line[6:]
+    if line[:11] == "Components:":
+        components = line[12:].split()
+    if line[:14] == "Architectures:":
+        archs = line[15:].split()
+        
+    # Read multiple lines from the SHA1 section of the file
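+    # (each entry has the form " <sha1> <size> <path>")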
+    if line[:1] != " ":
+        read_files = False
+    if read_files:
+        p = line.split()
+        if len(p) == 3 and p[2].endswith("Packages"+extension):
+            packages.append(p[2])
+            packages_sha1[p[2]] = binascii.a2b_hex(p[0])
+            packages_size[p[2]] = long(p[1])
+    if line[:5] == "SHA1:":
+        read_files = True
+
+f.close()
+
+torrent_prefix = "dists_" + codename + "_"
+torrent_suffix = "_Packages-torrent.gz"
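+
+# get_new() is the other missing helper: it should parse a Packages file and
+# decide the starting piece number for every package in it.  The sketch below
+# is an assumption about the interface only: files that already had a piece
+# number keep it, new files are appended after the highest piece seen so far,
+# and files that have disappeared are reported.
+def get_new(packages_file, old_files, piece_size):
+    """Return ({starting piece number: file name}, [removed file names])."""
+    piece_size = int(piece_size)
+    old_pieces = {}
+    for piece, name in old_files.items():
+        old_pieces[name] = piece
+
+    # Collect (Filename, Size) pairs from the stanzas of the Packages file.
+    new_packages = []
+    name = size = None
+    f = gzip.open(packages_file, 'r')
+    for line in f:
+        line = line.rstrip()
+        if line[:9] == "Filename:":
+            name = line[10:]
+        elif line[:5] == "Size:":
+            size = long(line[6:])
+        elif not line and name is not None and size is not None:
+            new_packages.append((name, size))
+            name = size = None
+    f.close()
+    if name is not None and size is not None:
+        new_packages.append((name, size))
+
+    # Keep existing assignments, append new files afterwards.  (A real
+    # implementation would also avoid overlapping the tail pieces of the
+    # last previously assigned file; this sketch ignores that.)
+    new_files = {}
+    next_piece = max(old_files.keys() + [0]) + 1
+    for name, size in new_packages:
+        if name in old_pieces:
+            new_files[old_pieces[name]] = name
+        else:
+            new_files[next_piece] = name
+            next_piece += int(ceil(size / float(piece_size)))
+
+    still_present = dict(new_packages)
+    removed_files = [name for name in old_pieces if name not in still_present]
+    return new_files, removed_files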
+
+for component in components:
+    # Get the old 'all' data
+    all_file = torrent_prefix + component + "_binary-all" + torrent_suffix
+    old_all_files, all_headers = get_old(all_file)
+
+    for arch in archs:
+        # Find the Packages file that will be parsed
+        found = False
+        for filename in packages:
+            if filename.find(component) >= 0 and filename.find("binary-"+arch) >= 0:
+                found = True
+                break
+        if not found:
+            print "WARNING: no matching Packages file for component %s, arch %s" % component, arch
+            continue
+        packages.pop(packages.index(filename))
+
+        # Get the old data for this torrent, if any existed
+        torrent_file = torrent_prefix + component + "_binary-" + arch + torrent_suffix
+        print torrent_file + ": reading ... ",
+        old_files, headers = get_old(torrent_file)
+
+        # Create the headers
+        if "OriginalDate" not in headers:
+            headers["OriginalDate"] = date
+        if "PieceSize" not in headers:
+            headers["PieceSize"] = default_piecesize
+        headers["Codename"] = codename
+        headers["Suite"] = suite
+        headers["Component"] = component
+        headers["Architecture"] = arch
+        if "TorrentHashFields" not in headers:
+            headers["TorrentHashFields"] = " ".join(default_hash_fields)
+        if "Torrent" not in headers:
+            sha1 = sha.new()
+            for header in headers["TorrentHashFields"].split():
+                sha1.update(str(headers[header]))
+            headers["Torrent"] = sha1.hexdigest()
+
+        # Parse the Packages file for the new data
+        print "updating ... ",
+        new_files, removed_files = get_new(filename, old_files, headers["PieceSize"])
+
+        # Write the headers
+        print "writing ... ",
+        f = gzip.open(torrent_file, 'w')
+        for header in header_order:
+            if header in headers:
+                f.write("%s: %s\n" % (header, headers[header]))
+        f.write("PieceNumbers:\n")
+        
+        # Write the starting piece numbers
+        pieces = new_files.keys()
+        pieces.sort()
+        # column width = number of digits in the largest piece number
+        format_string = " %" + str(len(str(max(pieces)))) + "d %s\n"
+        for piece in pieces:
+            f.write(format_string % (piece, new_files[piece]))
+        
+        f.close()
+        print "done."
+
+    # Create the all headers
+    if "OriginalDate" not in all_headers:
+        all_headers["OriginalDate"] = date
+    if "PieceSize" not in all_headers:
+        all_headers["PieceSize"] = default_piecesize
+    all_headers["Codename"] = codename
+    all_headers["Suite"] = suite
+    all_headers["Component"] = component
+    all_headers["Architecture"] = "all"
+    if "TorrentHashFields" not in all_headers:
+        all_headers["TorrentHashFields"] = " ".join(default_hash_fields)
+    if "Torrent" not in all_headers:
+        sha1 = sha.new()
+        for header in all_headers["TorrentHashFields"].split():
+            sha1.update(str(all_headers[header]))
+        all_headers["Torrent"] = sha1.hexdigest()
+
+    # Write the all_headers
+    print all_file + ": writing ... ",
+    f = gzip.open(all_file, 'w')
+    for header in header_order:
+        if header in all_headers:
+            f.write("%s: %s\n" % (header, all_headers[header]))
+    f.write("PieceNumbers:\n")
+        
+    # Write the all starting piece numbers
+    # NOTE: nothing above populates all_new_pieces yet; presumably it should
+    # collect the "Architecture: all" packages seen in each per-architecture
+    # Packages file.
+    pieces = all_new_pieces.keys()
+    pieces.sort()
+    format_string = " %" + str(len(str(max(pieces)))) + "d %s\n"
+    for piece in pieces:
+        f.write(format_string % (piece, all_new_pieces[piece]))
+    
+    f.close()
+    print "done."
+
+if packages:
+    print "The following packages files were not used:"
+    for package in packages:
+        print "    %s" % package
+        
+"""        
+    fnkey = filename + ":pc"
+    if cache.has_key(fnkey):
+    	sha1, result = str2hash(cache[fnkey])
+	cache[fnkey] = values
+"""
+#cache.sync()
+#cache.close()

Propchange: debtorrent/branches/unique/uniquely.py
------------------------------------------------------------------------------
    svn:executable = *



