r323 - branches/rewrite/src
Marco Presi
partial-mirror-devel@lists.alioth.debian.org
Tue, 16 Nov 2004 13:57:47 -0700
Author: zufus
Date: Tue Nov 16 13:57:47 2004
New Revision: 323
Modified:
branches/rewrite/src/Pool.py
Log:
Pool.py: added methods to find packages that need upgrading. I have added two ways of performing the search. More about this in the comments included in the source file
Modified: branches/rewrite/src/Pool.py
==============================================================================
--- branches/rewrite/src/Pool.py (original)
+++ branches/rewrite/src/Pool.py Tue Nov 16 13:57:47 2004
@@ -21,48 +21,117 @@
from FileSystem import *
from PackageList import *
import Config
+import apt_pkg
class Pool:
"""
- This class provides methods to create dist dirs into the
- partial-mirror
+ This class provides methods to manage pool dirs into the
+ partial-mirror.
"""
def __init__(self, backend):
self._backend = backend
- self._backendBinList = backend._bin
- self._backendSourceList = backen._source
- self._poolPkgList = PackageList()
+ #self._backendBinList = backend._bin
+ #self._backendSourceList = backend._source
+ self._plist = {}
self._needUpdate = PackageList()
+ self._dir = FileSystem(backend["mirror_dir"],
+ os.path.join(backend["name"], 'pool'))
self._server = "%s/pool/" % (self._backend["server"])
- self._local = "%/pool/" % (self._backend["mirror_dor"])
+ self._local = "%/pool/" % (self._backend["mirror_dir"])
+ # The following two methods can be used to determine which packages
+ # in the pool need to be upgraded because a new version is present
+ # in the repository we are mirroring. Those methods work in a way
+ # similar to the trunk debpartial-mirror, but I think there are
+ # some drawbacks.
+
+
+ def buildDownloadedList (self):
+ """
+ Walk into the pool dir and build a dictionary of packages stored
+ in the Backend. For each item we record package name and
+ md5sum. This dictionary can be used to determine which
+ packages are to be upgraded. Return the number of downloaded
+ packages.
+ """
+ #Warning: this method can fail if the pool contains more
+ #versions of the same package... Every time the package is
+ #found (no matter the version, or whether it is a binary or a source
+ #package) its md5sum is replaced.
+
+ for root, dirs, files in os.walk(self._dir):
+ for f in files:
+ self._plist[f.split('_')[0]]=self._dir.md5_on (f)
+ return self._plist.__len__
+
def find_updates (self, backendList):
"""
- Look into the Backend PackageList and look for packages that
- have newer version on remote repository
+ Scan into a Backend PackageList (_bin or _source) and look for
+ packages that have newer version on remote (or local)
+ repository. Return number of packages to be downloaded.
"""
for (pkgName, pkg) in backendList.items():
- try:
- p = self._poolPkgList.get(pkg)
- if p['md5sum'] != pkg['md5sum']:
- self._needUpdate.add(pkg)
- except PackageDoesNotExist:
+ if not self._plist.has_key(pkgName):
self._needUpdate.add(pkg)
+ else:
+ if self._plist[pkgName] != pkg['md5sum'] or
+ self._plist[pkgName] == -1:
+ self._needUpdate.add(pkg)
+ return len(self._needUpdate.items)
+
+########################################################################
+
+ # The following methods are used to find which packages need to be
+ # upgraded. Those methods use a new approach: create local Packages
+ # and Sources lists that are used in a way similar to a local
+ # cache. In this way the problem of multiple versions of the same
+ # package should be solved.
+
+ # The methods apply only to binary packages, but their extension
+ # to Sources packages is (should be) trivial.
+
+ def createPackages (self):
+ """
+ Run dpkg-scanpackages into the pool.
+ """
+ scan_command = "dpkg-scanpackages -a%s ./ ./override > Packages 2> /dev/null" % (self.backend["architectures"])
+ os.system(scan_command)
+ def createBinList (self):
+ """
+ Create a PackageList object with the list of Packages already
+ downloaded into this pool.
+ """
+
+ pkgfile = os.join(self._dir, "Packages")
+ if not os.path.exists(pkgfile)
+ self.createPackages
+
+ parse = apt_pkg.ParseTagFile(open(pkgfile, "r"))
+ self._bin = PackageList()
+ while parse.Step() == 1:
+ try:
+ p = Package(parse.Section)
+ self._bin.add(p)
+ except PackageAlreadyExists, pkg_name:
+ print "Package %s seems duplicated..\n" % (pkg_name)
+
+#####################################################################
- def update (self, self._needUpdate):
+ def update (self):
"""
- Get from remote repository new version of packages that were
- previously downloaded
+ Get from remote (or local) repository new version of packages
+ that were previously downloaded.
"""
for (pkg_name, pkg) in self._needUpdate.items():
filename = self._local + pkg['Section'] + '/' + pkg['Filename']
self._dir.create(os.path.dirname(filename))
self._get(pkg, filename)
+ self.__needUpdate.remove(pkg)
- def _get (self, plist):
+ def _get (self, pkg, filename):
"""
Get the a PackageList
"""
@@ -75,19 +144,19 @@
def __init__(self, backend):
Pool.__init__(self, backend)
- def self._get (self, pkg, filename):
+ def _get (self, pkg, filename):
Download (self._server + pkg['Section'] + '/' + pkg['Filename'],
filename)
class LocalPool (Pool):
"""
- This class provides methods to fill pool dir referring to local files
+ This class provides methods to fill pool dir linking from local files
"""
def __init__(self, backend):
Pool.__init__(self, backend)
self._server = self._server.lstrip ('file://')
- def self._get (self, pkg, filename):
+ def _get (self, pkg, filename):
os.link (self._server + pkg['Section'] + '/' + pkg['Filename'],
filename)