[Collab-qa-commits] r2087 - multi-arch
Jakub Wilk
jwilk at alioth.debian.org
Tue Nov 22 22:24:14 UTC 2011
Author: jwilk
Date: 2011-11-22 22:24:14 +0000 (Tue, 22 Nov 2011)
New Revision: 2087
Added:
multi-arch/md5sum-validator
Removed:
multi-arch/multi-arch-same-validator
Log:
Rename: multi-arch-same-validator -> md5sum-validator.
Copied: multi-arch/md5sum-validator (from rev 2086, multi-arch/multi-arch-same-validator)
===================================================================
--- multi-arch/md5sum-validator (rev 0)
+++ multi-arch/md5sum-validator 2011-11-22 22:24:14 UTC (rev 2087)
@@ -0,0 +1,238 @@
+#!/usr/bin/python3
+
+# Copyright © 2011 Jakub Wilk <jwilk at debian.org>
+
+# Redistribution and use in source and compiled forms, with or without
+# modification, are permitted under any circumstances. No warranty.
+
+'''
+Check MD5 sums for 'Multi-Arch: same' packages.
+'''
+
+import argparse
+import collections
+import dbm
+import os
+import pipes
+import re
+import sys
+import subprocess as ipc
+
+import apt_pkg
+
# Default Debian mirror used when --mirror is not given.
default_mirror = 'http://ftp.debian.org/debian'
# Default distribution checked when --distribution is not given.
default_distribution = 'unstable'
# Destination stream for progress messages; set via setup_log_file().
log_file = None
+
def setup_proxies():
    """Export APT's proxy configuration so child processes (wget) honour it."""
    apt_pkg.init_config()
    for scheme in 'http', 'ftp':
        key = 'Acquire::{0}::Proxy'.format(scheme)
        os.environ[scheme + '_proxy'] = apt_pkg.config.get(key, '')
+
def setup_locale():
    """Force the C locale so that external tool output stays parseable."""
    os.environ.update(LC_ALL='C')
+
def setup_log_file(file):
    """Remember *file* as the destination stream for the log_* helpers."""
    global log_file
    log_file = file
+
def log_download(url):
    """Log a download attempt for *url* (prefix 'D:')."""
    message = 'D: {url}'.format(url=url)
    print(message, file=log_file)
+
def log_action(package, version, action):
    """Log an informational progress message for one package version (prefix 'I:')."""
    message = 'I: {pkg} {ver} => {action}'.format(pkg=package, ver=version, action=action)
    print(message, file=log_file)
+
def log_error(package, version, message):
    """Log an error concerning one package version (prefix 'E:')."""
    text = 'E: {pkg} {ver} => {message}'.format(pkg=package, ver=version, message=message)
    print(text, file=log_file)
+
class DownloadError(IOError):
    """Raised when an external download/extraction command exits non-zero;
    carries the command's stderr output as its message."""
+
class DummyCache(object):
    """No-op stand-in for a dbm cache: every lookup misses, every store
    is discarded, close() does nothing."""

    def __getitem__(self, key):
        # Report every key as absent so callers always recompute.
        raise KeyError

    def __setitem__(self, key, value):
        # Deliberately discard the value.
        pass

    def close(self):
        # Nothing to release.
        pass
+
class download:
    """Context manager that streams a local file or a remote URL.

    Paths starting with '/' or '.' are read locally; anything else is
    fetched with wget.  An optional shell *pipe* command (e.g. 'gzip -dc')
    post-processes the stream.  __enter__ returns the child's stdout;
    __exit__ raises DownloadError (with the child's stderr as message)
    if the pipeline exited non-zero.
    """

    def __init__(self, url, pipe=None):
        self._url = url    # source path or URL
        self._pipe = pipe  # optional shell filter appended to the pipeline

    def __enter__(self):
        # shlex.quote replaces pipes.quote: the pipes module was deprecated
        # in Python 3.11 and removed in 3.13 (PEP 594).
        import shlex
        log_download(self._url)
        quoted_url = shlex.quote(self._url)
        if self._url.startswith(('/', '.')):
            # Local file: read it directly instead of spawning wget.
            if self._pipe is not None:
                commandline = '< {url} {pipe}'.format(url=quoted_url, pipe=self._pipe)
            else:
                commandline = 'cat {url}'.format(url=quoted_url)
        else:
            commandline = 'wget -O- -q {url}'.format(url=quoted_url)
            if self._pipe is not None:
                commandline += ' | ' + self._pipe
        self._child = ipc.Popen(commandline, shell=True,
            stdout=ipc.PIPE, stderr=ipc.PIPE
        )
        return self._child.stdout

    def __exit__(self, exc_type, exc_val, exc_tb):
        stderr = self._child.stderr.read()
        returncode = self._child.wait()
        # Close both pipe handles explicitly so they are not leaked
        # (avoids ResourceWarning and fd exhaustion on long runs).
        self._child.stdout.close()
        self._child.stderr.close()
        if returncode != 0:
            stderr = stderr.decode('ASCII', 'replace').strip()
            raise DownloadError(stderr)
+
def parse_md5sums_line(pkgdata, line, architecture):
    """Record one md5sums entry (a bytes line) for *architecture*.

    A line is laid out as: 32 hex digits, two separator characters, the
    file name, and a trailing newline (which is dropped).  The result is
    stored as pkgdata[filename][md5sum] -> set of architectures.
    """
    checksum = line[:32]
    name = line[34:-1]
    pkgdata[name][checksum].add(architecture)
+
def do_qa(options):
    """Compare md5sums of 'Multi-Arch: same' packages across architectures.

    Scans Packages indices for every requested architecture, then for each
    package/version present on more than one architecture extracts each
    .deb's md5sums control file and prints every shipped file whose
    checksum is not identical on all architectures.  Progress goes to the
    log file; findings go to stdout.
    """
    if options.cache:
        # Persistent dbm cache keyed by "<name>_<version>_<arch>".
        try:
            os.makedirs(os.path.dirname(options.cache))
        except OSError:
            # The directory probably exists already; a genuinely unusable
            # path will make dbm.open() below fail instead.
            pass
        cache = dbm.open(options.cache, 'c')
    else:
        cache = DummyCache()  # no --cache: misses on read, drops on write
    # (package, version) -> {architecture: URL of the .deb}
    data = collections.defaultdict(dict)
    if options.architectures is None:
        # No --architectures given: take the list from the Release file.
        # NOTE(review): for unstable/experimental the *testing* Release
        # file is consulted instead — presumably because their own
        # architecture lists are unsuitable here; confirm before changing.
        release_dist = options.distribution
        if release_dist in ('unstable', 'sid', 'experimental', 'rc-buggy'):
            release_dist = 'testing'
        url = '{mirror}/dists/{dist}/Release'.format(
            mirror=options.mirror,
            dist=release_dist
        )
        with download(url) as release_tags:
            for para in apt_pkg.TagFile(release_tags):
                options.architectures = para['Architectures'].split()
    for architecture in options.architectures:
        for section in 'main', 'contrib', 'non-free':
            url = '{mirror}/dists/{dist}/{section}/binary-{arch}/Packages.gz'.format(
                mirror=options.mirror,
                dist=options.distribution,
                section=section,
                arch=architecture
            )
            with download(url, pipe='gzip -dc') as package_tags:
                for pkgdata in apt_pkg.TagFile(package_tags):
                    # Only 'Multi-Arch: same' packages must have
                    # architecture-independent file contents.
                    if pkgdata.get('Multi-Arch', '') == 'same':
                        pkgname = pkgdata['Package']
                        if pkgname not in options.packages:
                            continue
                        pkgversion = pkgdata['Version']
                        url = '{mirror}/{path}'.format(mirror=options.mirror, path=pkgdata['Filename'])
                        data[pkgname, pkgversion][architecture] = url
    last = None  # last (package, version) stanza header printed to stdout
    for (pkgname, pkgversion), urls in data.items():
        if len(urls) <= 1:
            # Present on a single architecture only: nothing to compare.
            log_action(pkgname, pkgversion, 'skip')
            continue
        log_action(pkgname, pkgversion, 'download ({archs})'.format(archs=' '.join(urls.keys())))
        # file name -> md5sum -> set of architectures having that sum
        pkgdata = collections.defaultdict(
            lambda: collections.defaultdict(set)
        )
        for architecture, url in urls.items():
            cache_key = '{name}_{version}_{arch}'.format(name=pkgname, version=pkgversion, arch=architecture)
            try:
                # Cache hit: replay the stored md5sums lines.
                cache_item = cache[cache_key]
                for line in cache_item.splitlines(True):
                    parse_md5sums_line(pkgdata, line, architecture)
            except KeyError:
                try:
                    cache_item = []
                    # Stream the .deb through dpkg-deb to extract only the
                    # md5sums control member, without saving the package.
                    with download(url, pipe='dpkg-deb -I /dev/stdin md5sums') as md5sums_file:
                        for line in md5sums_file:
                            parse_md5sums_line(pkgdata, line, architecture)
                            cache_item += [line]
                    cache[cache_key] = b''.join(cache_item)
                except DownloadError as exc:
                    # dpkg-deb reports a package without an md5sums member
                    # via this stderr message; log it and keep going.
                    if 'contains no control component `md5sums\'' in str(exc):
                        log_error(pkgname, pkgversion, 'missing md5sums for {arch}'.format(arch=architecture))
                        continue
                    else:
                        raise
        for filename, md5sums in pkgdata.items():
            if len(md5sums) <= 1:
                # Same checksum everywhere: this file is fine.
                continue
            if last != (pkgname, pkgversion):
                # Start a new stanza; blank line between stanzas.
                if last is not None:
                    print()
                print('[{name} {ver}]'.format(name=pkgname, ver=pkgversion))
                last = (pkgname, pkgversion)
            print(filename.decode('UTF-8', 'replace'))
            if options.compact:
                # --compact: skip the per-sum breakdown when every
                # architecture has its own distinct checksum.
                if all(len(x) == 1 for x in md5sums.values()):
                    continue
            for md5sum, architectures in sorted(md5sums.items()):
                # NOTE(review): leading whitespace inside this output
                # literal may have been mangled by the archive transport.
                print(' {md5sum} {arch}'.format(
                    md5sum=md5sum.decode('ASCII'),
                    arch=' '.join(architectures)
                ))
    cache.close()
+
class Universum(object):
    """Pseudo-container claiming to contain everything; used as the
    default for --packages so the membership test accepts any package."""

    def __contains__(self, other):
        return True
+
def main():
    """Parse command-line options and run the QA check."""
    setup_proxies()
    setup_locale()
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--mirror', default=default_mirror,
        metavar='<mirror>',
        help='use this mirror (default: {mirror})'.format(mirror=default_mirror)
    )
    parser.add_argument('--distribution', default=default_distribution,
        metavar='<dist>',
        help='check this distribution (default: {dist})'.format(dist=default_distribution)
    )
    parser.add_argument('--architectures', nargs='+',
        metavar='<arch>',
        help='check these architectures (default: all release architectures)'
    )
    # Universum() makes the 'pkgname not in options.packages' test in
    # do_qa() accept every package when --packages is not given.
    parser.add_argument('--packages', nargs='+', default=Universum(),
        metavar='<package>',
        help='check only these packages (default: check all)'
    )
    parser.add_argument('--compact', action='store_true',
        help='don\'t print MD5 sums if they are all different'
    )
    # default=False means "no caching"; a bare '--cache' with no value
    # yields None, which selects the XDG cache path below.
    parser.add_argument('--cache', nargs='?',
        metavar='<file>', default=False,
        help='use cache file'
    )
    parser.add_argument('--log-file', type=argparse.FileType('a'), default=sys.stderr,
        metavar='<file>',
        help='log progress into this file (default: stderr)'
    )
    options = parser.parse_args()
    setup_log_file(options.log_file)
    # Freeze an explicit package list for fast membership tests.
    if isinstance(options.packages, list):
        options.packages = frozenset(options.packages)
    if options.cache is None:
        # Bare '--cache': default to $XDG_CACHE_HOME (or ~/.cache).
        options.cache = os.path.join((
            os.getenv('XDG_CACHE_HOME') or
            os.path.join(os.path.expanduser('~'), '.cache')
        ), 'debian', 'multi-arch-same-validator')
    do_qa(options)
+
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
+
+# vim:ts=4 sw=4 et
Deleted: multi-arch/multi-arch-same-validator
===================================================================
--- multi-arch/multi-arch-same-validator 2011-11-09 18:31:54 UTC (rev 2086)
+++ multi-arch/multi-arch-same-validator 2011-11-22 22:24:14 UTC (rev 2087)
@@ -1,238 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright © 2011 Jakub Wilk <jwilk at debian.org>
-
-# Redistribution and use in source and compiled forms, with or without
-# modification, are permitted under any circumstances. No warranty.
-
-'''
-Check MD5 sums for 'Multi-Arch: same' packages.
-'''
-
-import argparse
-import collections
-import dbm
-import os
-import pipes
-import re
-import sys
-import subprocess as ipc
-
-import apt_pkg
-
-default_mirror = 'http://ftp.debian.org/debian'
-default_distribution = 'unstable'
-log_file = None
-
-def setup_proxies():
- apt_pkg.init_config()
- os.environ['http_proxy'] = apt_pkg.config.get('Acquire::http::Proxy', '')
- os.environ['ftp_proxy'] = apt_pkg.config.get('Acquire::ftp::Proxy', '')
-
-def setup_locale():
- os.environ['LC_ALL'] = 'C'
-
-def setup_log_file(file):
- global log_file
- log_file = file
-
-def log_download(url):
- print('D: {url}'.format(url=url), file=log_file)
-
-def log_action(package, version, action):
- print(
- 'I: {pkg} {ver} => {action}'.format(pkg=package, ver=version, action=action),
- file=log_file
- )
-
-def log_error(package, version, message):
- print(
- 'E: {pkg} {ver} => {message}'.format(pkg=package, ver=version, message=message),
- file=log_file
- )
-
-class DownloadError(IOError):
- pass
-
-class DummyCache(object):
-
- def __getitem__(self, key):
- raise KeyError
-
- def __setitem__(self, key, value):
- pass
-
- def close(self):
- pass
-
-class download:
-
- def __init__(self, url, pipe=None):
- self._url = url
- self._pipe = pipe
-
- def __enter__(self):
- log_download(self._url)
- quoted_url = pipes.quote(self._url)
- if self._url.startswith(('/', '.')):
- if self._pipe is not None:
- commandline = '< {url} {pipe}'.format(url=quoted_url, pipe=self._pipe)
- else:
- commandline = 'cat {url}'.format(url=quoted_url)
- else:
- commandline = 'wget -O- -q {url}'.format(url=quoted_url)
- if self._pipe is not None:
- commandline += ' | ' + self._pipe
- self._child = ipc.Popen(commandline, shell=True,
- stdout=ipc.PIPE, stderr=ipc.PIPE
- )
- return self._child.stdout
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- stderr = self._child.stderr.read()
- if self._child.wait() != 0:
- stderr = stderr.decode('ASCII', 'replace').strip()
- raise DownloadError(stderr)
-
-def parse_md5sums_line(pkgdata, line, architecture):
- md5sum = line[:32]
- filename = line[34:-1]
- pkgdata[filename][md5sum].add(architecture)
-
-def do_qa(options):
- if options.cache:
- try:
- os.makedirs(os.path.dirname(options.cache))
- except OSError:
- pass
- cache = dbm.open(options.cache, 'c')
- else:
- cache = DummyCache()
- data = collections.defaultdict(dict)
- if options.architectures is None:
- release_dist = options.distribution
- if release_dist in ('unstable', 'sid', 'experimental', 'rc-buggy'):
- release_dist = 'testing'
- url = '{mirror}/dists/{dist}/Release'.format(
- mirror=options.mirror,
- dist=release_dist
- )
- with download(url) as release_tags:
- for para in apt_pkg.TagFile(release_tags):
- options.architectures = para['Architectures'].split()
- for architecture in options.architectures:
- for section in 'main', 'contrib', 'non-free':
- url = '{mirror}/dists/{dist}/{section}/binary-{arch}/Packages.gz'.format(
- mirror=options.mirror,
- dist=options.distribution,
- section=section,
- arch=architecture
- )
- with download(url, pipe='gzip -dc') as package_tags:
- for pkgdata in apt_pkg.TagFile(package_tags):
- if pkgdata.get('Multi-Arch', '') == 'same':
- pkgname = pkgdata['Package']
- if pkgname not in options.packages:
- continue
- pkgversion = pkgdata['Version']
- url = '{mirror}/{path}'.format(mirror=options.mirror, path=pkgdata['Filename'])
- data[pkgname, pkgversion][architecture] = url
- last = None
- for (pkgname, pkgversion), urls in data.items():
- if len(urls) <= 1:
- log_action(pkgname, pkgversion, 'skip')
- continue
- log_action(pkgname, pkgversion, 'download ({archs})'.format(archs=' '.join(urls.keys())))
- pkgdata = collections.defaultdict(
- lambda: collections.defaultdict(set)
- )
- for architecture, url in urls.items():
- cache_key = '{name}_{version}_{arch}'.format(name=pkgname, version=pkgversion, arch=architecture)
- try:
- cache_item = cache[cache_key]
- for line in cache_item.splitlines(True):
- parse_md5sums_line(pkgdata, line, architecture)
- except KeyError:
- try:
- cache_item = []
- with download(url, pipe='dpkg-deb -I /dev/stdin md5sums') as md5sums_file:
- for line in md5sums_file:
- parse_md5sums_line(pkgdata, line, architecture)
- cache_item += [line]
- cache[cache_key] = b''.join(cache_item)
- except DownloadError as exc:
- if 'contains no control component `md5sums\'' in str(exc):
- log_error(pkgname, pkgversion, 'missing md5sums for {arch}'.format(arch=architecture))
- continue
- else:
- raise
- for filename, md5sums in pkgdata.items():
- if len(md5sums) <= 1:
- continue
- if last != (pkgname, pkgversion):
- if last is not None:
- print()
- print('[{name} {ver}]'.format(name=pkgname, ver=pkgversion))
- last = (pkgname, pkgversion)
- print(filename.decode('UTF-8', 'replace'))
- if options.compact:
- if all(len(x) == 1 for x in md5sums.values()):
- continue
- for md5sum, architectures in sorted(md5sums.items()):
- print(' {md5sum} {arch}'.format(
- md5sum=md5sum.decode('ASCII'),
- arch=' '.join(architectures)
- ))
- cache.close()
-
-class Universum(object):
-
- def __contains__(self, other):
- return True
-
-def main():
- setup_proxies()
- setup_locale()
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('--mirror', default=default_mirror,
- metavar='<mirror>',
- help='use this mirror (default: {mirror})'.format(mirror=default_mirror)
- )
- parser.add_argument('--distribution', default=default_distribution,
- metavar='<dist>',
- help='check this distribution (default: {dist})'.format(dist=default_distribution)
- )
- parser.add_argument('--architectures', nargs='+',
- metavar='<arch>',
- help='check these architectures (default: all release architectures)'
- )
- parser.add_argument('--packages', nargs='+', default=Universum(),
- metavar='<package>',
- help='check only these packages (default: check all)'
- )
- parser.add_argument('--compact', action='store_true',
- help='don\'t print MD5 sums if they are all different'
- )
- parser.add_argument('--cache', nargs='?',
- metavar='<file>', default=False,
- help='use cache file'
- )
- parser.add_argument('--log-file', type=argparse.FileType('a'), default=sys.stderr,
- metavar='<file>',
- help='log progress into this file (default: stderr)'
- )
- options = parser.parse_args()
- setup_log_file(options.log_file)
- if isinstance(options.packages, list):
- options.packages = frozenset(options.packages)
- if options.cache is None:
- options.cache = os.path.join((
- os.getenv('XDG_CACHE_HOME') or
- os.path.join(os.path.expanduser('~'), '.cache')
- ), 'debian', 'multi-arch-same-validator')
- do_qa(options)
-
-if __name__ == '__main__':
- main()
-
-# vim:ts=4 sw=4 et
More information about the Collab-qa-commits
mailing list