[game-data-packager] 27/51: GameData: Move to g_d_p.game
Simon McVittie
smcv at debian.org
Fri Dec 29 01:23:36 UTC 2017
This is an automated email from the git hooks/post-receive script.
smcv pushed a commit to branch master
in repository game-data-packager.
commit 8546402395d8eed194f8fa34c2e5f1f477aa6ddb
Author: Simon McVittie <smcv at debian.org>
Date: Wed Dec 27 17:24:37 2017 +0000
GameData: Move to g_d_p.game
This avoids this warning:
% python3 -m game_data_packager.download
/usr/lib/python3.6/runpy.py:125: RuntimeWarning: 'game_data_packager.download' found in sys.modules after import of package 'game_data_packager', but prior to execution of 'game_data_packager.download'; this may result in unpredictable behaviour
Signed-off-by: Simon McVittie <smcv at debian.org>
---
game_data_packager/__init__.py | 1125 -------------------------
game_data_packager/command_line.py | 2 +-
game_data_packager/download.py | 2 +-
game_data_packager/{__init__.py => game.py} | 6 +-
game_data_packager/games/doom_common.py | 2 +-
game_data_packager/games/dosbox.py | 2 +-
game_data_packager/games/ecwolf_common.py | 2 +-
game_data_packager/games/lgeneral.py | 2 +-
game_data_packager/games/morrowind.py | 2 +-
game_data_packager/games/quake.py | 2 +-
game_data_packager/games/quake2.py | 2 +-
game_data_packager/games/residualvm_common.py | 2 +-
game_data_packager/games/rott.py | 2 +-
game_data_packager/games/scummvm_common.py | 2 +-
game_data_packager/games/unreal.py | 2 +-
game_data_packager/games/wolf3d.py | 2 +-
game_data_packager/games/z_code.py | 2 +-
tools/babel.py | 2 +-
tools/check_equivalence.py | 2 +-
tools/check_gog.py | 4 +-
tools/check_steam.py | 3 +-
tools/check_syntax.py | 2 +-
tools/mirror.py | 2 +-
tools/spider.py | 4 +-
tools/stats.py | 2 +-
25 files changed, 28 insertions(+), 1154 deletions(-)
diff --git a/game_data_packager/__init__.py b/game_data_packager/__init__.py
index ac71bed..09df949 100644
--- a/game_data_packager/__init__.py
+++ b/game_data_packager/__init__.py
@@ -16,1135 +16,10 @@
# You can find the GPL license text on a Debian system under
# /usr/share/common-licenses/GPL-2.
-import argparse
-import glob
-import importlib
-import io
-import json
import logging
import os
-import random
-import re
-import sys
-import zipfile
-
-import yaml
-
-from .build import (PackagingTask)
-from .data import (FileGroup, Package, WantedFile, YamlLiteral)
-from .packaging import (NoPackaging)
-from .paths import (DATADIR)
-from .util import ascii_safe
-from .version import (GAME_PACKAGE_VERSION)
-
-logger = logging.getLogger(__name__)
if os.environ.get('DEBUG') or os.environ.get('GDP_DEBUG'):
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
-
-MD5SUM_DIVIDER = re.compile(r' [ *]?')
-
-class GameData(object):
- def __init__(self, shortname, data):
- # The name of the game for command-line purposes, e.g. quake3
- self.shortname = shortname
-
- # Other command-line names for this game
- self.aliases = set()
-
- # The formal name of the game, e.g. Quake III Arena
- self.longname = shortname.title()
-
- # Engine's wiki base URL, provided by engine plugin
- self.wikibase = ''
- # Game page on engine's wiki
- self.wiki = None
-
- # Wikipedia page, linked from per-engine wikis
- self.wikipedia = None
-
- # The franchise this game belongs to.
- # this is used to loosely tie various .yaml files
- self.franchise = None
-
- # The one-line copyright notice used to build debian/copyright
- self.copyright = None
-
- # A blurb of text that is used to build debian/copyright
- self.copyright_notice = None
-
- # Tag fanmade games so they don't screw up year * size regression
- self.fanmade = False
-
- # The game engine used to run the game (package name)
- self.engine = None
-
- # Game translations known to exist, but missing in 'packages:'
- # list of ISO-639 codes
- self.missing_langs = []
-
- # The game genre, as seen in existing .desktop files or
- # http://en.wikipedia.org/wiki/List_of_video_game_genres
- self.genre = None
-
- # binary package name => Package
- self.packages = {}
-
- # Subset of packages.values() with nonempty rip_cd
- self.rip_cd_packages = set()
-
- # Number of CD the full game was sold on
- self.disks = None
-
- self.help_text = ''
-
- # Extra directories where we might find game files
- self.try_repack_from = []
-
- # If non-empty, the game requires binary executables which are only
- # available for the given architectures (typically i386)
- self.binary_executables = ''
-
- # online stores metadata
- self.steam = {}
- self.gog = {}
- self.origin = {}
-
- # full url of online game shops
- self.url_steam = None
- self.url_gog = None
- self.url_misc = None
-
- self.data = data
-
- self.argument_parser = None
-
- # How to compress the .deb:
- # True: dpkg-deb's default
- # False: -Znone
- # str: -Zstr (gzip, xz or none)
- # list: arbitrary options (e.g. -z9 -Zgz -Sfixed)
- self.compression = True
-
- # YAML filename to use, overriding normal search path
- self.yaml_file = None
-
- for k in ('longname', 'copyright', 'compression', 'help_text', 'disks', 'fanmade',
- 'engine', 'genre', 'missing_langs', 'franchise', 'wiki', 'wikibase',
- 'steam', 'gog', 'origin', 'url_misc', 'wikipedia',
- 'binary_executables', 'copyright_notice'):
- if k in self.data:
- setattr(self, k, self.data[k])
-
- if isinstance(self.engine, dict) and 'generic' not in self.engine:
- self.engine['generic'] = None
-
- assert type(self.missing_langs) is list
-
- if 'aliases' in self.data:
- self.aliases = set(self.data['aliases'])
-
- if 'try_repack_from' in self.data:
- paths = self.data['try_repack_from']
- if isinstance(paths, list):
- self.try_repack_from = paths
- elif isinstance(paths, str):
- self.try_repack_from = [paths]
- else:
- raise AssertionError('try_repack_from should be str or list')
-
- # True if the lazy load of full file info has been done
- self.loaded_file_data = False
-
- # Map from WantedFile name to instance.
- # { 'baseq3/pak1.pk3': WantedFile instance }
- self.files = {}
-
- # Map from FileGroup name to instance.
- self.groups = {}
-
- # Map from WantedFile name to a set of names of WantedFile instances
- # from which the file named in the key can be extracted or generated.
- # { 'baseq3/pak1.pk3': set(['linuxq3apoint-1.32b-3.x86.run']) }
- self.providers = {}
-
- # Map from WantedFile look_for name to a set of names of WantedFile
- # instances which might be it
- # { 'doom2.wad': set(['doom2.wad_1.9', 'doom2.wad_bfg', ...]) }
- self.known_filenames = {}
-
- # Map from WantedFile size to a set of names of WantedFile
- # instances which might be it
- # { 14604584: set(['doom2.wad_1.9']) }
- self.known_sizes = {}
-
- # Maps from md5, sha1, sha256 to a set of names of WantedFile instances
- # { '25e1459...': set(['doom2.wad_1.9']) }
- self.known_md5s = {}
- self.known_sha1s = {}
- self.known_sha256s = {}
-
- self._populate_files(self.data.get('files'))
-
- assert 'packages' in self.data
-
- for binary, data in self.data['packages'].items():
- # these should only be at top level, since they are global
- assert 'sha1sums' not in data, binary
- assert 'sha256sums' not in data, binary
-
- if 'DISABLED' in data:
- continue
- package = self.construct_package(binary, data)
- self.packages[binary] = package
- self._populate_package(package, data)
-
- if 'groups' in self.data:
- groups = self.data['groups']
- assert isinstance(groups, dict), self.shortname
-
- # Before doing anything else, we do one pass through the list
- # of groups to record that each one is a group, so that when we
- # encounter an entry that is not known to be a group in a
- # group's members, it is definitely a file.
- for group_name in groups:
- self._ensure_group(group_name)
-
- for group_name, group_data in groups.items():
- group = self.groups[group_name]
- attrs = {}
-
- if isinstance(group_data, dict):
- members = group_data['group_members']
- for k, v in group_data.items():
- if k != 'group_members':
- assert hasattr(group, k), k
- setattr(group, k, v)
- attrs[k] = v
- elif isinstance(group_data, (str, list)):
- members = group_data
- else:
- raise AssertionError('group %r should be dict, str or list' % group_name)
-
- if isinstance(members, str):
- for line in members.splitlines():
- f = self._add_hash(line.rstrip('\n'), 'size_and_md5')
- if f is not None:
- # f may be a WantedFile or a FileGroup,
- # this works for either
- group.group_members.add(f.name)
- group.apply_group_attributes(f)
-
- elif isinstance(members, list):
- for member_name in members:
- f = self.groups.get(member_name)
-
- if f is None:
- f = self._ensure_file(member_name)
-
- # f may be a WantedFile or a FileGroup,
- # this works for either
- group.group_members.add(f.name)
- group.apply_group_attributes(f)
- else:
- raise AssertionError('group %r members should be str or list' % group_name)
-
- if 'size_and_md5' in self.data:
- for line in self.data['size_and_md5'].splitlines():
- self._add_hash(line, 'size_and_md5')
-
- for alg in ('sha1', 'sha256'):
- if alg + 'sums' in self.data:
- for line in self.data[alg + 'sums'].splitlines():
- self._add_hash(line, alg)
-
- # compute webshop URL's
- gog_url = self.gog.get('url')
- gog_removed = self.gog.get('removed')
- gog_pp = '22d200f8670dbdb3e253a90eee5098477c95c23d' # ScummVM
- steam_id = {self.steam.get('id')}
- for p in sorted(self.packages.keys(), reverse=True):
- package = self.packages[p]
- if package.gog:
- gog_url = package.gog.get('url', gog_url)
- gog_removed = package.gog.get('removed', gog_removed)
- gog_pp = package.gog.get('pp', gog_pp)
- steam_id.add(package.steam.get('id'))
- if package.url_misc:
- self.url_misc = package.url_misc
- steam_id.discard(None)
- if steam_id:
- self.url_steam = 'http://store.steampowered.com/app/%s/' % min(steam_id)
- if gog_url and not gog_removed:
- self.url_gog = 'http://www.gog.com/game/' + gog_url + '?pp=' + gog_pp
-
- def edit_help_text(self):
- help_text = ''
-
- if len(self.packages) > 1 or self.disks:
- help_text = '\npackages possible for this game:\n'
- help = []
- has_multi_cd = False
- for package in self.packages.values():
- disks = package.disks or self.disks or 1
- longname = package.longname or self.longname
- if disks > 1:
- has_multi_cd = True
- longname += ' (%dCD)' % disks
- game_type = { 'demo' : 1,
- 'full' : 2,
- 'expansion' : 3}.get(package.type)
- help.append({ 'type' : game_type,
- 'year' : package.copyright or self.copyright,
- 'name' : package.name,
- 'longname': longname})
- for h in sorted(help, key=lambda k: (k['type'], k['year'][2:6], k['name'])):
- help_text += " %-40s %s\n" % (h['name'],h['longname'])
- if has_multi_cd and self.shortname != 'zork-inquisitor':
- help_text += "\nWARNING: for multi-cd games, you'll first need to ensure that all the data\n"
- help_text += " is accessible simultaneously, e.g. copy data from CD1 to CD3 in /tmp/cd{1-3}\n"
- help_text += " and let CD4 *mounted* in the drive.\n\n"
- help_text += " It's important to first mkdir '/tmp/cd1 /tmp/cd2 /tmp/cd3' because for some\n"
- help_text += " games there are different files across the disks with the same name that\n"
- help_text += " would be overwriten.\n\n"
- help_text += " If /tmp/ is on a tmpfs and you don't have something like 16GB of RAM,\n"
- help_text += " you'll likely need to store the files somewhere else.\n\n"
- help_text += " The game can then be packaged this way:\n"
- help_text += " $ game-data-packager {game} /tmp/cd1 /tmp/cd2 /tmp/cd3 /media/cdrom0\n\n"
-
- if self.help_text:
- help_text += '\n' + self.help_text
-
- if self.missing_langs:
- help_text += ('\nThe following languages are not '
- 'yet supported: %s\n' %
- ','.join(self.missing_langs))
-
- # advertise where to buy games
- # if it's not already in the help_text
- www = list()
- if self.url_steam and '://store.steampowered.com/' not in self.help_text:
- www.append(self.url_steam)
- if self.url_gog and '://www.gog.com/' not in self.help_text:
- www.append(self.url_gog)
- if self.url_misc:
- www.append(self.url_misc)
- if www:
- random.shuffle(www)
- help_text += '\nThis game can be bought online here:\n '
- help_text += '\n '.join(www)
-
- wikis = list()
- if self.wiki:
- wikis.append(self.wikibase + self.wiki)
- for p in sorted(self.packages.keys()):
- package = self.packages[p]
- if package.wiki:
- wikis.append(self.wikibase + package.wiki)
- if self.wikipedia:
- wikis.append(self.wikipedia)
- if wikis:
- help_text += '\nExternal links:\n '
- help_text += '\n '.join(wikis)
-
- return help_text
-
- def to_data(self, expand=True):
- files = {}
- groups = {}
- packages = {}
- ret = {}
-
- def sort_set_values(d):
- ret = {}
- for k, v in d.items():
- assert isinstance(v, set), (repr(k), repr(v))
- ret[k] = sorted(v)
- return ret
-
- for filename, f in self.files.items():
- data = f.to_data(expand=expand)
-
- if data or expand:
- files[filename] = data
-
- for name, g in self.groups.items():
- groups[name] = g.to_data(expand=expand, files=self.files)
-
- for name, package in self.packages.items():
- packages[name] = package.to_data(
- expand=expand, files=self.files, groups=self.groups)
-
- if files:
- ret['files'] = files
-
- if groups:
- ret['groups'] = groups
-
- if packages:
- ret['packages'] = packages
-
- if expand:
- for k in (
- 'known_filenames',
- 'known_md5s',
- 'known_sha1s',
- 'known_sha256s',
- 'known_sizes',
- 'providers',
- ):
- v = getattr(self, k)
- if v:
- ret[k] = sort_set_values(v)
-
- unknown_md5s = set()
- unknown_sha1s = set()
- unknown_sha256s = set()
-
- for filename, f in self.files.items():
- if f.alternatives:
- continue
-
- if f.md5 is None:
- unknown_md5s.add(filename)
-
- if f.sha1 is None:
- unknown_sha1s.add(filename)
-
- if f.sha256 is None:
- unknown_sha256s.add(filename)
-
- if unknown_md5s:
- ret['unknown_md5s'] = sorted(unknown_md5s)
-
- if unknown_sha1s:
- ret['unknown_sha1s'] = sorted(unknown_sha1s)
-
- if unknown_sha256s:
- ret['unknown_sha256s'] = sorted(unknown_sha256s)
-
- for k in (
- 'copyright',
- ):
- v = getattr(self, k)
- if v is not None:
- ret[k] = v
-
- for k in (
- 'copyright_notice',
- 'help_text',
- ):
- v = getattr(self, k)
- if v is not None:
- ret[k] = YamlLiteral(v)
-
- if expand or self.longname != self.shortname.title():
- ret['longname'] = self.longname
-
- sha1s = []
- sha256s = []
- ungrouped = []
- unknown_sizes = []
-
- for filename in sorted(self.files.keys()):
- f = self.files[filename]
-
- if f.sha1 is not None:
- sha1s.append('%-40s %s\n' % (f.sha1, f.name))
-
- if f.sha256 is not None:
- sha256s.append('%-64s %s\n' % (f.sha256, f.name))
-
- if f.size is None:
- unknown_sizes.append(filename)
-
- for g in self.groups.values():
- if filename in g.group_members:
- break
- else:
- size = f.size
- md5 = f.md5
-
- if size is None:
- size = '_'
-
- if md5 is None:
- md5 = '_'
-
- ungrouped.append('%-9s %32s %s\n' % (size, md5, filename))
-
- if unknown_sizes:
- ret['unknown_sizes'] = unknown_sizes
-
- if ungrouped:
- ret['size_and_md5'] = YamlLiteral(''.join(ungrouped))
-
- if sha1s:
- ret['sha1sums'] = YamlLiteral(''.join(sha1s))
-
- if sha256s:
- ret['sha256sums'] = YamlLiteral(''.join(sha256s))
-
- return ret
-
- def size(self, package):
- size_min = 0
- size_max = 0
- for file in package.install_files:
- if file.alternatives:
- # 'or 0' is a workaround for the files without known size
- size_min += min(set(self.files[a].size or 0 for a in file.alternatives))
- size_max += max(set(self.files[a].size or 0 for a in file.alternatives))
- elif file.size:
- size_min += file.size
- size_max += file.size
- for file in package.optional_files:
- if file.alternatives:
- size_max += max(set(self.files[a].size for a in file.alternatives))
- elif file.size:
- size_max += file.size
- return (size_min, size_max)
-
- def _populate_package(self, package, d):
- if isinstance(package.engine, dict):
- if isinstance(self.engine, dict):
- for k in self.engine:
- package.engine.setdefault(k, self.engine[k])
- else:
- package.engine.setdefault('generic', self.engine)
-
- assert self.copyright or package.copyright, package.name
-
- if 'demo_for' in d:
- if not package.longname:
- package.longname = self.longname + ' (demo)'
- else:
- assert 'demo' not in package.name or len(self.packages) == 1, \
- package.name + ' miss a demo_for tag.'
- if not package.longname and package.lang != 'en':
- package.longname = self.longname + ' (%s)' % package.lang
-
- if package.mutually_exclusive:
- assert package.demo_for or package.better_versions or package.relations['provides']
-
- def _populate_files(self, d, **kwargs):
- if d is None:
- return
-
- for filename, data in d.items():
- f = self._ensure_file(filename)
-
- for k in kwargs:
- setattr(f, k, kwargs[k])
-
- assert 'optional' not in data, filename
- if 'look_for' in data and 'install_as' in data:
- assert data['look_for'] != [data['install_as']], filename
- for k in (
- 'alternatives',
- 'distinctive_name',
- 'distinctive_size',
- 'doc',
- 'download',
- 'executable',
- 'install_as',
- 'install_to',
- 'license',
- 'look_for',
- 'md5',
- 'provides',
- 'sha1',
- 'sha256',
- 'size',
- 'skip_hash_matching',
- 'unpack',
- 'unsuitable',
- ):
- if k in data:
- setattr(f, k, data[k])
-
- def _ensure_group(self, name):
- assert name not in self.files, (self.shortname, name)
-
- if name not in self.groups:
- logger.debug('Adding group: %s', name)
- self.groups[name] = FileGroup(name)
-
- return self.groups[name]
-
- def _ensure_file(self, name):
- assert name not in self.groups, (self.shortname, name)
-
- if name not in self.files:
- logger.debug('Adding file: %s', name)
- self.files[name] = WantedFile(name)
-
- return self.files[name]
-
- def add_parser(self, parsers, base_parser, **kwargs):
- aliases = self.aliases
-
- longname = ascii_safe(self.longname)
-
- parser = parsers.add_parser(self.shortname,
- help=longname, aliases=aliases,
- description='Package data files for %s.' % longname,
- epilog=ascii_safe(self.edit_help_text()),
- formatter_class=argparse.RawDescriptionHelpFormatter,
- parents=(base_parser,),
- **kwargs)
-
- parser.add_argument('paths', nargs='*',
- metavar='DIRECTORY|FILE',
- help='Files to use in constructing the .deb')
-
- # There is only a --demo option if at least one package is a demo
- parser.set_defaults(demo=False)
- for package in self.packages.values():
- if package.demo_for:
- parser.add_argument('--demo', action='store_true',
- default=False,
- help='Build demo package even if files for full '
- + 'version are available')
- break
-
- self.argument_parser = parser
- return parser
-
- def _add_hash(self, line, alg):
- """Parse one line from md5sums-style data."""
-
- stripped = line.strip()
- if stripped == '' or stripped.startswith('#'):
- return
-
- if alg == 'size_and_md5':
- size, hexdigest, filename = line.split(None, 2)
- alg = 'md5'
- else:
- size = None
- hexdigest, filename = MD5SUM_DIVIDER.split(line, 1)
-
- if filename in self.groups:
- assert size in (None, '_'), \
- "%s group %s should not have size" % (
- self.shortname, filename)
- assert hexdigest in (None, '_'), \
- "%s group %s should not have hexdigest" % (
- self.shortname, filename)
- return self.groups[filename]
-
- f = self._ensure_file(filename)
-
- if size is not None and size != '_':
- f.size = int(size)
-
- if hexdigest is not None and hexdigest != '_':
- setattr(f, alg, hexdigest)
-
- return f
-
- def _populate_groups(self, stream):
- current_group = None
- attributes = {}
-
- for line in stream:
- stripped = line.strip()
-
- if stripped == '' or stripped.startswith('#'):
- continue
-
- # The group data starts with a list of groups. This is necessary
- # so we can know whether a group member, encountered later on in
- # the data, is a group or a file.
- if stripped.startswith('*'):
- assert current_group is None
- self._ensure_group(stripped[1:])
- # After that, [Group] opens a section for each group
- elif stripped.startswith('['):
- assert stripped.endswith(']'), repr(stripped)
- current_group = self._ensure_group(stripped[1:-1])
- attributes = {}
- # JSON metadata is on a line with {}
- elif stripped.startswith('{'):
- assert current_group is not None
- attributes = json.loads(stripped)
-
- for k, v in attributes.items():
- assert hasattr(current_group, k), k
- setattr(current_group, k, v)
- # Every other line is a member, either a file or a group
- else:
- f = self._add_hash(stripped, 'size_and_md5')
- # f can either be a WantedFile or a FileGroup here
- assert current_group is not None
- current_group.apply_group_attributes(f)
- current_group.group_members.add(f.name)
-
- def load_file_data(self,
- check=('GDP_UNINSTALLED' in os.environ),
- datadir=DATADIR,
- use_vfs=True):
- if self.loaded_file_data:
- return
-
- logger.debug('loading full data')
-
- if self.yaml_file is not None:
- data = yaml.load(
- open(self.yaml_file, encoding='utf-8'),
- Loader=yaml.CSafeLoader)
-
- for group_name, group_data in sorted(
- data.get('groups', {}).items()):
- group = self._ensure_group(group_name)
-
- if isinstance(group_data, dict):
- members = group_data['group_members']
- for k, v in group_data.items():
- if k != 'group_members':
- setattr(group, k, v)
- elif isinstance(group_data, (str, list)):
- members = group_data
- else:
- raise AssertionError(
- 'group %r should be dict, str or list' % group_name)
-
- has_members = False
-
- if isinstance(members, str):
- for line in members.splitlines():
- line = line.strip()
- if line and not line.startswith('#'):
- has_members = True
- f = self._add_hash(line, 'size_and_md5')
- # f can either be a WantedFile or a FileGroup here
- group.apply_group_attributes(f)
- group.group_members.add(f.name)
- elif isinstance(members, list):
- for m in members:
- has_members = True
- f = self._add_hash('? ? ' + m, 'size_and_md5')
- # f can either be a WantedFile or a FileGroup here
- group.apply_group_attributes(f)
- group.group_members.add(f.name)
- else:
- raise AssertionError(
- 'group %r members should be str or list' % group_name)
-
- # an empty group is no use, and would break the assumption
- # that we can use f.group_members to detect groups
- assert has_members
-
- for k in ('sha1sums', 'sha256sums', 'size_and_md5'):
- v = data.get(k, None)
-
- if k.endswith('sums'):
- k = k[:-4]
-
- if v is not None:
- for line in v.splitlines():
- stripped = line.strip()
-
- if stripped == '' or stripped.startswith('#'):
- continue
-
- self._add_hash(stripped, k)
-
- elif use_vfs:
- if isinstance(use_vfs, str):
- zip = use_vfs
- else:
- zip = os.path.join(DATADIR, 'vfs.zip')
-
- with zipfile.ZipFile(zip, 'r') as zf:
- files = zf.namelist()
-
- filename = '%s.groups' % self.shortname
- if filename in files:
- logger.debug('... %s/%s', zip, filename)
- stream = io.TextIOWrapper(zf.open(filename), encoding='utf-8')
- self._populate_groups(stream)
-
- filename = '%s.files' % self.shortname
- if filename in files:
- logger.debug('... %s/%s', zip, filename)
- jsondata = zf.open(filename).read().decode('utf-8')
- data = json.loads(jsondata)
- self._populate_files(data)
-
- for alg in ('sha1', 'sha256', 'size_and_md5'):
- filename = '%s.%s%s' % (self.shortname, alg,
- '' if alg == 'size_and_md5' else 'sums')
- if filename in files:
- logger.debug('... %s/%s', zip, filename)
- rawdata = zf.open(filename).read().decode('utf-8')
- for line in rawdata.splitlines():
- self._add_hash(line.rstrip('\n'), alg)
- else:
- vfs = os.path.join(DATADIR, 'vfs')
-
- if not os.path.isdir(vfs):
- vfs = DATADIR
-
- filename = os.path.join(vfs, '%s.groups' % self.shortname)
- if os.path.isfile(filename):
- logger.debug('... %s', filename)
- stream = open(filename, encoding='utf-8')
- self._populate_groups(stream)
-
- filename = os.path.join(vfs, '%s.files' % self.shortname)
- if os.path.isfile(filename):
- logger.debug('... %s', filename)
- data = json.load(open(filename, encoding='utf-8'))
- self._populate_files(data)
-
- for alg in ('sha1', 'sha256', 'size_and_md5'):
- filename = os.path.join(vfs, '%s.%s%s' %
- (self.shortname, alg,
- '' if alg == 'size_and_md5' else 'sums'))
- if os.path.isfile(filename):
- logger.debug('... %s', filename)
- with open(filename) as f:
- for line in f:
- self._add_hash(line.rstrip('\n'), alg)
-
- self.loaded_file_data = True
-
- for package in self.packages.values():
- d = self.data['packages'][package.name]
-
- for f in self._iter_expand_groups(
- d.get('doc', ()), include_groups=True):
- # WantedFile and FileGroup both have this
- assert hasattr(f, 'doc')
- f.doc = True
-
- for f in self._iter_expand_groups(
- d.get('license', ()), include_groups=True):
- # WantedFile and FileGroup both have this
- assert hasattr(f, 'license')
- f.license = True
-
- package.install_files = set(self._iter_expand_groups(package.install))
- package.optional_files = set(self._iter_expand_groups(package.optional))
- package.activated_by_files = set(self._iter_expand_groups(package.activated_by))
-
- # _iter_expand_groups could change the contents of self.files
- for filename, f in list(self.files.items()):
- f.provides_files = set(self._iter_expand_groups(f.provides))
-
- for filename, f in self.files.items():
- for provided in f.provides_files:
- self.providers.setdefault(provided.name, set()).add(filename)
-
- if f.alternatives:
- continue
-
- if f.distinctive_size and f.size is not None:
- self.known_sizes.setdefault(f.size, set()).add(filename)
-
- for lf in f.look_for:
- self.known_filenames.setdefault(lf, set()).add(filename)
-
- if f.md5 is not None:
- self.known_md5s.setdefault(f.md5, set()).add(filename)
-
- if f.sha1 is not None:
- self.known_sha1s.setdefault(f.sha1, set()).add(filename)
-
- if f.sha256 is not None:
- self.known_sha256s.setdefault(f.sha256, set()).add(filename)
-
- if not check:
- return
-
- # check for different files that share the same md5 & look_for
- for file in self.known_md5s:
- if len(self.known_md5s[file]) == 1:
- continue
- all_lf = set()
- for f in self.known_md5s[file]:
- assert not all_lf.intersection(self.files[f].look_for),(
- 'duplicate file description in %s: %s' %
- (self.shortname, ', '.join(self.known_md5s[file])) )
- all_lf |= self.files[f].look_for
-
- # consistency check
- for package in self.packages.values():
- if package.rip_cd:
- # we only support Ogg Vorbis for now
- assert package.rip_cd['encoding'] == 'vorbis', package.name
- self.rip_cd_packages.add(package)
-
- # there had better be something it wants to install, unless
- # specifically marked as empty
- if package.empty:
- assert not package.install_files, package.name
- assert not package.rip_cd, package.name
- else:
- assert package.install_files or package.rip_cd, \
- package.name
-
- # check internal dependencies
- for demo_for_item in package.demo_for:
- assert demo_for_item in self.packages, demo_for_item
-
- if package.expansion_for:
- if package.expansion_for not in self.packages:
- # It needs to be provided on all distributions,
- # so we can ignore contextual package relations,
- # which have package = None.
- #
- # We also already asserted that distro-independent
- # relations don't have alternatives (not that they
- # would be meaningful for provides).
- provider = None
-
- for other in self.packages.values():
- for provided in other.relations['provides']:
- if package.expansion_for == provided.package:
- provider = other
- break
-
- if provider is not None:
- break
- else:
- raise Exception('%s: %s: virtual package %s not found' %
- (self.shortname, package.name,
- package.expansion_for))
-
- if package.better_versions:
- for v in package.better_versions:
- assert v in self.packages, v
-
- # check for stale missing_langs
- if not package.demo_for:
- assert not set(package.langs).intersection(self.missing_langs)
-
- # check for missing 'version:'
- for file in package.install_files:
- if self.files[file.name].filename == 'version':
- assert package.version != GAME_PACKAGE_VERSION, package.name
-
- might_install = {}
-
- no_packaging = NoPackaging()
-
- for wanted in (package.install_files | package.optional_files):
- install_as = wanted.install_as
- install_to = no_packaging.substitute(package.install_to,
- package.name)
-
- if wanted.alternatives and install_as == '$alternative':
- batch = [self.files[alt] for alt in wanted.alternatives]
- else:
- batch = [wanted]
-
- # Do not add them to might_install immediately so that
- # alternatives with install_as = '$alternative' are allowed to
- # collide with each other (perhaps we have alternatives
- # foo.dat?1.0, foo.dat?2.0 and foo_censored.dat - that's fine
- # if we are never going to install both copies of foo.dat
- # together)
- batch_might_install = {}
-
- for installable in batch:
- if installable.install_to is not None:
- install_to = no_packaging.substitute(
- installable.install_to, installable.name,
- install_to=install_to)
-
- if install_as == '$alternative':
- install_to = os.path.join(install_to.strip('/'),
- installable.install_as)
- else:
- install_to = os.path.join(install_to.strip('/'),
- install_as)
-
- if install_to in might_install:
- raise AssertionError(
- 'Package {} tries to install both {} and '
- '{} as {}'.format(
- package.name, installable.name,
- might_install[install_to].name,
- install_to))
-
- batch_might_install[install_to] = installable
-
- might_install.update(batch_might_install)
-
- for filename, wanted in self.files.items():
- if wanted.unpack:
- assert 'format' in wanted.unpack, filename
- assert wanted.provides_files, filename
- for f in wanted.provides_files:
- assert f.alternatives == [], (filename, f.name)
- if wanted.unpack['format'] == 'cat':
- assert len(wanted.provides) == 1, filename
- assert isinstance(wanted.unpack['other_parts'],
- list), filename
- for other_part in wanted.unpack['other_parts']:
- assert other_part in self.files, (filename, other_part)
- elif wanted.unpack['format'] == 'xdelta':
- assert len(wanted.provides) == 1, filename
- assert len(wanted.unpack['other_parts']) == 1, filename
- assert isinstance(wanted.unpack['other_parts'][0], str), filename
- assert wanted.unpack['other_parts'][0] in self.files, filename
-
- if wanted.alternatives:
- for alt_name in wanted.alternatives:
- alt = self.files[alt_name]
- # an alternative can't be a placeholder for alternatives
- assert not alt.alternatives, alt_name
-
- # if this is a placeholder for a bunch of alternatives, then
- # it doesn't make sense for it to have a defined checksum
- # or size
- assert wanted.md5 is None, wanted.name
- assert wanted.sha1 is None, wanted.name
- assert wanted.sha256 is None, wanted.name
- assert wanted.size is None, wanted.name
- else:
- assert (wanted.size is not None or filename in
- self.data.get('unknown_sizes', ())
- ), (self.shortname, wanted.name)
-
- for name, group in self.groups.items():
- for member_name in group.group_members:
- assert member_name in self.files or member_name in self.groups
-
- def _iter_expand_groups(self, grouped, *, include_groups=False):
- """Given a set of strings that are either filenames or groups,
- yield the WantedFile instances for those names or the members of
- those groups, recursively.
-
- If include_groups is true, also yield the groups themselves.
- """
- for filename in grouped:
- group = self.groups.get(filename)
-
- if group is not None:
- if include_groups:
- yield group
-
- for x in self._iter_expand_groups(
- group.group_members, include_groups=include_groups):
- yield x
- else:
- yield self._ensure_file(filename)
-
- def construct_task(self, **kwargs):
- self.load_file_data()
- return PackagingTask(self, **kwargs)
-
- def construct_package(self, binary, data):
- return Package(binary, data)
-
- def gog_download_name(self, package):
- if package.gog == False:
- return
- gog = package.gog or self.gog
- if 'removed' in gog:
- return
- return gog.get('game') or gog.get('url')
-
-def load_games(game='*', datadir=DATADIR, use_vfs=True):
- progress = (game == '*' and sys.stderr.isatty() and
- not logger.isEnabledFor(logging.DEBUG))
- games = {}
-
- for yamlfile in glob.glob(os.path.join(datadir, game + '.yaml')):
- if os.path.basename(yamlfile).startswith('launch-'):
- continue
-
- load_game(progress, games, yamlfile, None, yaml_file=yamlfile)
-
- if use_vfs:
- if isinstance(use_vfs, str):
- zip = use_vfs
- else:
- zip = os.path.join(datadir, 'vfs.zip')
-
- with zipfile.ZipFile(zip, 'r') as zf:
- if game == '*':
- for entry in zf.infolist():
- if entry.filename.endswith('.json'):
- if entry.filename[:-5] in games:
- # shadowed by a YAML file
- continue
-
- jsonfile = '%s/%s' % (zip, entry.filename)
- jsondata = zf.open(entry).read().decode('utf-8')
- load_game(progress, games, jsonfile, jsondata)
- elif game in games:
- # shadowed by a YAML file
- pass
- else:
- jsonfile = game + '.json'
- jsondata = zf.open(jsonfile).read().decode('utf-8')
- load_game(progress, games, '%s/%s' % (zip, jsonfile), jsondata)
- else:
- vfs = os.path.join(DATADIR, 'vfs')
-
- if not os.path.isdir(vfs):
- vfs = DATADIR
-
- for jsonfile in glob.glob(os.path.join(vfs, game + '.json')):
- if os.path.basename(jsonfile[:-5]) in games:
- # shadowed by a YAML file
- continue
-
- jsondata = open(jsonfile, encoding='utf-8').read()
- load_game(progress, games, jsonfile, jsondata)
-
- if progress:
- print('\r%s\r' % (' ' * (len(games) // 4 + 1)), end='', flush=True, file=sys.stderr)
-
- return games
-
-def load_game(progress, games, filename, content, name=None, yaml_file=None):
- if progress:
- animation = ['.','-','*','#']
- modulo = int(load_game.counter) % len(animation)
- if modulo > 0:
- print('\b', end='', flush=True, file=sys.stderr)
- print(animation[modulo], end='', flush=True, file=sys.stderr)
- load_game.counter += 1
- try:
- if name is None:
- name = os.path.basename(filename)
- name = name[:len(name) - 5]
-
- if yaml_file is not None:
- data = yaml.load(
- open(yaml_file, encoding='utf-8'),
- Loader=yaml.CSafeLoader)
- elif filename.endswith('.yaml'):
- data = yaml.load(content, Loader=yaml.CSafeLoader)
- else:
- data = json.loads(content)
-
- plugin = data.get('plugin', name)
- plugin = plugin.replace('-', '_')
-
- try:
- plugin = importlib.import_module('game_data_packager.games.%s' %
- plugin)
- game_data_constructor = plugin.GAME_DATA_SUBCLASS
- except (ImportError, AttributeError) as e:
- logger.debug('No special code for %s: %s', name, e)
- assert 'game_data_packager.games' in e.msg, e
- game_data_constructor = GameData
-
- games[name] = game_data_constructor(name, data)
-
- if yaml_file is not None:
- games[name].yaml_file = yaml_file
- except:
- print('Error loading %s:\n' % filename)
- raise
-
-load_game.counter = 0
diff --git a/game_data_packager/command_line.py b/game_data_packager/command_line.py
index fe29fe2..f195d96 100644
--- a/game_data_packager/command_line.py
+++ b/game_data_packager/command_line.py
@@ -23,9 +23,9 @@ import sys
import time
import zipfile
-from . import (load_games)
from .config import (read_config)
from .data import (ProgressCallback)
+from .game import (load_games)
from .packaging import (get_packaging_system)
from .paths import (DATADIR)
from .util import (human_size)
diff --git a/game_data_packager/download.py b/game_data_packager/download.py
index 4842f94..ea545cf 100644
--- a/game_data_packager/download.py
+++ b/game_data_packager/download.py
@@ -166,7 +166,7 @@ if __name__ == '__main__':
import sys
- from . import (load_games)
+ from .game import (load_games)
game = sys.argv[1]
filename = sys.argv[2]
diff --git a/game_data_packager/__init__.py b/game_data_packager/game.py
similarity index 99%
copy from game_data_packager/__init__.py
copy to game_data_packager/game.py
index ac71bed..5df3643 100644
--- a/game_data_packager/__init__.py
+++ b/game_data_packager/game.py
@@ -39,11 +39,6 @@ from .version import (GAME_PACKAGE_VERSION)
logger = logging.getLogger(__name__)
-if os.environ.get('DEBUG') or os.environ.get('GDP_DEBUG'):
- logging.getLogger().setLevel(logging.DEBUG)
-else:
- logging.getLogger().setLevel(logging.INFO)
-
MD5SUM_DIVIDER = re.compile(r' [ *]?')
class GameData(object):
@@ -1148,3 +1143,4 @@ def load_game(progress, games, filename, content, name=None, yaml_file=None):
raise
load_game.counter = 0
+
diff --git a/game_data_packager/games/doom_common.py b/game_data_packager/games/doom_common.py
index cc2b33d..6795c1e 100644
--- a/game_data_packager/games/doom_common.py
+++ b/game_data_packager/games/doom_common.py
@@ -21,9 +21,9 @@ import logging
import os
import subprocess
-from .. import GameData
from ..build import (PackagingTask)
from ..data import (Package)
+from ..game import (GameData)
from ..paths import DATADIR
from ..util import (copy_with_substitutions, mkdir_p)
diff --git a/game_data_packager/games/dosbox.py b/game_data_packager/games/dosbox.py
index 565e30a..c90e200 100755
--- a/game_data_packager/games/dosbox.py
+++ b/game_data_packager/games/dosbox.py
@@ -22,9 +22,9 @@ import configparser
import logging
import os
-from .. import GameData
from ..build import (PackagingTask)
from ..data import (Package)
+from ..game import (GameData)
from ..util import (mkdir_p)
logger = logging.getLogger(__name__)
diff --git a/game_data_packager/games/ecwolf_common.py b/game_data_packager/games/ecwolf_common.py
index ff7b3a8..05738e9 100644
--- a/game_data_packager/games/ecwolf_common.py
+++ b/game_data_packager/games/ecwolf_common.py
@@ -21,9 +21,9 @@ import logging
import os
import subprocess
-from .. import GameData
from ..build import (PackagingTask)
from ..data import (Package)
+from ..game import (GameData)
from ..paths import DATADIR
from ..util import (mkdir_p)
diff --git a/game_data_packager/games/lgeneral.py b/game_data_packager/games/lgeneral.py
index 798eb97..e9699a8 100644
--- a/game_data_packager/games/lgeneral.py
+++ b/game_data_packager/games/lgeneral.py
@@ -19,8 +19,8 @@ import os
import shutil
import subprocess
-from .. import (GameData)
from ..build import (PackagingTask, NoPackagesPossible)
+from ..game import (GameData)
from ..util import (mkdir_p, which)
logger = logging.getLogger(__name__)
diff --git a/game_data_packager/games/morrowind.py b/game_data_packager/games/morrowind.py
index f302ee0..d91e1d0 100644
--- a/game_data_packager/games/morrowind.py
+++ b/game_data_packager/games/morrowind.py
@@ -18,8 +18,8 @@
import logging
import os
-from .. import (GameData)
from ..build import (PackagingTask)
+from ..game import (GameData)
from ..util import (check_call)
logger = logging.getLogger(__name__)
diff --git a/game_data_packager/games/quake.py b/game_data_packager/games/quake.py
index 1646716..7ddd8bf 100644
--- a/game_data_packager/games/quake.py
+++ b/game_data_packager/games/quake.py
@@ -18,8 +18,8 @@
import logging
import os
-from .. import GameData
from ..build import (PackagingTask)
+from ..game import GameData
from ..util import TemporaryUmask, mkdir_p
logger = logging.getLogger(__name__)
diff --git a/game_data_packager/games/quake2.py b/game_data_packager/games/quake2.py
index 33dd9e4..dea6fdf 100644
--- a/game_data_packager/games/quake2.py
+++ b/game_data_packager/games/quake2.py
@@ -20,8 +20,8 @@ import os
import subprocess
import tarfile
-from .. import (GameData)
from ..build import (PackagingTask)
+from ..game import (GameData)
logger = logging.getLogger(__name__)
diff --git a/game_data_packager/games/residualvm_common.py b/game_data_packager/games/residualvm_common.py
index 021e096..8700d2d 100644
--- a/game_data_packager/games/residualvm_common.py
+++ b/game_data_packager/games/residualvm_common.py
@@ -21,8 +21,8 @@ import logging
import os
import subprocess
-from .. import GameData
from ..build import (PackagingTask)
+from ..game import (GameData)
from ..paths import DATADIR
from ..util import (mkdir_p)
diff --git a/game_data_packager/games/rott.py b/game_data_packager/games/rott.py
index 08197a5..993a156 100644
--- a/game_data_packager/games/rott.py
+++ b/game_data_packager/games/rott.py
@@ -17,7 +17,7 @@
import logging
-from .. import GameData
+from ..game import (GameData)
logger = logging.getLogger(__name__)
diff --git a/game_data_packager/games/scummvm_common.py b/game_data_packager/games/scummvm_common.py
index ced36a0..282c1b3 100644
--- a/game_data_packager/games/scummvm_common.py
+++ b/game_data_packager/games/scummvm_common.py
@@ -21,8 +21,8 @@ import logging
import os
import subprocess
-from .. import GameData
from ..build import (PackagingTask)
+from ..game import (GameData)
from ..paths import DATADIR
from ..util import (mkdir_p)
diff --git a/game_data_packager/games/unreal.py b/game_data_packager/games/unreal.py
index 72a519c..5e94e5c 100644
--- a/game_data_packager/games/unreal.py
+++ b/game_data_packager/games/unreal.py
@@ -18,8 +18,8 @@
import logging
import os
-from .. import (GameData)
from ..build import (PackagingTask)
+from ..game import (GameData)
from ..util import (TemporaryUmask, mkdir_p)
logger = logging.getLogger(__name__)
diff --git a/game_data_packager/games/wolf3d.py b/game_data_packager/games/wolf3d.py
index 6e3c617..f0d28a7 100644
--- a/game_data_packager/games/wolf3d.py
+++ b/game_data_packager/games/wolf3d.py
@@ -17,7 +17,7 @@
import logging
-from .. import GameData
+from ..game import (GameData)
logger = logging.getLogger(__name__)
diff --git a/game_data_packager/games/z_code.py b/game_data_packager/games/z_code.py
index 77988e4..a3e602b 100644
--- a/game_data_packager/games/z_code.py
+++ b/game_data_packager/games/z_code.py
@@ -22,9 +22,9 @@ import logging
import os
import re
-from .. import GameData
from ..build import (PackagingTask)
from ..data import (Package)
+from ..game import (GameData)
from ..util import (TemporaryUmask,
which,
mkdir_p)
diff --git a/tools/babel.py b/tools/babel.py
index 60338ff..cc2cd3d 100755
--- a/tools/babel.py
+++ b/tools/babel.py
@@ -18,8 +18,8 @@
# Online at http://pkg-games.alioth.debian.org/game-data/
-from game_data_packager import (load_games)
from game_data_packager.build import (FillResult)
+from game_data_packager.game import (load_games)
SHOPS=[('url_steam', 'Steam', 'https://steamcommunity.com/groups/debian_gdp#curation'),
('url_gog', 'GOG.com', 'https://www.gog.com/mix/games_supported_by_debians_gamedatapackager'),
diff --git a/tools/check_equivalence.py b/tools/check_equivalence.py
index 9ccf046..7e047e7 100755
--- a/tools/check_equivalence.py
+++ b/tools/check_equivalence.py
@@ -23,7 +23,7 @@ import time
import yaml
from contextlib import suppress
-from game_data_packager import (load_games, load_game)
+from game_data_packager.game import (load_games, load_game)
from game_data_packager.util import ascii_safe
def dump(serialized):
diff --git a/tools/check_gog.py b/tools/check_gog.py
index 686ece5..f1de38e 100755
--- a/tools/check_gog.py
+++ b/tools/check_gog.py
@@ -20,10 +20,10 @@
import json
import os
import subprocess
+from distutils.version import LooseVersion
-from game_data_packager import load_games
+from game_data_packager.game import load_games
from game_data_packager.gog import GOG
-from distutils.version import LooseVersion
yaml_files = {}
owned_files = []
diff --git a/tools/check_steam.py b/tools/check_steam.py
index aabaede..21ae4ab 100755
--- a/tools/check_steam.py
+++ b/tools/check_steam.py
@@ -17,7 +17,8 @@
import json
import urllib.request
-from game_data_packager import load_games
+
+from game_data_packager.game import load_games
from game_data_packager.util import AGENT
url = 'https://github.com/SteamDatabase/SteamLinux/raw/master/GAMES.json'
diff --git a/tools/check_syntax.py b/tools/check_syntax.py
index a1d8f99..3832f9b 100755
--- a/tools/check_syntax.py
+++ b/tools/check_syntax.py
@@ -18,7 +18,7 @@
import os
import yaml
-from game_data_packager import load_games
+from game_data_packager.game import load_games
from game_data_packager.util import ascii_safe
if __name__ == '__main__':
diff --git a/tools/mirror.py b/tools/mirror.py
index b4178c6..ae36a69 100755
--- a/tools/mirror.py
+++ b/tools/mirror.py
@@ -30,10 +30,10 @@ import argparse
import os
import urllib
-from game_data_packager import (load_games)
from game_data_packager.build import (choose_mirror)
from game_data_packager.command_line import (TerminalProgress)
from game_data_packager.data import (HashedFile)
+from game_data_packager.game import (load_games)
from game_data_packager.util import (AGENT)
archives = []
diff --git a/tools/spider.py b/tools/spider.py
index 14148cf..bf99893 100755
--- a/tools/spider.py
+++ b/tools/spider.py
@@ -22,8 +22,10 @@
import sys
import time
import urllib.request
+
from bs4 import BeautifulSoup
-from game_data_packager import load_games
+
+from game_data_packager.game import load_games
from game_data_packager.util import AGENT
CSV = 'data/wikipedia.csv'
diff --git a/tools/stats.py b/tools/stats.py
index 25a8312..2c56de9 100755
--- a/tools/stats.py
+++ b/tools/stats.py
@@ -15,7 +15,7 @@
# You can find the GPL license text on a Debian system under
# /usr/share/common-licenses/GPL-2.
-from game_data_packager import (load_games)
+from game_data_packager.game import (load_games)
games = []
order = { 'demo' : 1,
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-games/game-data-packager.git
More information about the Pkg-games-commits
mailing list