[Pkg-mozext-commits] [adblock-plus] 64/87: Issue 3952 - Fix whitespaces for compliance with PEP-8
David Prévot
taffit at moszumanska.debian.org
Sat Apr 30 17:59:09 UTC 2016
This is an automated email from the git hooks/post-receive script.
taffit pushed a commit to branch master
in repository adblock-plus.
commit 93e54d41ef7b2d1441f81b01822cbabac11fb84c
Author: Sebastian Noack <sebastian at adblockplus.org>
Date: Mon Apr 18 14:45:28 2016 +0200
Issue 3952 - Fix whitespaces for compliance with PEP-8
---
 build.py                   | 939 +++++++++++++++++++++++----------------------
 chainedconfigparser.py     | 315 +++++++--------
 ensure_dependencies.py     | 543 +++++++++++++-------------
 localeTools.py             | 800 +++++++++++++++++++-------------------
 packager.py                | 228 +++++------
 packagerChrome.py          | 640 +++++++++++++++---------------
 packagerGecko.py           | 618 ++++++++++++++---------------
 packagerSafari.py          | 456 +++++++++++-----------
 publicSuffixListUpdater.py |  77 ++--
 releaseAutomation.py       | 204 +++++-----
10 files changed, 2478 insertions(+), 2342 deletions(-)
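[Editor's note: the change is purely mechanical. Grouped imports are split one per line, top-level definitions get two blank lines between them, and the previous two-space indentation becomes the four spaces PEP 8 prescribes. A minimal before/after sketch of the pattern (illustrative only, the names are not taken from the diff):

    # Before: grouped import, two-space indent
    import os, sys

    def cwd():
      return os.getcwd()

    # After: one import per line, four-space indent,
    # two blank lines between top-level definitions
    import os
    import sys


    def cwd():
        return os.getcwd()
]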
diff --git a/build.py b/build.py
index 3540320..da27191 100644
--- a/build.py
+++ b/build.py
@@ -4,110 +4,128 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-import os, sys, re, subprocess, shutil, buildtools
+import os
+import sys
+import re
+import subprocess
+import shutil
+import buildtools
from getopt import getopt, GetoptError
from StringIO import StringIO
from zipfile import ZipFile
knownTypes = ('gecko', 'chrome', 'safari', 'generic')
+
class Command(object):
- name = property(lambda self: self._name)
- shortDescription = property(lambda self: self._shortDescription,
- lambda self, value: self.__dict__.update({'_shortDescription': value}))
- description = property(lambda self: self._description,
- lambda self, value: self.__dict__.update({'_description': value}))
- params = property(lambda self: self._params,
- lambda self, value: self.__dict__.update({'_params': value}))
- supportedTypes = property(lambda self: self._supportedTypes,
- lambda self, value: self.__dict__.update({'_supportedTypes': value}))
- options = property(lambda self: self._options)
-
- def __init__(self, handler, name):
- self._handler = handler
- self._name = name
- self._shortDescription = ''
- self._description = ''
- self._params = ''
- self._supportedTypes = None
- self._options = []
- self.addOption('Show this message and exit', short='h', long='help')
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- pass
-
- def __call__(self, baseDir, scriptName, opts, args, type):
- return self._handler(baseDir, scriptName, opts, args, type)
-
- def isSupported(self, type):
- return self._supportedTypes == None or type in self._supportedTypes
-
- def addOption(self, description, short=None, long=None, value=None, types=None):
- self._options.append((description, short, long, value, types))
-
- def parseArgs(self, type, args):
- shortOptions = map(
- lambda o: o[1]+':' if o[3] != None else o[1],
- filter(
- lambda o: o[1] != None and (o[4] == None or type in o[4]),
- self._options
- )
+ name = property(lambda self: self._name)
+ shortDescription = property(
+ lambda self: self._shortDescription,
+ lambda self, value: self.__dict__.update({'_shortDescription': value})
+ )
+ description = property(
+ lambda self: self._description,
+ lambda self, value: self.__dict__.update({'_description': value})
+ )
+ params = property(
+ lambda self: self._params,
+ lambda self, value: self.__dict__.update({'_params': value})
)
- longOptions = map(
- lambda o: o[2]+'=' if o[3] != None else o[2],
- filter(
- lambda o: o[2] != None and (o[4] == None or type in o[4]),
- self._options
- )
+ supportedTypes = property(
+ lambda self: self._supportedTypes,
+ lambda self, value: self.__dict__.update({'_supportedTypes': value})
)
- return getopt(args, ''.join(shortOptions), longOptions)
+ options = property(lambda self: self._options)
+
+ def __init__(self, handler, name):
+ self._handler = handler
+ self._name = name
+ self._shortDescription = ''
+ self._description = ''
+ self._params = ''
+ self._supportedTypes = None
+ self._options = []
+ self.addOption('Show this message and exit', short='h', long='help')
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
+
+ def __call__(self, baseDir, scriptName, opts, args, type):
+ return self._handler(baseDir, scriptName, opts, args, type)
+
+ def isSupported(self, type):
+ return self._supportedTypes == None or type in self._supportedTypes
+
+ def addOption(self, description, short=None, long=None, value=None, types=None):
+ self._options.append((description, short, long, value, types))
+
+ def parseArgs(self, type, args):
+ shortOptions = map(
+ lambda o: o[1] + ':' if o[3] != None else o[1],
+ filter(
+ lambda o: o[1] != None and (o[4] == None or type in o[4]),
+ self._options
+ )
+ )
+ longOptions = map(
+ lambda o: o[2] + '=' if o[3] != None else o[2],
+ filter(
+ lambda o: o[2] != None and (o[4] == None or type in o[4]),
+ self._options
+ )
+ )
+ return getopt(args, ''.join(shortOptions), longOptions)
commandsList = []
commands = {}
+
+
def addCommand(handler, name):
- if isinstance(name, basestring):
- aliases = ()
- else:
- name, aliases = (name[0], name[1:])
-
- global commandsList, commands
- command = Command(handler, name)
- commandsList.append(command)
- commands[name] = command
- for alias in aliases:
- commands[alias] = command
- return command
+ if isinstance(name, basestring):
+ aliases = ()
+ else:
+ name, aliases = (name[0], name[1:])
+
+ global commandsList, commands
+ command = Command(handler, name)
+ commandsList.append(command)
+ commands[name] = command
+ for alias in aliases:
+ commands[alias] = command
+ return command
+
def splitByLength(string, maxLen):
- parts = []
- currentPart = ''
- for match in re.finditer(r'\s*(\S+)', string):
- if len(match.group(0)) + len(currentPart) < maxLen:
- currentPart += match.group(0)
- else:
- parts.append(currentPart)
- currentPart = match.group(1)
- if len(currentPart):
- parts.append(currentPart)
- return parts
+ parts = []
+ currentPart = ''
+ for match in re.finditer(r'\s*(\S+)', string):
+ if len(match.group(0)) + len(currentPart) < maxLen:
+ currentPart += match.group(0)
+ else:
+ parts.append(currentPart)
+ currentPart = match.group(1)
+ if len(currentPart):
+ parts.append(currentPart)
+ return parts
+
def usage(scriptName, type, commandName=None):
- if commandName == None:
- global commandsList
- descriptions = []
- for command in commandsList:
- if not command.isSupported(type):
- continue
- commandText = ('%s %s' % (command.name, command.params)).ljust(39)
- descriptionParts = splitByLength(command.shortDescription, 29)
- descriptions.append(' %s [-t %s] %s %s' % (scriptName, type, commandText, descriptionParts[0]))
- for part in descriptionParts[1:]:
- descriptions.append(' %s %s %s %s' % (' ' * len(scriptName), ' ' * len(type), ' ' * len(commandText), part))
- print '''Usage:
+ if commandName == None:
+ global commandsList
+ descriptions = []
+ for command in commandsList:
+ if not command.isSupported(type):
+ continue
+ commandText = ('%s %s' % (command.name, command.params)).ljust(39)
+ descriptionParts = splitByLength(command.shortDescription, 29)
+ descriptions.append(' %s [-t %s] %s %s' % (scriptName, type, commandText, descriptionParts[0]))
+ for part in descriptionParts[1:]:
+ descriptions.append(' %s %s %s %s' % (' ' * len(scriptName), ' ' * len(type), ' ' * len(commandText), part))
+ print '''Usage:
%(descriptions)s
@@ -115,485 +133,490 @@ For details on a command run:
%(scriptName)s [-t %(type)s] <command> --help
''' % {
- 'scriptName': scriptName,
- 'type': type,
- 'descriptions': '\n'.join(descriptions)
- }
- else:
- global commands
- command = commands[commandName]
- description = '\n'.join(map(lambda s: '\n'.join(splitByLength(s, 80)), command.description.split('\n')))
- options = []
- for descr, short, long, value, types in command.options:
- if types != None and type not in types:
- continue
- if short == None:
- shortText = ''
- elif value == None:
- shortText = '-%s' % short
- else:
- shortText = '-%s %s' % (short, value)
- if long == None:
- longText = ''
- elif value == None:
- longText = '--%s' % long
- else:
- longText = '--%s=%s' % (long, value)
- descrParts = splitByLength(descr, 46)
- options.append(' %s %s %s' % (shortText.ljust(11), longText.ljust(19), descrParts[0]))
- for part in descrParts[1:]:
- options.append(' %s %s %s' % (' ' * 11, ' ' * 19, part))
- print '''%(scriptName)s [-t %(type)s] %(name)s %(params)s
+ 'scriptName': scriptName,
+ 'type': type,
+ 'descriptions': '\n'.join(descriptions)
+ }
+ else:
+ global commands
+ command = commands[commandName]
+ description = '\n'.join(map(lambda s: '\n'.join(splitByLength(s, 80)), command.description.split('\n')))
+ options = []
+ for descr, short, long, value, types in command.options:
+ if types != None and type not in types:
+ continue
+ if short == None:
+ shortText = ''
+ elif value == None:
+ shortText = '-%s' % short
+ else:
+ shortText = '-%s %s' % (short, value)
+ if long == None:
+ longText = ''
+ elif value == None:
+ longText = '--%s' % long
+ else:
+ longText = '--%s=%s' % (long, value)
+ descrParts = splitByLength(descr, 46)
+ options.append(' %s %s %s' % (shortText.ljust(11), longText.ljust(19), descrParts[0]))
+ for part in descrParts[1:]:
+ options.append(' %s %s %s' % (' ' * 11, ' ' * 19, part))
+ print '''%(scriptName)s [-t %(type)s] %(name)s %(params)s
%(description)s
Options:
%(options)s
''' % {
- 'scriptName': scriptName,
- 'type': type,
- 'name': command.name,
- 'params': command.params,
- 'description': description,
- 'options': '\n'.join(options)
- }
+ 'scriptName': scriptName,
+ 'type': type,
+ 'name': command.name,
+ 'params': command.params,
+ 'description': description,
+ 'options': '\n'.join(options)
+ }
def runBuild(baseDir, scriptName, opts, args, type):
- locales = None
- buildNum = None
- multicompartment = False
- releaseBuild = False
- keyFile = None
- for option, value in opts:
- if option in ('-l', '--locales'):
- locales = value.split(',')
- elif option in ('-b', '--build'):
- buildNum = int(value)
- elif option in ('-k', '--key'):
- keyFile = value
- elif option in ('-m', '--multi-compartment'):
- multicompartment = True
- elif option in ('-r', '--release'):
- releaseBuild = True
- outFile = args[0] if len(args) > 0 else None
-
- if type == 'gecko':
- import buildtools.packagerGecko as packager
- packager.createBuild(baseDir, type=type, outFile=outFile, locales=locales, buildNum=buildNum,
- releaseBuild=releaseBuild, keyFile=keyFile,
- multicompartment=multicompartment)
- elif type == 'chrome':
- import buildtools.packagerChrome as packager
- packager.createBuild(baseDir, type=type, outFile=outFile, buildNum=buildNum,
- releaseBuild=releaseBuild, keyFile=keyFile)
- elif type == 'safari':
- import buildtools.packagerSafari as packager
- packager.createBuild(baseDir, type=type, outFile=outFile, buildNum=buildNum,
- releaseBuild=releaseBuild, keyFile=keyFile)
+ locales = None
+ buildNum = None
+ multicompartment = False
+ releaseBuild = False
+ keyFile = None
+ for option, value in opts:
+ if option in ('-l', '--locales'):
+ locales = value.split(',')
+ elif option in ('-b', '--build'):
+ buildNum = int(value)
+ elif option in ('-k', '--key'):
+ keyFile = value
+ elif option in ('-m', '--multi-compartment'):
+ multicompartment = True
+ elif option in ('-r', '--release'):
+ releaseBuild = True
+ outFile = args[0] if len(args) > 0 else None
+
+ if type == 'gecko':
+ import buildtools.packagerGecko as packager
+ packager.createBuild(baseDir, type=type, outFile=outFile, locales=locales, buildNum=buildNum,
+ releaseBuild=releaseBuild, keyFile=keyFile,
+ multicompartment=multicompartment)
+ elif type == 'chrome':
+ import buildtools.packagerChrome as packager
+ packager.createBuild(baseDir, type=type, outFile=outFile, buildNum=buildNum,
+ releaseBuild=releaseBuild, keyFile=keyFile)
+ elif type == 'safari':
+ import buildtools.packagerSafari as packager
+ packager.createBuild(baseDir, type=type, outFile=outFile, buildNum=buildNum,
+ releaseBuild=releaseBuild, keyFile=keyFile)
def runAutoInstall(baseDir, scriptName, opts, args, type):
- if len(args) == 0:
- print 'Port of the Extension Auto-Installer needs to be specified'
- usage(scriptName, type, 'autoinstall')
- return
-
- multicompartment = False
- for option, value in opts:
- if option in ('-m', '--multi-compartment'):
- multicompartment = True
-
- if ':' in args[0]:
- host, port = args[0].rsplit(':', 1)
- else:
- host, port = ('localhost', args[0])
+ if len(args) == 0:
+ print 'Port of the Extension Auto-Installer needs to be specified'
+ usage(scriptName, type, 'autoinstall')
+ return
+
+ multicompartment = False
+ for option, value in opts:
+ if option in ('-m', '--multi-compartment'):
+ multicompartment = True
+
+ if ':' in args[0]:
+ host, port = args[0].rsplit(':', 1)
+ else:
+ host, port = ('localhost', args[0])
- import buildtools.packagerGecko as packager
- packager.autoInstall(baseDir, type, host, port, multicompartment=multicompartment)
+ import buildtools.packagerGecko as packager
+ packager.autoInstall(baseDir, type, host, port, multicompartment=multicompartment)
def createDevEnv(baseDir, scriptName, opts, args, type):
- if type == 'safari':
- import buildtools.packagerSafari as packager
- else:
- import buildtools.packagerChrome as packager
+ if type == 'safari':
+ import buildtools.packagerSafari as packager
+ else:
+ import buildtools.packagerChrome as packager
- file = StringIO()
- packager.createBuild(baseDir, type=type, outFile=file, devenv=True, releaseBuild=True)
+ file = StringIO()
+ packager.createBuild(baseDir, type=type, outFile=file, devenv=True, releaseBuild=True)
- from buildtools.packager import getDevEnvPath
- devenv_dir = getDevEnvPath(baseDir, type)
+ from buildtools.packager import getDevEnvPath
+ devenv_dir = getDevEnvPath(baseDir, type)
- shutil.rmtree(devenv_dir, ignore_errors=True)
+ shutil.rmtree(devenv_dir, ignore_errors=True)
- file.seek(0)
- with ZipFile(file, 'r') as zip_file:
- zip_file.extractall(devenv_dir)
+ file.seek(0)
+ with ZipFile(file, 'r') as zip_file:
+ zip_file.extractall(devenv_dir)
def readLocaleConfig(baseDir, type, metadata):
- if type == 'gecko':
- import buildtools.packagerGecko as packager
- localeDir = packager.getLocalesDir(baseDir)
- localeConfig = {
- 'name_format': 'BCP-47',
- 'file_format': 'gecko-dtd',
- 'target_platforms': {'gecko'},
- 'default_locale': packager.defaultLocale
- }
- elif type == 'chrome':
- import buildtools.packagerChrome as packager
- localeDir = os.path.join(baseDir, '_locales')
- localeConfig = {
- 'name_format': 'ISO-15897',
- 'file_format': 'chrome-json',
- 'target_platforms': {'chrome'},
- 'default_locale': packager.defaultLocale,
- }
- else:
- localeDir = os.path.join(baseDir,
- *metadata.get('locales', 'base_path').split('/'))
- localeConfig = {
- 'name_format': metadata.get('locales', 'name_format'),
- 'file_format': metadata.get('locales', 'file_format'),
- 'target_platforms': set(metadata.get('locales',
- 'target_platforms').split()),
- 'default_locale': metadata.get('locales', 'default_locale')
- }
-
- localeConfig['base_path'] = localeDir
-
- locales = [(locale, os.path.join(localeDir, locale))
- for locale in os.listdir(localeDir)]
- if localeConfig['name_format'] == 'ISO-15897':
- locales = [(locale.replace('_', '-'), localePath)
- for locale, localePath in locales]
- localeConfig['locales'] = dict(locales)
-
- return localeConfig
+ if type == 'gecko':
+ import buildtools.packagerGecko as packager
+ localeDir = packager.getLocalesDir(baseDir)
+ localeConfig = {
+ 'name_format': 'BCP-47',
+ 'file_format': 'gecko-dtd',
+ 'target_platforms': {'gecko'},
+ 'default_locale': packager.defaultLocale
+ }
+ elif type == 'chrome':
+ import buildtools.packagerChrome as packager
+ localeDir = os.path.join(baseDir, '_locales')
+ localeConfig = {
+ 'name_format': 'ISO-15897',
+ 'file_format': 'chrome-json',
+ 'target_platforms': {'chrome'},
+ 'default_locale': packager.defaultLocale,
+ }
+ else:
+ localeDir = os.path.join(
+ baseDir, *metadata.get('locales', 'base_path').split('/')
+ )
+ localeConfig = {
+ 'name_format': metadata.get('locales', 'name_format'),
+ 'file_format': metadata.get('locales', 'file_format'),
+ 'target_platforms': set(metadata.get('locales',
+ 'target_platforms').split()),
+ 'default_locale': metadata.get('locales', 'default_locale')
+ }
+
+ localeConfig['base_path'] = localeDir
+
+ locales = [(locale, os.path.join(localeDir, locale))
+ for locale in os.listdir(localeDir)]
+ if localeConfig['name_format'] == 'ISO-15897':
+ locales = [(locale.replace('_', '-'), localePath)
+ for locale, localePath in locales]
+ localeConfig['locales'] = dict(locales)
+
+ return localeConfig
+
def setupTranslations(baseDir, scriptName, opts, args, type):
- if len(args) < 1:
- print 'Project key is required to update translation master files.'
- usage(scriptName, type, 'setuptrans')
- return
+ if len(args) < 1:
+ print 'Project key is required to update translation master files.'
+ usage(scriptName, type, 'setuptrans')
+ return
- key = args[0]
+ key = args[0]
- from buildtools.packager import readMetadata
- metadata = readMetadata(baseDir, type)
+ from buildtools.packager import readMetadata
+ metadata = readMetadata(baseDir, type)
- basename = metadata.get('general', 'basename')
- localeConfig = readLocaleConfig(baseDir, type, metadata)
+ basename = metadata.get('general', 'basename')
+ localeConfig = readLocaleConfig(baseDir, type, metadata)
- import buildtools.localeTools as localeTools
- localeTools.setupTranslations(localeConfig, basename, key)
+ import buildtools.localeTools as localeTools
+ localeTools.setupTranslations(localeConfig, basename, key)
def updateTranslationMaster(baseDir, scriptName, opts, args, type):
- if len(args) < 1:
- print 'Project key is required to update translation master files.'
- usage(scriptName, type, 'translate')
- return
+ if len(args) < 1:
+ print 'Project key is required to update translation master files.'
+ usage(scriptName, type, 'translate')
+ return
- key = args[0]
+ key = args[0]
- from buildtools.packager import readMetadata
- metadata = readMetadata(baseDir, type)
+ from buildtools.packager import readMetadata
+ metadata = readMetadata(baseDir, type)
- basename = metadata.get('general', 'basename')
- localeConfig = readLocaleConfig(baseDir, type, metadata)
+ basename = metadata.get('general', 'basename')
+ localeConfig = readLocaleConfig(baseDir, type, metadata)
- defaultLocaleDir = os.path.join(localeConfig['base_path'],
- localeConfig['default_locale'])
+ defaultLocaleDir = os.path.join(localeConfig['base_path'],
+ localeConfig['default_locale'])
- import buildtools.localeTools as localeTools
- localeTools.updateTranslationMaster(localeConfig, metadata, defaultLocaleDir,
- basename, key)
+ import buildtools.localeTools as localeTools
+ localeTools.updateTranslationMaster(localeConfig, metadata, defaultLocaleDir,
+ basename, key)
def uploadTranslations(baseDir, scriptName, opts, args, type):
- if len(args) < 1:
- print 'Project key is required to upload existing translations.'
- usage(scriptName, type, 'uploadtrans')
- return
+ if len(args) < 1:
+ print 'Project key is required to upload existing translations.'
+ usage(scriptName, type, 'uploadtrans')
+ return
- key = args[0]
+ key = args[0]
- from buildtools.packager import readMetadata
- metadata = readMetadata(baseDir, type)
+ from buildtools.packager import readMetadata
+ metadata = readMetadata(baseDir, type)
- basename = metadata.get('general', 'basename')
- localeConfig = readLocaleConfig(baseDir, type, metadata)
+ basename = metadata.get('general', 'basename')
+ localeConfig = readLocaleConfig(baseDir, type, metadata)
- import buildtools.localeTools as localeTools
- for locale, localeDir in localeConfig['locales'].iteritems():
- if locale != localeConfig['default_locale']:
- localeTools.uploadTranslations(localeConfig, metadata, localeDir, locale,
- basename, key)
+ import buildtools.localeTools as localeTools
+ for locale, localeDir in localeConfig['locales'].iteritems():
+ if locale != localeConfig['default_locale']:
+ localeTools.uploadTranslations(localeConfig, metadata, localeDir, locale,
+ basename, key)
def getTranslations(baseDir, scriptName, opts, args, type):
- if len(args) < 1:
- print 'Project key is required to update translation master files.'
- usage(scriptName, type, 'translate')
- return
+ if len(args) < 1:
+ print 'Project key is required to update translation master files.'
+ usage(scriptName, type, 'translate')
+ return
- key = args[0]
+ key = args[0]
- from buildtools.packager import readMetadata
- metadata = readMetadata(baseDir, type)
+ from buildtools.packager import readMetadata
+ metadata = readMetadata(baseDir, type)
- basename = metadata.get('general', 'basename')
- localeConfig = readLocaleConfig(baseDir, type, metadata)
+ basename = metadata.get('general', 'basename')
+ localeConfig = readLocaleConfig(baseDir, type, metadata)
- import buildtools.localeTools as localeTools
- localeTools.getTranslations(localeConfig, basename, key)
+ import buildtools.localeTools as localeTools
+ localeTools.getTranslations(localeConfig, basename, key)
def showDescriptions(baseDir, scriptName, opts, args, type):
- locales = None
- for option, value in opts:
- if option in ('-l', '--locales'):
- locales = value.split(',')
-
- import buildtools.packagerGecko as packager
- if locales == None:
- locales = packager.getLocales(baseDir)
- elif locales == 'all':
- locales = packager.getLocales(baseDir, True)
-
- data = packager.readLocaleMetadata(baseDir, locales)
- localeCodes = data.keys()
- localeCodes.sort()
- for localeCode in localeCodes:
- locale = data[localeCode]
- print ('''%s
+ locales = None
+ for option, value in opts:
+ if option in ('-l', '--locales'):
+ locales = value.split(',')
+
+ import buildtools.packagerGecko as packager
+ if locales == None:
+ locales = packager.getLocales(baseDir)
+ elif locales == 'all':
+ locales = packager.getLocales(baseDir, True)
+
+ data = packager.readLocaleMetadata(baseDir, locales)
+ localeCodes = data.keys()
+ localeCodes.sort()
+ for localeCode in localeCodes:
+ locale = data[localeCode]
+ print ('''%s
%s
%s
%s
%s
''' % (localeCode,
- locale['name'] if 'name' in locale else 'None',
- locale['description'] if 'description' in locale else 'None',
- locale['description.short'] if 'description.short' in locale else 'None',
- locale['description.long'] if 'description.long' in locale else 'None',
- )).encode('utf-8')
+ locale['name'] if 'name' in locale else 'None',
+ locale['description'] if 'description' in locale else 'None',
+ locale['description.short'] if 'description.short' in locale else 'None',
+ locale['description.long'] if 'description.long' in locale else 'None',
+ )).encode('utf-8')
def generateDocs(baseDir, scriptName, opts, args, type):
- if len(args) == 0:
- print 'No target directory specified for the documentation'
- usage(scriptName, type, 'docs')
- return
- targetDir = args[0]
-
- source_dir = os.path.join(baseDir, 'lib')
- sources = [source_dir]
-
- # JSDoc struggles with huge objects: https://github.com/jsdoc3/jsdoc/issues/976
- if type == 'chrome':
- sources = [os.path.join(source_dir, filename) for filename in os.listdir(source_dir) if filename != 'publicSuffixList.js']
-
-
- config = os.path.join(os.path.dirname(__file__), 'jsdoc.conf')
- command = ['jsdoc', '--destination', targetDir, '--configure', config] + sources
- if any(opt in ('-q', '--quiet') for opt, _ in opts):
- process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stderr = process.communicate()[1]
- retcode = process.poll()
- if retcode:
- sys.stderr.write(stderr)
- raise subprocess.CalledProcessError(command, retcode)
- else:
- subprocess.check_call(command)
+ if len(args) == 0:
+ print 'No target directory specified for the documentation'
+ usage(scriptName, type, 'docs')
+ return
+ targetDir = args[0]
+
+ source_dir = os.path.join(baseDir, 'lib')
+ sources = [source_dir]
+
+ # JSDoc struggles with huge objects: https://github.com/jsdoc3/jsdoc/issues/976
+ if type == 'chrome':
+ sources = [os.path.join(source_dir, filename) for filename in os.listdir(source_dir) if filename != 'publicSuffixList.js']
+
+ config = os.path.join(os.path.dirname(__file__), 'jsdoc.conf')
+ command = ['jsdoc', '--destination', targetDir, '--configure', config] + sources
+ if any(opt in ('-q', '--quiet') for opt, _ in opts):
+ process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stderr = process.communicate()[1]
+ retcode = process.poll()
+ if retcode:
+ sys.stderr.write(stderr)
+ raise subprocess.CalledProcessError(command, retcode)
+ else:
+ subprocess.check_call(command)
+
def runReleaseAutomation(baseDir, scriptName, opts, args, type):
- keyFiles = []
- downloadsRepo = os.path.join(baseDir, '..', 'downloads')
- for option, value in opts:
- if option in ('-k', '--key'):
- keyFiles.append(value)
- elif option in ('-d', '--downloads'):
- downloadsRepo = value
-
- if len(args) == 0:
- print 'No version number specified for the release'
- usage(scriptName, type, 'release')
- return
- version = args[0]
- if re.search(r'[^\d\.]', version):
- print 'Wrong version number format'
- usage(scriptName, type, 'release')
- return
-
- if type == "gecko" and len(keyFiles) == 0:
- print >>sys.stderr, "Warning: no key file specified, creating an unsigned release build\n"
- elif type == "gecko" and len(keyFiles) > 1:
- print >>sys.stderr, "Error: too many key files, only one required"
- usage(scriptName, type, 'release')
- return
- elif type == "chrome" and len(keyFiles) != 2:
- print >>sys.stderr, "Error: wrong number of key files specified, two keys (Chrome and Safari) required for the release"
- usage(scriptName, type, 'release')
- return
-
- import buildtools.releaseAutomation as releaseAutomation
- releaseAutomation.run(baseDir, type, version, keyFiles, downloadsRepo)
+ keyFiles = []
+ downloadsRepo = os.path.join(baseDir, '..', 'downloads')
+ for option, value in opts:
+ if option in ('-k', '--key'):
+ keyFiles.append(value)
+ elif option in ('-d', '--downloads'):
+ downloadsRepo = value
+
+ if len(args) == 0:
+ print 'No version number specified for the release'
+ usage(scriptName, type, 'release')
+ return
+ version = args[0]
+ if re.search(r'[^\d\.]', version):
+ print 'Wrong version number format'
+ usage(scriptName, type, 'release')
+ return
+
+ if type == "gecko" and len(keyFiles) == 0:
+ print >>sys.stderr, "Warning: no key file specified, creating an unsigned release build\n"
+ elif type == "gecko" and len(keyFiles) > 1:
+ print >>sys.stderr, "Error: too many key files, only one required"
+ usage(scriptName, type, 'release')
+ return
+ elif type == "chrome" and len(keyFiles) != 2:
+ print >>sys.stderr, "Error: wrong number of key files specified, two keys (Chrome and Safari) required for the release"
+ usage(scriptName, type, 'release')
+ return
+
+ import buildtools.releaseAutomation as releaseAutomation
+ releaseAutomation.run(baseDir, type, version, keyFiles, downloadsRepo)
+
def updatePSL(baseDir, scriptName, opts, args, type):
- import buildtools.publicSuffixListUpdater as publicSuffixListUpdater
- publicSuffixListUpdater.updatePSL(baseDir)
+ import buildtools.publicSuffixListUpdater as publicSuffixListUpdater
+ publicSuffixListUpdater.updatePSL(baseDir)
with addCommand(lambda baseDir, scriptName, opts, args, type: usage(scriptName, type), ('help', '-h', '--help')) as command:
- command.shortDescription = 'Show this message'
+ command.shortDescription = 'Show this message'
with addCommand(runBuild, 'build') as command:
- command.shortDescription = 'Create a build'
- command.description = 'Creates an extension build with given file name. If output_file is missing a default name will be chosen.'
- command.params = '[options] [output_file]'
- command.addOption('Only include the given locales (if omitted: all locales not marked as incomplete)', short='l', long='locales', value='l1,l2,l3', types=('gecko'))
- command.addOption('Use given build number (if omitted the build number will be retrieved from Mercurial)', short='b', long='build', value='num')
- command.addOption('File containing private key and certificates required to sign the package', short='k', long='key', value='file', types=('gecko', 'chrome', 'safari'))
- command.addOption('Create a build for leak testing', short='m', long='multi-compartment', types=('gecko'))
- command.addOption('Create a release build', short='r', long='release')
- command.supportedTypes = ('gecko', 'chrome', 'safari')
+ command.shortDescription = 'Create a build'
+ command.description = 'Creates an extension build with given file name. If output_file is missing a default name will be chosen.'
+ command.params = '[options] [output_file]'
+ command.addOption('Only include the given locales (if omitted: all locales not marked as incomplete)', short='l', long='locales', value='l1,l2,l3', types=('gecko'))
+ command.addOption('Use given build number (if omitted the build number will be retrieved from Mercurial)', short='b', long='build', value='num')
+ command.addOption('File containing private key and certificates required to sign the package', short='k', long='key', value='file', types=('gecko', 'chrome', 'safari'))
+ command.addOption('Create a build for leak testing', short='m', long='multi-compartment', types=('gecko'))
+ command.addOption('Create a release build', short='r', long='release')
+ command.supportedTypes = ('gecko', 'chrome', 'safari')
with addCommand(runAutoInstall, 'autoinstall') as command:
- command.shortDescription = 'Install extension automatically'
- command.description = 'Will automatically install the extension in a browser running Extension Auto-Installer. If host parameter is omitted assumes that the browser runs on localhost.'
- command.params = '[<host>:]<port>'
- command.addOption('Create a build for leak testing', short='m', long='multi-compartment')
- command.supportedTypes = ('gecko')
+ command.shortDescription = 'Install extension automatically'
+ command.description = 'Will automatically install the extension in a browser running Extension Auto-Installer. If host parameter is omitted assumes that the browser runs on localhost.'
+ command.params = '[<host>:]<port>'
+ command.addOption('Create a build for leak testing', short='m', long='multi-compartment')
+ command.supportedTypes = ('gecko')
with addCommand(createDevEnv, 'devenv') as command:
- command.shortDescription = 'Set up a development environment'
- command.description = 'Will set up or update the devenv folder as an unpacked extension folder for development.'
- command.supportedTypes = ('chrome', 'safari')
+ command.shortDescription = 'Set up a development environment'
+ command.description = 'Will set up or update the devenv folder as an unpacked extension folder for development.'
+ command.supportedTypes = ('chrome', 'safari')
with addCommand(setupTranslations, 'setuptrans') as command:
- command.shortDescription = 'Sets up translation languages'
- command.description = 'Sets up translation languages for the project on crowdin.net.'
- command.params = '[options] project-key'
- command.supportedTypes = ('gecko', 'chrome', 'generic')
+ command.shortDescription = 'Sets up translation languages'
+ command.description = 'Sets up translation languages for the project on crowdin.net.'
+ command.params = '[options] project-key'
+ command.supportedTypes = ('gecko', 'chrome', 'generic')
with addCommand(updateTranslationMaster, 'translate') as command:
- command.shortDescription = 'Updates translation master files'
- command.description = 'Updates the translation master files in the project on crowdin.net.'
- command.params = '[options] project-key'
- command.supportedTypes = ('gecko', 'chrome', 'generic')
+ command.shortDescription = 'Updates translation master files'
+ command.description = 'Updates the translation master files in the project on crowdin.net.'
+ command.params = '[options] project-key'
+ command.supportedTypes = ('gecko', 'chrome', 'generic')
with addCommand(uploadTranslations, 'uploadtrans') as command:
- command.shortDescription = 'Uploads existing translations'
- command.description = 'Uploads already existing translations to the project on crowdin.net.'
- command.params = '[options] project-key'
- command.supportedTypes = ('gecko', 'chrome', 'generic')
+ command.shortDescription = 'Uploads existing translations'
+ command.description = 'Uploads already existing translations to the project on crowdin.net.'
+ command.params = '[options] project-key'
+ command.supportedTypes = ('gecko', 'chrome', 'generic')
with addCommand(getTranslations, 'gettranslations') as command:
- command.shortDescription = 'Downloads translation updates'
- command.description = 'Downloads updated translations from crowdin.net.'
- command.params = '[options] project-key'
- command.supportedTypes = ('gecko', 'chrome', 'generic')
+ command.shortDescription = 'Downloads translation updates'
+ command.description = 'Downloads updated translations from crowdin.net.'
+ command.params = '[options] project-key'
+ command.supportedTypes = ('gecko', 'chrome', 'generic')
with addCommand(showDescriptions, 'showdesc') as command:
- command.shortDescription = 'Print description strings for all locales'
- command.description = 'Display description strings for all locales as specified in the corresponding meta.properties files.'
- command.addOption('Only include the given locales', short='l', long='locales', value='l1,l2,l3')
- command.params = '[options]'
- command.supportedTypes = ('gecko')
+ command.shortDescription = 'Print description strings for all locales'
+ command.description = 'Display description strings for all locales as specified in the corresponding meta.properties files.'
+ command.addOption('Only include the given locales', short='l', long='locales', value='l1,l2,l3')
+ command.params = '[options]'
+ command.supportedTypes = ('gecko')
with addCommand(generateDocs, 'docs') as command:
- command.shortDescription = 'Generate documentation (requires node.js)'
- command.description = 'Generate documentation files and write them into the specified directory. This operation requires JsDoc 3 to be installed.'
- command.addOption('Suppress JsDoc output', short='q', long='quiet')
- command.params = '[options] <directory>'
- command.supportedTypes = ('gecko', 'chrome')
+ command.shortDescription = 'Generate documentation (requires node.js)'
+ command.description = 'Generate documentation files and write them into the specified directory. This operation requires JsDoc 3 to be installed.'
+ command.addOption('Suppress JsDoc output', short='q', long='quiet')
+ command.params = '[options] <directory>'
+ command.supportedTypes = ('gecko', 'chrome')
with addCommand(runReleaseAutomation, 'release') as command:
- command.shortDescription = 'Run release automation'
- command.description = 'Note: If you are not the project owner then you '\
- 'probably don\'t want to run this!\n\n'\
- 'Runs release automation: creates downloads for the new version, tags '\
- 'source code repository as well as downloads and buildtools repository.'
- command.addOption('File containing private key and certificates required to sign the release. Note that for Chrome releases this option needs to be specified twice: first a key to sign Chrome builds, then another to sign the Safari build.', short='k', long='key', value='file', types=('gecko', 'chrome'))
- command.addOption('Directory containing downloads repository (if omitted ../downloads is assumed)', short='d', long='downloads', value='dir')
- command.params = '[options] <version>'
- command.supportedTypes = ('gecko', 'chrome')
+ command.shortDescription = 'Run release automation'
+ command.description = 'Note: If you are not the project owner then you '\
+ 'probably don\'t want to run this!\n\n'\
+ 'Runs release automation: creates downloads for the new version, tags '\
+ 'source code repository as well as downloads and buildtools repository.'
+ command.addOption('File containing private key and certificates required to sign the release. Note that for Chrome releases this option needs to be specified twice: first a key to sign Chrome builds, then another to sign the Safari build.', short='k', long='key', value='file', types=('gecko', 'chrome'))
+ command.addOption('Directory containing downloads repository (if omitted ../downloads is assumed)', short='d', long='downloads', value='dir')
+ command.params = '[options] <version>'
+ command.supportedTypes = ('gecko', 'chrome')
with addCommand(updatePSL, 'updatepsl') as command:
- command.shortDescription = 'Updates Public Suffix List'
- command.description = 'Downloads Public Suffix List (see http://publicsuffix.org/) and generates lib/publicSuffixList.js from it.'
- command.supportedTypes = ('chrome',)
+ command.shortDescription = 'Updates Public Suffix List'
+ command.description = 'Downloads Public Suffix List (see http://publicsuffix.org/) and generates lib/publicSuffixList.js from it.'
+ command.supportedTypes = ('chrome',)
+
def getType(baseDir, scriptName, args):
- # Look for an explicit type parameter (has to be the first parameter)
- if len(args) >= 2 and args[0] == '-t':
- type = args[1]
- del args[1]
- del args[0]
- if type not in knownTypes:
- print '''
+ # Look for an explicit type parameter (has to be the first parameter)
+ if len(args) >= 2 and args[0] == '-t':
+ type = args[1]
+ del args[1]
+ del args[0]
+ if type not in knownTypes:
+ print '''
Unknown type %s specified, supported types are: %s
''' % (type, ', '.join(knownTypes))
- return None
- return type
-
- # Try to guess repository type
- types = []
- for t in knownTypes:
- if os.path.exists(os.path.join(baseDir, 'metadata.%s' % t)):
- types.append(t)
-
- if len(types) == 1:
- return types[0]
- elif len(types) > 1:
- print '''
+ return None
+ return type
+
+ # Try to guess repository type
+ types = []
+ for t in knownTypes:
+ if os.path.exists(os.path.join(baseDir, 'metadata.%s' % t)):
+ types.append(t)
+
+ if len(types) == 1:
+ return types[0]
+ elif len(types) > 1:
+ print '''
Ambiguous repository type, please specify -t parameter explicitly, e.g.
%s -t %s build
''' % (scriptName, types[0])
- return None
- else:
- print '''
+ return None
+ else:
+ print '''
No metadata file found in this repository, a metadata file like
metadata.%s is required.
''' % knownTypes[0]
- return None
+ return None
+
def processArgs(baseDir, args):
- global commands
+ global commands
- scriptName = os.path.basename(args[0])
- args = args[1:]
- type = getType(baseDir, scriptName, args)
- if type == None:
- return
+ scriptName = os.path.basename(args[0])
+ args = args[1:]
+ type = getType(baseDir, scriptName, args)
+ if type == None:
+ return
- if len(args) == 0:
- args = ['build']
- print '''
+ if len(args) == 0:
+ args = ['build']
+ print '''
No command given, assuming "build". For a list of commands run:
%s help
''' % scriptName
- command = args[0]
- if command in commands:
- if commands[command].isSupported(type):
- try:
- opts, args = commands[command].parseArgs(type, args[1:])
- except GetoptError, e:
- print str(e)
- usage(scriptName, type, command)
- sys.exit(2)
- for option, value in opts:
- if option in ('-h', '--help'):
- usage(scriptName, type, command)
- sys.exit()
- commands[command](baseDir, scriptName, opts, args, type)
+ command = args[0]
+ if command in commands:
+ if commands[command].isSupported(type):
+ try:
+ opts, args = commands[command].parseArgs(type, args[1:])
+ except GetoptError, e:
+ print str(e)
+ usage(scriptName, type, command)
+ sys.exit(2)
+ for option, value in opts:
+ if option in ('-h', '--help'):
+ usage(scriptName, type, command)
+ sys.exit()
+ commands[command](baseDir, scriptName, opts, args, type)
+ else:
+ print 'Command %s is not supported for this application type' % command
+ usage(scriptName, type)
else:
- print 'Command %s is not supported for this application type' % command
- usage(scriptName, type)
- else:
- print 'Command %s is unrecognized' % command
- usage(scriptName, type)
+ print 'Command %s is unrecognized' % command
+ usage(scriptName, type)
diff --git a/chainedconfigparser.py b/chainedconfigparser.py
index 6c4a92c..4d7ac92 100644
--- a/chainedconfigparser.py
+++ b/chainedconfigparser.py
@@ -9,164 +9,167 @@ import io
import ConfigParser
from StringIO import StringIO
+
class Item(tuple):
- def __new__(cls, name, value, source):
- result = super(Item, cls).__new__(cls, (name, value))
- result.source = source
- return result
+ def __new__(cls, name, value, source):
+ result = super(Item, cls).__new__(cls, (name, value))
+ result.source = source
+ return result
+
class DiffForUnknownOptionError(ConfigParser.Error):
- def __init__(self, option, section):
- ConfigParser.Error.__init__(self, 'Failed to apply diff for unknown option '
- '%r in section %r' % (option, section))
- self.option = option
- self.section = section
- self.args = (option, section)
+ def __init__(self, option, section):
+ ConfigParser.Error.__init__(self, 'Failed to apply diff for unknown option '
+ '%r in section %r' % (option, section))
+ self.option = option
+ self.section = section
+ self.args = (option, section)
+
class ChainedConfigParser(ConfigParser.SafeConfigParser):
- '''
- This class provides essentially the same interfaces as SafeConfigParser but
- allows chaining configuration files so that one config file provides the
- default values for the other. To specify the config file to inherit from
- a config file needs to contain the following option:
-
- [default]
- inherit = foo/bar.config
-
- It is also possible to add values to or remove values from
- whitespace-separated lists given by an inherited option:
-
- [section]
- opt1 += foo
- opt2 -= bar
-
- The value of the inherit option has to be a relative path with forward
- slashes as delimiters. Up to 5 configuration files can be chained this way,
- longer chains are disallowed to deal with circular references.
-
- As opposed to SafeConfigParser, files are decoded as UTF-8 while
- reading. Also, ChainedConfigParser data is read-only. An additional
- option_source(section, option) method is provided to get the path
- of the configuration file defining this option (for relative paths).
- Items returned by the items() function also have a source attribute
- serving the same purpose.
- '''
-
- def __init__(self):
- ConfigParser.SafeConfigParser.__init__(self)
- self._origin = {}
-
- def _make_parser(self, filename):
- parser = ConfigParser.SafeConfigParser()
- parser.optionxform = lambda option: option
-
- with io.open(filename, encoding='utf-8') as file:
- parser.readfp(file, filename)
-
- return parser
-
- def _get_parser_chain(self, parser, filename):
- parsers = [(parser, filename)]
-
- try:
- inherit = parser.get('default', 'inherit')
- except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
- return parsers
-
- dirname = os.path.dirname(filename)
- for parent in inherit.split():
- parent_filename = os.path.join(dirname, *parent.split('/'))
- parent_parser = self._make_parser(parent_filename)
- parsers[:0] = self._get_parser_chain(parent_parser, parent_filename)
-
- return parsers
-
- def _apply_diff(self, section, option, value):
- is_addition = option.endswith('+')
- is_diff = is_addition or option.endswith('-')
-
- if is_diff:
- option = option[:-1].rstrip()
- try:
- orig_value = self.get(section, option)
- except ConfigParser.NoOptionError:
- raise DiffForUnknownOptionError(option, section)
-
- orig_values = orig_value.split()
- diff_values = value.split()
-
- if is_addition:
- new_values = orig_values + [v for v in diff_values if v not in orig_values]
- else:
- new_values = [v for v in orig_values if v not in diff_values]
-
- value = ' '.join(new_values)
-
- return is_diff, option, value
-
- def _process_parsers(self, parsers):
- for parser, filename in parsers:
- for section in parser.sections():
- if not self.has_section(section):
- try:
- ConfigParser.SafeConfigParser.add_section(self, section)
- except ValueError:
- # add_section() hardcodes 'default' and raises a ValueError if
- # you try to add a section called like that (case insensitive).
- # This bug has been fixed in Python 3.
- ConfigParser.SafeConfigParser.readfp(self, StringIO('[%s]' % section))
-
- for option, value in parser.items(section):
- is_diff, option, value = self._apply_diff(section, option, value)
- ConfigParser.SafeConfigParser.set(self, section, option, value)
-
- if not is_diff:
- self._origin[(section, self.optionxform(option))] = filename
-
- def read(self, filenames):
- if isinstance(filenames, basestring):
- filenames = [filenames]
-
- read_ok = []
- for filename in filenames:
- try:
- parser = self._make_parser(filename)
- except IOError:
- continue
- self._process_parsers(self._get_parser_chain(parser, filename))
- read_ok.append(filename)
-
- return read_ok
-
- def items(self, section, *args, **kwargs):
- items = []
- for option, value in ConfigParser.SafeConfigParser.items(self, section, *args, **kwargs):
- items.append(Item(
- option, value,
- self._origin[(section, self.optionxform(option))]
- ))
- return items
-
- def option_source(self, section, option):
- option = self.optionxform(option)
- try:
- return self._origin[(section, option)]
- except KeyError:
- if not self.has_section(section):
- raise ConfigParser.NoSectionError(section)
- raise ConfigParser.NoOptionError(option, section)
-
- def readfp(self, fp, filename=None):
- raise NotImplementedError
-
- def set(self, section, option, value=None):
- raise NotImplementedError
-
- def add_section(self, section):
- raise NotImplementedError
-
- def remove_option(self, section, option):
- raise NotImplementedError
-
- def remove_section(self, section):
- raise NotImplementedError
+ '''
+ This class provides essentially the same interfaces as SafeConfigParser but
+ allows chaining configuration files so that one config file provides the
+ default values for the other. To specify the config file to inherit from
+ a config file needs to contain the following option:
+
+ [default]
+ inherit = foo/bar.config
+
+ It is also possible to add values to or remove values from
+ whitespace-separated lists given by an inherited option:
+
+ [section]
+ opt1 += foo
+ opt2 -= bar
+
+ The value of the inherit option has to be a relative path with forward
+ slashes as delimiters. Up to 5 configuration files can be chained this way,
+ longer chains are disallowed to deal with circular references.
+
+ As opposed to SafeConfigParser, files are decoded as UTF-8 while
+ reading. Also, ChainedConfigParser data is read-only. An additional
+ option_source(section, option) method is provided to get the path
+ of the configuration file defining this option (for relative paths).
+ Items returned by the items() function also have a source attribute
+ serving the same purpose.
+ '''
+
+ def __init__(self):
+ ConfigParser.SafeConfigParser.__init__(self)
+ self._origin = {}
+
+ def _make_parser(self, filename):
+ parser = ConfigParser.SafeConfigParser()
+ parser.optionxform = lambda option: option
+
+ with io.open(filename, encoding='utf-8') as file:
+ parser.readfp(file, filename)
+
+ return parser
+
+ def _get_parser_chain(self, parser, filename):
+ parsers = [(parser, filename)]
+
+ try:
+ inherit = parser.get('default', 'inherit')
+ except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+ return parsers
+
+ dirname = os.path.dirname(filename)
+ for parent in inherit.split():
+ parent_filename = os.path.join(dirname, *parent.split('/'))
+ parent_parser = self._make_parser(parent_filename)
+ parsers[:0] = self._get_parser_chain(parent_parser, parent_filename)
+
+ return parsers
+
+ def _apply_diff(self, section, option, value):
+ is_addition = option.endswith('+')
+ is_diff = is_addition or option.endswith('-')
+
+ if is_diff:
+ option = option[:-1].rstrip()
+ try:
+ orig_value = self.get(section, option)
+ except ConfigParser.NoOptionError:
+ raise DiffForUnknownOptionError(option, section)
+
+ orig_values = orig_value.split()
+ diff_values = value.split()
+
+ if is_addition:
+ new_values = orig_values + [v for v in diff_values if v not in orig_values]
+ else:
+ new_values = [v for v in orig_values if v not in diff_values]
+
+ value = ' '.join(new_values)
+
+ return is_diff, option, value
+
+ def _process_parsers(self, parsers):
+ for parser, filename in parsers:
+ for section in parser.sections():
+ if not self.has_section(section):
+ try:
+ ConfigParser.SafeConfigParser.add_section(self, section)
+ except ValueError:
+ # add_section() hardcodes 'default' and raises a ValueError if
+ # you try to add a section called like that (case insensitive).
+ # This bug has been fixed in Python 3.
+ ConfigParser.SafeConfigParser.readfp(self, StringIO('[%s]' % section))
+
+ for option, value in parser.items(section):
+ is_diff, option, value = self._apply_diff(section, option, value)
+ ConfigParser.SafeConfigParser.set(self, section, option, value)
+
+ if not is_diff:
+ self._origin[(section, self.optionxform(option))] = filename
+
+ def read(self, filenames):
+ if isinstance(filenames, basestring):
+ filenames = [filenames]
+
+ read_ok = []
+ for filename in filenames:
+ try:
+ parser = self._make_parser(filename)
+ except IOError:
+ continue
+ self._process_parsers(self._get_parser_chain(parser, filename))
+ read_ok.append(filename)
+
+ return read_ok
+
+ def items(self, section, *args, **kwargs):
+ items = []
+ for option, value in ConfigParser.SafeConfigParser.items(self, section, *args, **kwargs):
+ items.append(Item(
+ option, value,
+ self._origin[(section, self.optionxform(option))]
+ ))
+ return items
+
+ def option_source(self, section, option):
+ option = self.optionxform(option)
+ try:
+ return self._origin[(section, option)]
+ except KeyError:
+ if not self.has_section(section):
+ raise ConfigParser.NoSectionError(section)
+ raise ConfigParser.NoOptionError(option, section)
+
+ def readfp(self, fp, filename=None):
+ raise NotImplementedError
+
+ def set(self, section, option, value=None):
+ raise NotImplementedError
+
+ def add_section(self, section):
+ raise NotImplementedError
+
+ def remove_option(self, section, option):
+ raise NotImplementedError
+
+ def remove_section(self, section):
+ raise NotImplementedError
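[Editor's note: to make the '+=' / '-=' semantics described in the ChainedConfigParser docstring concrete, here is a simplified standalone re-implementation of the list-diff rule from _apply_diff() (a sketch for illustration, not the class itself):

    def apply_list_diff(inherited, op, diff):
        # Values are whitespace-separated lists; '+' appends items
        # not already present, '-' removes the listed items.
        orig_values = inherited.split()
        diff_values = diff.split()
        if op == '+':
            return ' '.join(orig_values + [v for v in diff_values
                                           if v not in orig_values])
        return ' '.join(v for v in orig_values if v not in diff_values)

    print apply_list_diff('foo bar', '+', 'baz foo')  # foo bar baz
    print apply_list_diff('foo bar', '-', 'bar')      # foo
]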
diff --git a/ensure_dependencies.py b/ensure_dependencies.py
index 9514371..a030bd1 100755
--- a/ensure_dependencies.py
+++ b/ensure_dependencies.py
@@ -41,331 +41,342 @@ A dependencies file should look like this:
"""
SKIP_DEPENDENCY_UPDATES = os.environ.get(
- "SKIP_DEPENDENCY_UPDATES", ""
+ "SKIP_DEPENDENCY_UPDATES", ""
).lower() not in ("", "0", "false")
+
class Mercurial():
- def istype(self, repodir):
- return os.path.exists(os.path.join(repodir, ".hg"))
+ def istype(self, repodir):
+ return os.path.exists(os.path.join(repodir, ".hg"))
+
+ def clone(self, source, target):
+ if not source.endswith("/"):
+ source += "/"
+ subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])
- def clone(self, source, target):
- if not source.endswith("/"):
- source += "/"
- subprocess.check_call(["hg", "clone", "--quiet", "--noupdate", source, target])
+ def get_revision_id(self, repo, rev=None):
+ command = ["hg", "id", "--repository", repo, "--id"]
+ if rev:
+ command.extend(["--rev", rev])
- def get_revision_id(self, repo, rev=None):
- command = ["hg", "id", "--repository", repo, "--id"]
- if rev:
- command.extend(["--rev", rev])
+ # Ignore stderr output and return code here: if revision lookup failed we
+ # should simply return an empty string.
+ result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
+ return result.strip()
- # Ignore stderr output and return code here: if revision lookup failed we
- # should simply return an empty string.
- result = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
- return result.strip()
+ def pull(self, repo):
+ subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"])
- def pull(self, repo):
- subprocess.check_call(["hg", "pull", "--repository", repo, "--quiet"])
+ def update(self, repo, rev, revname):
+ subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev])
- def update(self, repo, rev, revname):
- subprocess.check_call(["hg", "update", "--repository", repo, "--quiet", "--check", "--rev", rev])
+ def ignore(self, target, repo):
- def ignore(self, target, repo):
+ if not self.istype(target):
- if not self.istype(target):
+ config_path = os.path.join(repo, ".hg", "hgrc")
+ ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies"))
- config_path = os.path.join(repo, ".hg", "hgrc")
- ignore_path = os.path.abspath(os.path.join(repo, ".hg", "dependencies"))
+ config = RawConfigParser()
+ config.read(config_path)
- config = RawConfigParser()
- config.read(config_path)
+ if not config.has_section("ui"):
+ config.add_section("ui")
- if not config.has_section("ui"):
- config.add_section("ui")
+ config.set("ui", "ignore.dependencies", ignore_path)
+ with open(config_path, "w") as stream:
+ config.write(stream)
- config.set("ui", "ignore.dependencies", ignore_path)
- with open(config_path, "w") as stream:
- config.write(stream)
+ module = os.path.relpath(target, repo)
+ _ensure_line_exists(ignore_path, module)
- module = os.path.relpath(target, repo)
- _ensure_line_exists(ignore_path, module)
+ def postprocess_url(self, url):
+ return url
- def postprocess_url(self, url):
- return url
class Git():
- def istype(self, repodir):
- return os.path.exists(os.path.join(repodir, ".git"))
-
- def clone(self, source, target):
- source = source.rstrip("/")
- if not source.endswith(".git"):
- source += ".git"
- subprocess.check_call(["git", "clone", "--quiet", source, target])
-
- def get_revision_id(self, repo, rev="HEAD"):
- command = ["git", "rev-parse", "--revs-only", rev + '^{commit}']
- return subprocess.check_output(command, cwd=repo).strip()
-
- def pull(self, repo):
- # Fetch tracked branches, new tags and the list of available remote branches
- subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
- # Next we need to ensure all remote branches are tracked
- newly_tracked = False
- remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
- for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
- remote, local = match.groups()
- with open(os.devnull, "wb") as devnull:
- if subprocess.call(["git", "branch", "--track", local, remote],
- cwd=repo, stdout=devnull, stderr=devnull) == 0:
- newly_tracked = True
- # Finally fetch any newly tracked remote branches
- if newly_tracked:
- subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)
-
- def update(self, repo, rev, revname):
- subprocess.check_call(["git", "checkout", "--quiet", revname], cwd=repo)
-
- def ignore(self, target, repo):
- module = os.path.sep + os.path.relpath(target, repo)
- exclude_file = os.path.join(repo, ".git", "info", "exclude")
- _ensure_line_exists(exclude_file, module)
-
- def postprocess_url(self, url):
- # Handle alternative syntax of SSH URLS
- if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
- return "ssh://" + url.replace(":", "/", 1)
- return url
+ def istype(self, repodir):
+ return os.path.exists(os.path.join(repodir, ".git"))
+
+ def clone(self, source, target):
+ source = source.rstrip("/")
+ if not source.endswith(".git"):
+ source += ".git"
+ subprocess.check_call(["git", "clone", "--quiet", source, target])
+
+ def get_revision_id(self, repo, rev="HEAD"):
+ command = ["git", "rev-parse", "--revs-only", rev + '^{commit}']
+ return subprocess.check_output(command, cwd=repo).strip()
+
+ def pull(self, repo):
+ # Fetch tracked branches, new tags and the list of available remote branches
+ subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
+ # Next we need to ensure all remote branches are tracked
+ newly_tracked = False
+ remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
+ for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
+ remote, local = match.groups()
+ with open(os.devnull, "wb") as devnull:
+ if subprocess.call(["git", "branch", "--track", local, remote],
+ cwd=repo, stdout=devnull, stderr=devnull) == 0:
+ newly_tracked = True
+ # Finally fetch any newly tracked remote branches
+ if newly_tracked:
+ subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)
+
+ def update(self, repo, rev, revname):
+ subprocess.check_call(["git", "checkout", "--quiet", revname], cwd=repo)
+
+ def ignore(self, target, repo):
+ module = os.path.sep + os.path.relpath(target, repo)
+ exclude_file = os.path.join(repo, ".git", "info", "exclude")
+ _ensure_line_exists(exclude_file, module)
+
+ def postprocess_url(self, url):
+ # Handle alternative syntax of SSH URLs
+ if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
+ return "ssh://" + url.replace(":", "/", 1)
+ return url
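
For illustration, a minimal sketch of what the rewritten postprocess_url() does with scp-style SSH URLs (the URLs below are hypothetical):

    git = Git()
    git.postprocess_url("git@example.com:adblockplus/buildtools.git")
    # -> "ssh://git@example.com/adblockplus/buildtools.git"
    git.postprocess_url("https://example.com/buildtools.git")
    # -> returned unchanged, the URL already has a scheme
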
repo_types = OrderedDict((
- ("hg", Mercurial()),
- ("git", Git()),
+ ("hg", Mercurial()),
+ ("git", Git()),
))
# [vcs:]value
item_regexp = re.compile(
- "^(?:(" + "|".join(map(re.escape, repo_types.keys())) +"):)?"
- "(.+)$"
+ "^(?:(" + "|".join(map(re.escape, repo_types.keys())) + "):)?"
+ "(.+)$"
)
# [url@]rev
source_regexp = re.compile(
- "^(?:(.*)@)?"
- "(.+)$"
+ "^(?:(.*)@)?"
+ "(.+)$"
)
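
As a sketch, this is how the two expressions decompose a single dependency item (the value is hypothetical):

    # item_regexp peels off an optional "hg:"/"git:" prefix; source_regexp
    # then splits an optional "url@" prefix from the revision.
    vcs, value = item_regexp.search("git:https://example.com/repo.git@master").groups()
    # vcs == "git", value == "https://example.com/repo.git@master"
    url, rev = source_regexp.search(value).groups()
    # url == "https://example.com/repo.git", rev == "master"
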
+
def merge_seqs(seq1, seq2):
- """Return a list of any truthy values from the supplied sequences
+ """Return a list of any truthy values from the supplied sequences
- (None, 2), (1,) => [1, 2]
- None, (1, 2) => [1, 2]
- (1, 2), (3, 4) => [3, 4]
- """
- return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ())
+ (None, 2), (1,) => [1, 2]
+ None, (1, 2) => [1, 2]
+ (1, 2), (3, 4) => [3, 4]
+ """
+ return map(lambda item1, item2: item2 or item1, seq1 or (), seq2 or ())
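
One subtlety worth spelling out: merge_seqs() relies on Python 2's map(), which pads the shorter sequence with None, so a value missing from either sequence falls back to the other:

    merge_seqs((None, 2), (1,))  # -> [1, 2]; second sequence padded with None
    merge_seqs(None, (1, 2))     # -> [1, 2]; empty first sequence
    merge_seqs((1, 2), (3, 4))   # -> [3, 4]; values from the second sequence win
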
-def parse_spec(path, line):
- if "=" not in line:
- logging.warning("Invalid line in file %s: %s" % (path, line))
- return None, None
- key, value = line.split("=", 1)
- key = key.strip()
- items = value.split()
- if not len(items):
- logging.warning("No value specified for key %s in file %s" % (key, path))
- return key, None
+def parse_spec(path, line):
+ if "=" not in line:
+ logging.warning("Invalid line in file %s: %s" % (path, line))
+ return None, None
+
+ key, value = line.split("=", 1)
+ key = key.strip()
+ items = value.split()
+ if not len(items):
+ logging.warning("No value specified for key %s in file %s" % (key, path))
+ return key, None
+
+ result = OrderedDict()
+ is_dependency_field = not key.startswith("_")
+
+ for i, item in enumerate(items):
+ try:
+ vcs, value = re.search(item_regexp, item).groups()
+ vcs = vcs or "*"
+ if is_dependency_field:
+ if i == 0 and vcs == "*":
+ # In order to be backwards compatible we have to assume that the first
+ # source contains only a URL/path for the repo if it does not contain
+ # the VCS part
+ url_rev = (value, None)
+ else:
+ url_rev = re.search(source_regexp, value).groups()
+ result[vcs] = merge_seqs(result.get(vcs), url_rev)
+ else:
+ if vcs in result:
+ logging.warning("Ignoring duplicate value for type %r "
+ "(key %r in file %r)" % (vcs, key, path))
+ result[vcs] = value
+ except AttributeError:
+ logging.warning("Ignoring invalid item %r for type %r "
+ "(key %r in file %r)" % (item, vcs, key, path))
+ continue
+ return key, result
- result = OrderedDict()
- is_dependency_field = not key.startswith("_")
- for i, item in enumerate(items):
+def read_deps(repodir):
+ result = {}
+ deps_path = os.path.join(repodir, "dependencies")
try:
- vcs, value = re.search(item_regexp, item).groups()
- vcs = vcs or "*"
- if is_dependency_field:
- if i == 0 and vcs == "*":
- # In order to be backwards compatible we have to assume that the first
- # source contains only a URL/path for the repo if it does not contain
- # the VCS part
- url_rev = (value, None)
- else:
- url_rev = re.search(source_regexp, value).groups()
- result[vcs] = merge_seqs(result.get(vcs), url_rev)
- else:
- if vcs in result:
- logging.warning("Ignoring duplicate value for type %r "
- "(key %r in file %r)" % (vcs, key, path))
- result[vcs] = value
- except AttributeError:
- logging.warning("Ignoring invalid item %r for type %r "
- "(key %r in file %r)" % (item, vcs, key, path))
- continue
- return key, result
+ with io.open(deps_path, "rt", encoding="utf-8") as handle:
+ for line in handle:
+ # Remove comments and whitespace
+ line = re.sub(r"#.*", "", line).strip()
+ if not line:
+ continue
+
+ key, spec = parse_spec(deps_path, line)
+ if spec:
+ result[key] = spec
+ return result
+ except IOError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ return None
-def read_deps(repodir):
- result = {}
- deps_path = os.path.join(repodir, "dependencies")
- try:
- with io.open(deps_path, "rt", encoding="utf-8") as handle:
- for line in handle:
- # Remove comments and whitespace
- line = re.sub(r"#.*", "", line).strip()
- if not line:
- continue
-
- key, spec = parse_spec(deps_path, line)
- if spec:
- result[key] = spec
- return result
- except IOError, e:
- if e.errno != errno.ENOENT:
- raise
- return None
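
As a sketch, a plausible dependencies file line and the structure parse_spec() returns for it (the revisions are hypothetical):

    key, spec = parse_spec("dependencies",
                           "buildtools = buildtools hg:5dc6a3a86d11 git:3eebcbbf92cb")
    # key  == "buildtools"
    # spec == {"*":   ["buildtools", None],    # URL/path, no revision yet
    #          "hg":  [None, "5dc6a3a86d11"],  # Mercurial revision
    #          "git": [None, "3eebcbbf92cb"]}  # Git revision
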
def safe_join(path, subpath):
- # This has been inspired by Flask's safe_join() function
- forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
- if any(sep in subpath for sep in forbidden):
- raise Exception("Illegal directory separator in dependency path %s" % subpath)
-
- normpath = posixpath.normpath(subpath)
- if posixpath.isabs(normpath):
- raise Exception("Dependency path %s cannot be absolute" % subpath)
- if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
- raise Exception("Dependency path %s has to be inside the repository" % subpath)
- return os.path.join(path, *normpath.split(posixpath.sep))
+ # This has been inspired by Flask's safe_join() function
+ forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
+ if any(sep in subpath for sep in forbidden):
+ raise Exception("Illegal directory separator in dependency path %s" % subpath)
+
+ normpath = posixpath.normpath(subpath)
+ if posixpath.isabs(normpath):
+ raise Exception("Dependency path %s cannot be absolute" % subpath)
+ if normpath == posixpath.pardir or normpath.startswith(posixpath.pardir + posixpath.sep):
+ raise Exception("Dependency path %s has to be inside the repository" % subpath)
+ return os.path.join(path, *normpath.split(posixpath.sep))
+
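
A minimal sketch of the guarantees safe_join() enforces (paths hypothetical):

    safe_join("/repo", "adblockplus/lib")  # -> "/repo/adblockplus/lib"
    safe_join("/repo", "/etc/passwd")      # raises: absolute dependency path
    safe_join("/repo", "../outside")       # raises: escapes the repository
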
def get_repo_type(repo):
- for name, repotype in repo_types.iteritems():
- if repotype.istype(repo):
- return name
- return "hg"
+ for name, repotype in repo_types.iteritems():
+ if repotype.istype(repo):
+ return name
+ return "hg"
+
def ensure_repo(parentrepo, parenttype, target, type, root, sourcename):
- if os.path.exists(target):
- return
+ if os.path.exists(target):
+ return
- if SKIP_DEPENDENCY_UPDATES:
- logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
- "%s not cloned", target)
- return
+ if SKIP_DEPENDENCY_UPDATES:
+ logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
+ "%s not cloned", target)
+ return
- postprocess_url = repo_types[type].postprocess_url
- root = postprocess_url(root)
- sourcename = postprocess_url(sourcename)
+ postprocess_url = repo_types[type].postprocess_url
+ root = postprocess_url(root)
+ sourcename = postprocess_url(sourcename)
- if os.path.exists(root):
- url = os.path.join(root, sourcename)
- else:
- url = urlparse.urljoin(root, sourcename)
+ if os.path.exists(root):
+ url = os.path.join(root, sourcename)
+ else:
+ url = urlparse.urljoin(root, sourcename)
+
+ logging.info("Cloning repository %s into %s" % (url, target))
+ repo_types[type].clone(url, target)
+ repo_types[parenttype].ignore(target, parentrepo)
- logging.info("Cloning repository %s into %s" % (url, target))
- repo_types[type].clone(url, target)
- repo_types[parenttype].ignore(target, parentrepo)
def update_repo(target, type, revision):
- resolved_revision = repo_types[type].get_revision_id(target, revision)
- current_revision = repo_types[type].get_revision_id(target)
+ resolved_revision = repo_types[type].get_revision_id(target, revision)
+ current_revision = repo_types[type].get_revision_id(target)
- if resolved_revision != current_revision:
- if SKIP_DEPENDENCY_UPDATES:
- logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
- "%s not checked out to %s", target, revision)
- return
+ if resolved_revision != current_revision:
+ if SKIP_DEPENDENCY_UPDATES:
+ logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
+ "%s not checked out to %s", target, revision)
+ return
- if not resolved_revision:
- logging.info("Revision %s is unknown, downloading remote changes" % revision)
- repo_types[type].pull(target)
- resolved_revision = repo_types[type].get_revision_id(target, revision)
- if not resolved_revision:
- raise Exception("Failed to resolve revision %s" % revision)
+ if not resolved_revision:
+ logging.info("Revision %s is unknown, downloading remote changes" % revision)
+ repo_types[type].pull(target)
+ resolved_revision = repo_types[type].get_revision_id(target, revision)
+ if not resolved_revision:
+ raise Exception("Failed to resolve revision %s" % revision)
+
+ logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
+ repo_types[type].update(target, resolved_revision, revision)
- logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
- repo_types[type].update(target, resolved_revision, revision)
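
In effect update_repo() is a no-op when the working copy already matches, and only goes to the network when the requested revision cannot be resolved locally (repository and revision hypothetical):

    update_repo("adblockplus", "hg", "master")
    # no-op if already at "master"; pulls first if "master" is unknown locally
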
def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdependencies=set()):
- config = read_deps(repodir)
- if config is None:
- if level == 0:
- logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
- return
- if level >= 10:
- logging.warning("Too much subrepository nesting, ignoring %s" % repo)
- return
-
- if overrideroots is not None:
- config["_root"] = overrideroots
-
- for dir, sources in config.iteritems():
- if (dir.startswith("_") or
- skipdependencies.intersection([s[0] for s in sources if s[0]])):
- continue
-
- target = safe_join(repodir, dir)
- parenttype = get_repo_type(repodir)
- _root = config.get("_root", {})
-
- for key in sources.keys() + _root.keys():
- if key == parenttype or key is None and vcs != "*":
- vcs = key
- source, rev = merge_seqs(sources.get("*"), sources.get(vcs))
-
- if not (vcs and source and rev):
- logging.warning("No valid source / revision found to create %s" % target)
- continue
-
- ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
- update_repo(target, vcs, rev)
- resolve_deps(target, level + 1, self_update=False,
- overrideroots=overrideroots, skipdependencies=skipdependencies)
-
- if self_update and "_self" in config and "*" in config["_self"]:
- source = safe_join(repodir, config["_self"]["*"])
- try:
- with io.open(source, "rb") as handle:
- sourcedata = handle.read()
- except IOError, e:
- if e.errno != errno.ENOENT:
- raise
- logging.warning("File %s doesn't exist, skipping self-update" % source)
- return
-
- target = __file__
- with io.open(target, "rb") as handle:
- targetdata = handle.read()
-
- if sourcedata != targetdata:
- logging.info("Updating %s from %s, don't forget to commit" % (target, source))
- with io.open(target, "wb") as handle:
- handle.write(sourcedata)
- if __name__ == "__main__":
- logging.info("Restarting %s" % target)
- os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
- else:
- logging.warning("Cannot restart %s automatically, please rerun" % target)
+ config = read_deps(repodir)
+ if config is None:
+ if level == 0:
+ logging.warning("No dependencies file in directory %s, nothing to do...\n%s" % (repodir, USAGE))
+ return
+ if level >= 10:
+ logging.warning("Too much subrepository nesting, ignoring %s" % repo)
+ return
+
+ if overrideroots is not None:
+ config["_root"] = overrideroots
+
+ for dir, sources in config.iteritems():
+ if (dir.startswith("_") or
+ skipdependencies.intersection([s[0] for s in sources if s[0]])):
+ continue
+
+ target = safe_join(repodir, dir)
+ parenttype = get_repo_type(repodir)
+ _root = config.get("_root", {})
+
+ for key in sources.keys() + _root.keys():
+ if key == parenttype or key is None and vcs != "*":
+ vcs = key
+ source, rev = merge_seqs(sources.get("*"), sources.get(vcs))
+
+ if not (vcs and source and rev):
+ logging.warning("No valid source / revision found to create %s" % target)
+ continue
+
+ ensure_repo(repodir, parenttype, target, vcs, _root.get(vcs, ""), source)
+ update_repo(target, vcs, rev)
+ resolve_deps(target, level + 1, self_update=False,
+ overrideroots=overrideroots, skipdependencies=skipdependencies)
+
+ if self_update and "_self" in config and "*" in config["_self"]:
+ source = safe_join(repodir, config["_self"]["*"])
+ try:
+ with io.open(source, "rb") as handle:
+ sourcedata = handle.read()
+ except IOError, e:
+ if e.errno != errno.ENOENT:
+ raise
+ logging.warning("File %s doesn't exist, skipping self-update" % source)
+ return
+
+ target = __file__
+ with io.open(target, "rb") as handle:
+ targetdata = handle.read()
+
+ if sourcedata != targetdata:
+ logging.info("Updating %s from %s, don't forget to commit" % (target, source))
+ with io.open(target, "wb") as handle:
+ handle.write(sourcedata)
+ if __name__ == "__main__":
+ logging.info("Restarting %s" % target)
+ os.execv(sys.executable, [sys.executable, target] + sys.argv[1:])
+ else:
+ logging.warning("Cannot restart %s automatically, please rerun" % target)
+
def _ensure_line_exists(path, pattern):
- with open(path, 'a+') as f:
- file_content = [l.strip() for l in f.readlines()]
- if not pattern in file_content:
- file_content.append(pattern)
- f.seek(0, os.SEEK_SET)
- f.truncate()
- for l in file_content:
- print >>f, l
+ with open(path, 'a+') as f:
+ file_content = [l.strip() for l in f.readlines()]
+ if not pattern in file_content:
+ file_content.append(pattern)
+ f.seek(0, os.SEEK_SET)
+ f.truncate()
+ for l in file_content:
+ print >>f, l
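
Note that _ensure_line_exists() is idempotent: the file is rewritten only when the line is missing, so repeated runs don't accumulate duplicates (path and line hypothetical):

    _ensure_line_exists(".git/info/exclude", "/adblockplus")
    _ensure_line_exists(".git/info/exclude", "/adblockplus")  # no duplicate added
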
if __name__ == "__main__":
- logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
+ logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
- parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.")
- parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path")
- parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output")
- args = parser.parse_args()
+ parser = argparse.ArgumentParser(description="Verify dependencies for a set of repositories, by default the repository of this script.")
+ parser.add_argument("repos", metavar="repository", type=str, nargs="*", help="Repository path")
+ parser.add_argument("-q", "--quiet", action="store_true", help="Suppress informational output")
+ args = parser.parse_args()
- if args.quiet:
- logging.disable(logging.INFO)
+ if args.quiet:
+ logging.disable(logging.INFO)
- repos = args.repos
- if not len(repos):
- repos = [os.path.dirname(__file__)]
- for repo in repos:
- resolve_deps(repo)
+ repos = args.repos
+ if not len(repos):
+ repos = [os.path.dirname(__file__)]
+ for repo in repos:
+ resolve_deps(repo)
diff --git a/localeTools.py b/localeTools.py
index 93ab16c..06481cc 100644
--- a/localeTools.py
+++ b/localeTools.py
@@ -4,460 +4,490 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-import re, os, sys, codecs, json, urllib, urllib2
+import re
+import os
+import sys
+import codecs
+import json
+import urllib
+import urllib2
from StringIO import StringIO
from ConfigParser import SafeConfigParser
from zipfile import ZipFile
from xml.parsers.expat import ParserCreate, XML_PARAM_ENTITY_PARSING_ALWAYS
langMappingGecko = {
- 'bn-BD': 'bn',
- 'br': 'br-FR',
- 'dsb': 'dsb-DE',
- 'fj-FJ': 'fj',
- 'hsb': 'hsb-DE',
- 'hi-IN': 'hi',
- 'ml': 'ml-IN',
- 'nb-NO': 'nb',
- 'rm': 'rm-CH',
- 'ta-LK': 'ta',
- 'wo-SN': 'wo',
+ 'bn-BD': 'bn',
+ 'br': 'br-FR',
+ 'dsb': 'dsb-DE',
+ 'fj-FJ': 'fj',
+ 'hsb': 'hsb-DE',
+ 'hi-IN': 'hi',
+ 'ml': 'ml-IN',
+ 'nb-NO': 'nb',
+ 'rm': 'rm-CH',
+ 'ta-LK': 'ta',
+ 'wo-SN': 'wo',
}
langMappingChrome = {
- 'es-419': 'es-MX',
- 'es': 'es-ES',
- 'sv': 'sv-SE',
- 'ml': 'ml-IN',
- 'gu': 'gu-IN',
+ 'es-419': 'es-MX',
+ 'es': 'es-ES',
+ 'sv': 'sv-SE',
+ 'ml': 'ml-IN',
+ 'gu': 'gu-IN',
}
chromeLocales = [
- "am",
- "ar",
- "bg",
- "bn",
- "ca",
- "cs",
- "da",
- "de",
- "el",
- "en-GB",
- "en-US",
- "es-419",
- "es",
- "et",
- "fa",
- "fi",
- "fil",
- "fr",
- "gu",
- "he",
- "hi",
- "hr",
- "hu",
- "id",
- "it",
- "ja",
- "kn",
- "ko",
- "lt",
- "lv",
- "ml",
- "mr",
- "ms",
- "nb",
- "nl",
- "pl",
- "pt-BR",
- "pt-PT",
- "ro",
- "ru",
- "sk",
- "sl",
- "sr",
- "sv",
- "sw",
- "ta",
- "te",
- "th",
- "tr",
- "uk",
- "vi",
- "zh-CN",
- "zh-TW",
+ "am",
+ "ar",
+ "bg",
+ "bn",
+ "ca",
+ "cs",
+ "da",
+ "de",
+ "el",
+ "en-GB",
+ "en-US",
+ "es-419",
+ "es",
+ "et",
+ "fa",
+ "fi",
+ "fil",
+ "fr",
+ "gu",
+ "he",
+ "hi",
+ "hr",
+ "hu",
+ "id",
+ "it",
+ "ja",
+ "kn",
+ "ko",
+ "lt",
+ "lv",
+ "ml",
+ "mr",
+ "ms",
+ "nb",
+ "nl",
+ "pl",
+ "pt-BR",
+ "pt-PT",
+ "ro",
+ "ru",
+ "sk",
+ "sl",
+ "sr",
+ "sv",
+ "sw",
+ "ta",
+ "te",
+ "th",
+ "tr",
+ "uk",
+ "vi",
+ "zh-CN",
+ "zh-TW",
]
+
class OrderedDict(dict):
- def __init__(self):
- self.__order = []
- def __setitem__(self, key, value):
- self.__order.append(key)
- dict.__setitem__(self, key, value)
- def iteritems(self):
- done = set()
- for key in self.__order:
- if not key in done and key in self:
- yield (key, self[key])
- done.add(key)
+ def __init__(self):
+ self.__order = []
+
+ def __setitem__(self, key, value):
+ self.__order.append(key)
+ dict.__setitem__(self, key, value)
+
+ def iteritems(self):
+ done = set()
+ for key in self.__order:
+ if not key in done and key in self:
+ yield (key, self[key])
+ done.add(key)
+
def escapeEntity(value):
- return value.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;')
+ return value.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;')
+
def unescapeEntity(value):
- return value.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"')
+ return value.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"')
+
def mapLocale(type, locale):
- mapping = langMappingChrome if type == 'ISO-15897' else langMappingGecko
- return mapping.get(locale, locale)
+ mapping = langMappingChrome if type == 'ISO-15897' else langMappingGecko
+ return mapping.get(locale, locale)
+
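
A quick sketch of the two mapping directions (codes without an entry pass through unchanged):

    mapLocale('ISO-15897', 'es-419')  # -> 'es-MX' (Chrome table)
    mapLocale('BCP-47', 'bn-BD')      # -> 'bn'    (Gecko table)
    mapLocale('BCP-47', 'fr')         # -> 'fr'    (no mapping defined)
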
def parseDTDString(data, path):
- result = []
- currentComment = [None]
+ result = []
+ currentComment = [None]
+
+ parser = ParserCreate()
+ parser.UseForeignDTD(True)
+ parser.SetParamEntityParsing(XML_PARAM_ENTITY_PARSING_ALWAYS)
- parser = ParserCreate()
- parser.UseForeignDTD(True)
- parser.SetParamEntityParsing(XML_PARAM_ENTITY_PARSING_ALWAYS)
+ def ExternalEntityRefHandler(context, base, systemId, publicId):
+ subparser = parser.ExternalEntityParserCreate(context, 'utf-8')
+ subparser.Parse(data.encode('utf-8'), True)
+ return 1
- def ExternalEntityRefHandler(context, base, systemId, publicId):
- subparser = parser.ExternalEntityParserCreate(context, 'utf-8')
- subparser.Parse(data.encode('utf-8'), True)
- return 1
+ def CommentHandler(data):
+ currentComment[0] = data.strip()
- def CommentHandler(data):
- currentComment[0] = data.strip()
+ def EntityDeclHandler(entityName, is_parameter_entity, value, base, systemId, publicId, notationName):
+ result.append((unescapeEntity(entityName), currentComment[0], unescapeEntity(value.strip())))
+ currentComment[0] = None
- def EntityDeclHandler(entityName, is_parameter_entity, value, base, systemId, publicId, notationName):
- result.append((unescapeEntity(entityName), currentComment[0], unescapeEntity(value.strip())))
- currentComment[0] = None
+ parser.ExternalEntityRefHandler = ExternalEntityRefHandler
+ parser.CommentHandler = CommentHandler
+ parser.EntityDeclHandler = EntityDeclHandler
+ parser.Parse('<!DOCTYPE root SYSTEM "foo"><root/>', True)
- parser.ExternalEntityRefHandler = ExternalEntityRefHandler
- parser.CommentHandler = CommentHandler
- parser.EntityDeclHandler = EntityDeclHandler
- parser.Parse('<!DOCTYPE root SYSTEM "foo"><root/>', True)
+ for entry in result:
+ yield entry
- for entry in result:
- yield entry
def escapeProperty(value):
- return value.replace('\n', '\\n')
+ return value.replace('\n', '\\n')
+
def unescapeProperty(value):
- return value.replace('\\n', '\n')
+ return value.replace('\\n', '\n')
+
def parsePropertiesString(data, path):
- currentComment = None
- for line in data.splitlines():
- match = re.search(r'^\s*[#!]\s*(.*)', line)
- if match:
- currentComment = match.group(1)
- elif '=' in line:
- key, value = line.split('=', 1)
- yield (unescapeProperty(key), currentComment, unescapeProperty(value))
- currentComment = None
- elif re.search(r'\S', line):
- print >>sys.stderr, 'Unrecognized data in file %s: %s' % (path, line)
+ currentComment = None
+ for line in data.splitlines():
+ match = re.search(r'^\s*[#!]\s*(.*)', line)
+ if match:
+ currentComment = match.group(1)
+ elif '=' in line:
+ key, value = line.split('=', 1)
+ yield (unescapeProperty(key), currentComment, unescapeProperty(value))
+ currentComment = None
+ elif re.search(r'\S', line):
+ print >>sys.stderr, 'Unrecognized data in file %s: %s' % (path, line)
+
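
A minimal sketch of how comments attach to the following key (the data is hypothetical):

    data = '# The add-on name\nname=Adblock Plus\ntitle=Example\n'
    list(parsePropertiesString(data, 'test.properties'))
    # -> [('name', 'The add-on name', 'Adblock Plus'),
    #     ('title', None, 'Example')]
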
def parseString(data, path):
- result = {'_origData': data}
- if path.endswith('.dtd'):
- it = parseDTDString(data, path)
- elif path.endswith('.properties'):
- it = parsePropertiesString(data, path)
- else:
- return None
-
- for name, comment, value in it:
- result[name] = value
- return result
+ result = {'_origData': data}
+ if path.endswith('.dtd'):
+ it = parseDTDString(data, path)
+ elif path.endswith('.properties'):
+ it = parsePropertiesString(data, path)
+ else:
+ return None
+
+ for name, comment, value in it:
+ result[name] = value
+ return result
+
def readFile(path):
- fileHandle = codecs.open(path, 'rb', encoding='utf-8')
- data = fileHandle.read()
- fileHandle.close()
- return parseString(data, path)
+ fileHandle = codecs.open(path, 'rb', encoding='utf-8')
+ data = fileHandle.read()
+ fileHandle.close()
+ return parseString(data, path)
+
def generateStringEntry(key, value, path):
- if path.endswith('.dtd'):
- return '<!ENTITY %s "%s">\n' % (escapeEntity(key), escapeEntity(value))
- else:
- return '%s=%s\n' % (escapeProperty(key), escapeProperty(value))
+ if path.endswith('.dtd'):
+ return '<!ENTITY %s "%s">\n' % (escapeEntity(key), escapeEntity(value))
+ else:
+ return '%s=%s\n' % (escapeProperty(key), escapeProperty(value))
+
def appendToFile(path, key, value):
- fileHandle = codecs.open(path, 'ab', encoding='utf-8')
- fileHandle.write(generateStringEntry(key, value, path))
- fileHandle.close()
+ fileHandle = codecs.open(path, 'ab', encoding='utf-8')
+ fileHandle.write(generateStringEntry(key, value, path))
+ fileHandle.close()
+
def removeFromFile(path, key):
- fileHandle = codecs.open(path, 'rb', encoding='utf-8')
- data = fileHandle.read()
- fileHandle.close()
+ fileHandle = codecs.open(path, 'rb', encoding='utf-8')
+ data = fileHandle.read()
+ fileHandle.close()
- if path.endswith('.dtd'):
- data = re.sub(r'<!ENTITY\s+%s\s+"[^"]*">\s*' % key, '', data, re.S)
- else:
- data = re.sub(r'(^|\n)%s=[^\n]*\n' % key, r'\1', data, re.S)
+ if path.endswith('.dtd'):
+ data = re.sub(r'<!ENTITY\s+%s\s+"[^"]*">\s*' % key, '', data, re.S)
+ else:
+ data = re.sub(r'(^|\n)%s=[^\n]*\n' % key, r'\1', data, re.S)
+
+ fileHandle = codecs.open(path, 'wb', encoding='utf-8')
+ fileHandle.write(data)
+ fileHandle.close()
- fileHandle = codecs.open(path, 'wb', encoding='utf-8')
- fileHandle.write(data)
- fileHandle.close()
def toJSON(path):
- fileHandle = codecs.open(path, 'rb', encoding='utf-8')
- data = fileHandle.read()
- fileHandle.close()
-
- if path.endswith('.dtd'):
- it = parseDTDString(data, path)
- elif path.endswith('.properties'):
- it = parsePropertiesString(data, path)
- else:
- return None
-
- result = OrderedDict()
- for name, comment, value in it:
- obj = {'message': value}
- if comment == None:
- obj['description'] = name
+ fileHandle = codecs.open(path, 'rb', encoding='utf-8')
+ data = fileHandle.read()
+ fileHandle.close()
+
+ if path.endswith('.dtd'):
+ it = parseDTDString(data, path)
+ elif path.endswith('.properties'):
+ it = parsePropertiesString(data, path)
else:
- obj['description'] = '%s: %s' % (name, comment)
- result[name] = obj
- return json.dumps(result, ensure_ascii=False, indent=2)
+ return None
+
+ result = OrderedDict()
+ for name, comment, value in it:
+ obj = {'message': value}
+ if comment == None:
+ obj['description'] = name
+ else:
+ obj['description'] = '%s: %s' % (name, comment)
+ result[name] = obj
+ return json.dumps(result, ensure_ascii=False, indent=2)
+
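
For the properties sketch above, toJSON() would yield Chrome-style message objects with the key name folded into the description:

    # {
    #   "name":  {"message": "Adblock Plus",
    #             "description": "name: The add-on name"},
    #   "title": {"message": "Example", "description": "title"}
    # }
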
def fromJSON(path, data):
- data = json.loads(data)
- if not data:
- if os.path.exists(path):
- os.remove(path)
- return
-
- dir = os.path.dirname(path)
- if not os.path.exists(dir):
- os.makedirs(dir)
- file = codecs.open(path, 'wb', encoding='utf-8')
- for key, value in data.iteritems():
- file.write(generateStringEntry(key, value['message'], path))
- file.close()
+ data = json.loads(data)
+ if not data:
+ if os.path.exists(path):
+ os.remove(path)
+ return
+
+ dir = os.path.dirname(path)
+ if not os.path.exists(dir):
+ os.makedirs(dir)
+ file = codecs.open(path, 'wb', encoding='utf-8')
+ for key, value in data.iteritems():
+ file.write(generateStringEntry(key, value['message'], path))
+ file.close()
+
def preprocessChromeLocale(path, metadata, isMaster):
- fileHandle = codecs.open(path, 'rb', encoding='utf-8')
- data = json.load(fileHandle)
- fileHandle.close()
-
- for key, value in data.iteritems():
- if isMaster:
- # Make sure the key name is listed in the description
- if "description" in value:
- value["description"] = "%s: %s" % (key, value["description"])
- else:
- value["description"] = key
- else:
- # Delete description from translations
- if "description" in value:
- del value["description"]
+ fileHandle = codecs.open(path, 'rb', encoding='utf-8')
+ data = json.load(fileHandle)
+ fileHandle.close()
+
+ for key, value in data.iteritems():
+ if isMaster:
+ # Make sure the key name is listed in the description
+ if "description" in value:
+ value["description"] = "%s: %s" % (key, value["description"])
+ else:
+ value["description"] = key
+ else:
+ # Delete description from translations
+ if "description" in value:
+ del value["description"]
+
+ return json.dumps(data, ensure_ascii=False, sort_keys=True, indent=2)
- return json.dumps(data, ensure_ascii=False, sort_keys=True, indent=2)
def postprocessChromeLocale(path, data):
- parsed = json.loads(data)
- if isinstance(parsed, list):
- return
+ parsed = json.loads(data)
+ if isinstance(parsed, list):
+ return
+
+ # Delete description from translations
+ for key, value in parsed.iteritems():
+ if "description" in value:
+ del value["description"]
- # Delete description from translations
- for key, value in parsed.iteritems():
- if "description" in value:
- del value["description"]
+ file = codecs.open(path, 'wb', encoding='utf-8')
+ json.dump(parsed, file, ensure_ascii=False, sort_keys=True, indent=2, separators=(',', ': '))
+ file.close()
- file = codecs.open(path, 'wb', encoding='utf-8')
- json.dump(parsed, file, ensure_ascii=False, sort_keys=True, indent=2, separators=(',', ': '))
- file.close()
def setupTranslations(localeConfig, projectName, key):
- # Make a new set from the locales list, mapping to Crowdin-friendly format
- locales = {mapLocale(localeConfig['name_format'], locale)
- for locale in localeConfig['locales']}
-
- # Fill up with locales that we don't have but the browser supports
- if 'chrome' in localeConfig['target_platforms']:
- for locale in chromeLocales:
- locales.add(mapLocale('ISO-15897', locale))
-
- if 'gecko' in localeConfig['target_platforms']:
- firefoxLocales = urllib2.urlopen('http://www.mozilla.org/en-US/firefox/all.html').read()
- for match in re.finditer(r'&amp;lang=([\w\-]+)"', firefoxLocales):
- locales.add(mapLocale('BCP-47', match.group(1)))
- langPacks = urllib2.urlopen('https://addons.mozilla.org/en-US/firefox/language-tools/').read()
- for match in re.finditer(r'<tr>.*?</tr>', langPacks, re.S):
- if match.group(0).find('Install Language Pack') >= 0:
- match2 = re.search(r'lang="([\w\-]+)"', match.group(0))
- if match2:
- locales.add(mapLocale('BCP-47', match2.group(1)))
-
- allowed = set()
- allowedLocales = urllib2.urlopen('http://crowdin.net/page/language-codes').read()
- for match in re.finditer(r'<tr>\s*<td\b[^<>]*>([\w\-]+)</td>', allowedLocales, re.S):
- allowed.add(match.group(1))
- if not allowed.issuperset(locales):
- print 'Warning: the following locales aren\'t allowed by the server: ' + ', '.join(locales - allowed)
-
- locales = list(locales & allowed)
- locales.sort()
- params = urllib.urlencode([('languages[]', locale) for locale in locales])
- result = urllib2.urlopen('http://api.crowdin.net/api/project/%s/edit-project?key=%s' % (projectName, key), params).read()
- if result.find('<success') < 0:
- raise Exception('Server indicated that the operation was not successful\n' + result)
+ # Make a new set from the locales list, mapping to Crowdin-friendly format
+ locales = {mapLocale(localeConfig['name_format'], locale)
+ for locale in localeConfig['locales']}
+
+ # Fill up with locales that we don't have but the browser supports
+ if 'chrome' in localeConfig['target_platforms']:
+ for locale in chromeLocales:
+ locales.add(mapLocale('ISO-15897', locale))
+
+ if 'gecko' in localeConfig['target_platforms']:
+ firefoxLocales = urllib2.urlopen('http://www.mozilla.org/en-US/firefox/all.html').read()
+ for match in re.finditer(r'&amp;lang=([\w\-]+)"', firefoxLocales):
+ locales.add(mapLocale('BCP-47', match.group(1)))
+ langPacks = urllib2.urlopen('https://addons.mozilla.org/en-US/firefox/language-tools/').read()
+ for match in re.finditer(r'<tr>.*?</tr>', langPacks, re.S):
+ if match.group(0).find('Install Language Pack') >= 0:
+ match2 = re.search(r'lang="([\w\-]+)"', match.group(0))
+ if match2:
+ locales.add(mapLocale('BCP-47', match2.group(1)))
+
+ allowed = set()
+ allowedLocales = urllib2.urlopen('http://crowdin.net/page/language-codes').read()
+ for match in re.finditer(r'<tr>\s*<td\b[^<>]*>([\w\-]+)</td>', allowedLocales, re.S):
+ allowed.add(match.group(1))
+ if not allowed.issuperset(locales):
+ print 'Warning: the following locales aren\'t allowed by the server: ' + ', '.join(locales - allowed)
+
+ locales = list(locales & allowed)
+ locales.sort()
+ params = urllib.urlencode([('languages[]', locale) for locale in locales])
+ result = urllib2.urlopen('http://api.crowdin.net/api/project/%s/edit-project?key=%s' % (projectName, key), params).read()
+ if result.find('<success') < 0:
+ raise Exception('Server indicated that the operation was not successful\n' + result)
+
def postFiles(files, url):
- boundary = '----------ThIs_Is_tHe_bouNdaRY_$'
- body = ''
- for file, data in files:
- body += '--%s\r\n' % boundary
- body += 'Content-Disposition: form-data; name="files[%s]"; filename="%s"\r\n' % (file, file)
- body += 'Content-Type: application/octet-stream\r\n'
- body += 'Content-Transfer-Encoding: binary\r\n'
- body += '\r\n' + data + '\r\n'
- body += '--%s--\r\n' % boundary
-
- body = body.encode('utf-8')
- request = urllib2.Request(url, StringIO(body))
- request.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
- request.add_header('Content-Length', len(body))
- result = urllib2.urlopen(request).read()
- if result.find('<success') < 0:
- raise Exception('Server indicated that the operation was not successful\n' + result)
+ boundary = '----------ThIs_Is_tHe_bouNdaRY_$'
+ body = ''
+ for file, data in files:
+ body += '--%s\r\n' % boundary
+ body += 'Content-Disposition: form-data; name="files[%s]"; filename="%s"\r\n' % (file, file)
+ body += 'Content-Type: application/octet-stream\r\n'
+ body += 'Content-Transfer-Encoding: binary\r\n'
+ body += '\r\n' + data + '\r\n'
+ body += '--%s--\r\n' % boundary
+
+ body = body.encode('utf-8')
+ request = urllib2.Request(url, StringIO(body))
+ request.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
+ request.add_header('Content-Length', len(body))
+ result = urllib2.urlopen(request).read()
+ if result.find('<success') < 0:
+ raise Exception('Server indicated that the operation was not successful\n' + result)
+
def updateTranslationMaster(localeConfig, metadata, dir, projectName, key):
- result = json.load(urllib2.urlopen('http://api.crowdin.net/api/project/%s/info?key=%s&json=1' % (projectName, key)))
-
- existing = set(map(lambda f: f['name'], result['files']))
- add = []
- update = []
- for file in os.listdir(dir):
- path = os.path.join(dir, file)
- if os.path.isfile(path):
- if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'):
- data = preprocessChromeLocale(path, metadata, True)
- newName = file
- elif localeConfig['file_format'] == 'chrome-json':
- fileHandle = codecs.open(path, 'rb', encoding='utf-8')
- data = json.dumps({file: {'message': fileHandle.read()}})
- fileHandle.close()
- newName = file + '.json'
- else:
- data = toJSON(path)
- newName = file + '.json'
-
- if data:
- if newName in existing:
- update.append((newName, data))
- existing.remove(newName)
- else:
- add.append((newName, data))
-
- if len(add):
- titles = urllib.urlencode([('titles[%s]' % name, re.sub(r'\.json', '', name)) for name, data in add])
- postFiles(add, 'http://api.crowdin.net/api/project/%s/add-file?key=%s&type=chrome&%s' % (projectName, key, titles))
- if len(update):
- postFiles(update, 'http://api.crowdin.net/api/project/%s/update-file?key=%s' % (projectName, key))
- for file in existing:
- result = urllib2.urlopen('http://api.crowdin.net/api/project/%s/delete-file?key=%s&file=%s' % (projectName, key, file)).read()
- if result.find('<success') < 0:
- raise Exception('Server indicated that the operation was not successful\n' + result)
+ result = json.load(urllib2.urlopen('http://api.crowdin.net/api/project/%s/info?key=%s&json=1' % (projectName, key)))
+
+ existing = set(map(lambda f: f['name'], result['files']))
+ add = []
+ update = []
+ for file in os.listdir(dir):
+ path = os.path.join(dir, file)
+ if os.path.isfile(path):
+ if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'):
+ data = preprocessChromeLocale(path, metadata, True)
+ newName = file
+ elif localeConfig['file_format'] == 'chrome-json':
+ fileHandle = codecs.open(path, 'rb', encoding='utf-8')
+ data = json.dumps({file: {'message': fileHandle.read()}})
+ fileHandle.close()
+ newName = file + '.json'
+ else:
+ data = toJSON(path)
+ newName = file + '.json'
+
+ if data:
+ if newName in existing:
+ update.append((newName, data))
+ existing.remove(newName)
+ else:
+ add.append((newName, data))
+
+ if len(add):
+ titles = urllib.urlencode([('titles[%s]' % name, re.sub(r'\.json', '', name)) for name, data in add])
+ postFiles(add, 'http://api.crowdin.net/api/project/%s/add-file?key=%s&type=chrome&%s' % (projectName, key, titles))
+ if len(update):
+ postFiles(update, 'http://api.crowdin.net/api/project/%s/update-file?key=%s' % (projectName, key))
+ for file in existing:
+ result = urllib2.urlopen('http://api.crowdin.net/api/project/%s/delete-file?key=%s&file=%s' % (projectName, key, file)).read()
+ if result.find('<success') < 0:
+ raise Exception('Server indicated that the operation was not successful\n' + result)
+
def uploadTranslations(localeConfig, metadata, dir, locale, projectName, key):
- files = []
- for file in os.listdir(dir):
- path = os.path.join(dir, file)
- if os.path.isfile(path):
- if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'):
- data = preprocessChromeLocale(path, metadata, False)
- newName = file
- elif localeConfig['file_format'] == 'chrome-json':
- fileHandle = codecs.open(path, 'rb', encoding='utf-8')
- data = json.dumps({file: {'message': fileHandle.read()}})
- fileHandle.close()
- newName = file + '.json'
- else:
- data = toJSON(path)
- newName = file + '.json'
-
- if data:
- files.append((newName, data))
- if len(files):
- postFiles(files, 'http://api.crowdin.net/api/project/%s/upload-translation?key=%s&language=%s' % (
- projectName, key, mapLocale(localeConfig['name_format'], locale))
- )
+ files = []
+ for file in os.listdir(dir):
+ path = os.path.join(dir, file)
+ if os.path.isfile(path):
+ if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'):
+ data = preprocessChromeLocale(path, metadata, False)
+ newName = file
+ elif localeConfig['file_format'] == 'chrome-json':
+ fileHandle = codecs.open(path, 'rb', encoding='utf-8')
+ data = json.dumps({file: {'message': fileHandle.read()}})
+ fileHandle.close()
+ newName = file + '.json'
+ else:
+ data = toJSON(path)
+ newName = file + '.json'
+
+ if data:
+ files.append((newName, data))
+ if len(files):
+ postFiles(files, 'http://api.crowdin.net/api/project/%s/upload-translation?key=%s&language=%s' % (
+ projectName, key, mapLocale(localeConfig['name_format'], locale))
+ )
+
def getTranslations(localeConfig, projectName, key):
- result = urllib2.urlopen('http://api.crowdin.net/api/project/%s/export?key=%s' % (projectName, key)).read()
- if result.find('<success') < 0:
- raise Exception('Server indicated that the operation was not successful\n' + result)
-
- result = urllib2.urlopen('http://api.crowdin.net/api/project/%s/download/all.zip?key=%s' % (projectName, key)).read()
- zip = ZipFile(StringIO(result))
- dirs = {}
-
- normalizedDefaultLocale = localeConfig['default_locale']
- if localeConfig['name_format'] == 'ISO-15897':
- normalizedDefaultLocale = normalizedDefaultLocale.replace('_', '-')
- normalizedDefaultLocale = mapLocale(localeConfig['name_format'],
- normalizedDefaultLocale)
-
- for info in zip.infolist():
- if not info.filename.endswith('.json'):
- continue
-
- dir, file = os.path.split(info.filename)
- if not re.match(r'^[\w\-]+$', dir) or dir == normalizedDefaultLocale:
- continue
- if localeConfig['file_format'] == 'chrome-json' and file.count('.') == 1:
- origFile = file
- else:
- origFile = re.sub(r'\.json$', '', file)
- if (localeConfig['file_format'] == 'gecko-dtd' and
- not origFile.endswith('.dtd') and
- not origFile.endswith('.properties')):
- continue
+ result = urllib2.urlopen('http://api.crowdin.net/api/project/%s/export?key=%s' % (projectName, key)).read()
+ if result.find('<success') < 0:
+ raise Exception('Server indicated that the operation was not successful\n' + result)
- if localeConfig['name_format'] == 'ISO-15897':
- mapping = langMappingChrome
- else:
- mapping = langMappingGecko
+ result = urllib2.urlopen('http://api.crowdin.net/api/project/%s/download/all.zip?key=%s' % (projectName, key)).read()
+ zip = ZipFile(StringIO(result))
+ dirs = {}
- for key, value in mapping.iteritems():
- if value == dir:
- dir = key
+ normalizedDefaultLocale = localeConfig['default_locale']
if localeConfig['name_format'] == 'ISO-15897':
- dir = dir.replace('-', '_')
-
- data = zip.open(info.filename).read()
- if data == '[]':
- continue
-
- if not dir in dirs:
- dirs[dir] = set()
- dirs[dir].add(origFile)
-
- path = os.path.join(localeConfig['base_path'], dir, origFile)
- if not os.path.exists(os.path.dirname(path)):
- os.makedirs(os.path.dirname(path))
- if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'):
- postprocessChromeLocale(path, data)
- elif localeConfig['file_format'] == 'chrome-json':
- data = json.loads(data)
- if origFile in data:
- fileHandle = codecs.open(path, 'wb', encoding='utf-8')
- fileHandle.write(data[origFile]['message'])
- fileHandle.close()
- else:
- fromJSON(path, data)
-
- # Remove any extra files
- for dir, files in dirs.iteritems():
- baseDir = os.path.join(localeConfig['base_path'], dir)
- if not os.path.exists(baseDir):
- continue
- for file in os.listdir(baseDir):
- path = os.path.join(baseDir, file)
- if os.path.isfile(path) and (file.endswith('.json') or file.endswith('.properties') or file.endswith('.dtd')) and not file in files:
- os.remove(path)
+ normalizedDefaultLocale = normalizedDefaultLocale.replace('_', '-')
+ normalizedDefaultLocale = mapLocale(localeConfig['name_format'],
+ normalizedDefaultLocale)
+
+ for info in zip.infolist():
+ if not info.filename.endswith('.json'):
+ continue
+
+ dir, file = os.path.split(info.filename)
+ if not re.match(r'^[\w\-]+$', dir) or dir == normalizedDefaultLocale:
+ continue
+ if localeConfig['file_format'] == 'chrome-json' and file.count('.') == 1:
+ origFile = file
+ else:
+ origFile = re.sub(r'\.json$', '', file)
+ if (localeConfig['file_format'] == 'gecko-dtd' and
+ not origFile.endswith('.dtd') and
+ not origFile.endswith('.properties')):
+ continue
+
+ if localeConfig['name_format'] == 'ISO-15897':
+ mapping = langMappingChrome
+ else:
+ mapping = langMappingGecko
+
+ for key, value in mapping.iteritems():
+ if value == dir:
+ dir = key
+ if localeConfig['name_format'] == 'ISO-15897':
+ dir = dir.replace('-', '_')
+
+ data = zip.open(info.filename).read()
+ if data == '[]':
+ continue
+
+ if not dir in dirs:
+ dirs[dir] = set()
+ dirs[dir].add(origFile)
+
+ path = os.path.join(localeConfig['base_path'], dir, origFile)
+ if not os.path.exists(os.path.dirname(path)):
+ os.makedirs(os.path.dirname(path))
+ if localeConfig['file_format'] == 'chrome-json' and file.endswith('.json'):
+ postprocessChromeLocale(path, data)
+ elif localeConfig['file_format'] == 'chrome-json':
+ data = json.loads(data)
+ if origFile in data:
+ fileHandle = codecs.open(path, 'wb', encoding='utf-8')
+ fileHandle.write(data[origFile]['message'])
+ fileHandle.close()
+ else:
+ fromJSON(path, data)
+
+ # Remove any extra files
+ for dir, files in dirs.iteritems():
+ baseDir = os.path.join(localeConfig['base_path'], dir)
+ if not os.path.exists(baseDir):
+ continue
+ for file in os.listdir(baseDir):
+ path = os.path.join(baseDir, file)
+ if os.path.isfile(path) and (file.endswith('.json') or file.endswith('.properties') or file.endswith('.dtd')) and not file in files:
+ os.remove(path)
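
To summarize the reverse mapping performed while unpacking: Crowdin directory names are mapped back to extension locales before files are written out (directory names hypothetical):

    # ISO-15897 (Chrome): Crowdin "es-MX" -> "es-419" -> on disk "es_419"
    # BCP-47 (Gecko):     Crowdin "bn"    -> "bn-BD"  -> on disk "bn-BD"
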
diff --git a/packager.py b/packager.py
index 8a5765a..8e2c4ec 100644
--- a/packager.py
+++ b/packager.py
@@ -7,131 +7,145 @@
# Note: These are the base functions common to all packagers; the actual
# packagers are implemented in packagerGecko and packagerChrome.
-import sys, os, re, codecs, subprocess, json, zipfile
+import sys
+import os
+import re
+import codecs
+import subprocess
+import json
+import zipfile
from StringIO import StringIO
from chainedconfigparser import ChainedConfigParser
import buildtools
+
def getDefaultFileName(metadata, version, ext):
- return '%s-%s.%s' % (metadata.get('general', 'basename'), version, ext)
+ return '%s-%s.%s' % (metadata.get('general', 'basename'), version, ext)
+
def getMetadataPath(baseDir, type):
- return os.path.join(baseDir, 'metadata.%s' % type)
+ return os.path.join(baseDir, 'metadata.%s' % type)
+
def getDevEnvPath(baseDir, type):
- return os.path.join(baseDir, 'devenv.' + type)
+ return os.path.join(baseDir, 'devenv.' + type)
+
def readMetadata(baseDir, type):
- parser = ChainedConfigParser()
- parser.optionxform = lambda option: option
- parser.read(getMetadataPath(baseDir, type))
- return parser
+ parser = ChainedConfigParser()
+ parser.optionxform = lambda option: option
+ parser.read(getMetadataPath(baseDir, type))
+ return parser
+
def getBuildNum(baseDir):
- try:
- from buildtools.ensure_dependencies import Mercurial, Git
- if Mercurial().istype(baseDir):
- result = subprocess.check_output(['hg', 'id', '-R', baseDir, '-n'])
- return re.sub(r'\D', '', result)
- elif Git().istype(baseDir):
- result = subprocess.check_output(['git', 'rev-list', 'HEAD'], cwd=baseDir)
- return len(result.splitlines())
- except subprocess.CalledProcessError:
- pass
-
- return '0'
+ try:
+ from buildtools.ensure_dependencies import Mercurial, Git
+ if Mercurial().istype(baseDir):
+ result = subprocess.check_output(['hg', 'id', '-R', baseDir, '-n'])
+ return re.sub(r'\D', '', result)
+ elif Git().istype(baseDir):
+ result = subprocess.check_output(['git', 'rev-list', 'HEAD'], cwd=baseDir)
+ return len(result.splitlines())
+ except subprocess.CalledProcessError:
+ pass
+
+ return '0'
+
def getBuildVersion(baseDir, metadata, releaseBuild, buildNum=None):
- version = metadata.get('general', 'version')
- if not releaseBuild:
- if buildNum == None:
- buildNum = getBuildNum(baseDir)
- buildNum = str(buildNum)
- if len(buildNum) > 0:
- if re.search(r'(^|\.)\d+$', version):
- # Numerical version number - need to fill up with zeros to have three
- # version components.
- while version.count('.') < 2:
- version += '.0'
- version += '.' + buildNum
- return version
+ version = metadata.get('general', 'version')
+ if not releaseBuild:
+ if buildNum == None:
+ buildNum = getBuildNum(baseDir)
+ buildNum = str(buildNum)
+ if len(buildNum) > 0:
+ if re.search(r'(^|\.)\d+$', version):
+ # Numerical version number - need to fill up with zeros to have three
+ # version components.
+ while version.count('.') < 2:
+ version += '.0'
+ version += '.' + buildNum
+ return version
+
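
A sketch of the resulting version strings, assuming a hypothetical build number 1234:

    getBuildVersion(baseDir, metadata, False, 1234)
    # version "1.9"   -> "1.9.0.1234" (padded to three components first)
    # version "1.9.1" -> "1.9.1.1234"
    getBuildVersion(baseDir, metadata, True)
    # release build: the metadata version is returned unchanged
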
def getTemplate(template, autoEscape=False):
- import jinja2
+ import jinja2
+
+ templatePath = buildtools.__path__[0]
+ if autoEscape:
+ env = jinja2.Environment(loader=jinja2.FileSystemLoader(templatePath), autoescape=True)
+ else:
+ env = jinja2.Environment(loader=jinja2.FileSystemLoader(templatePath))
+ env.filters.update({'json': json.dumps})
+ return env.get_template(template)
- templatePath = buildtools.__path__[0]
- if autoEscape:
- env = jinja2.Environment(loader=jinja2.FileSystemLoader(templatePath), autoescape=True)
- else:
- env = jinja2.Environment(loader=jinja2.FileSystemLoader(templatePath))
- env.filters.update({'json': json.dumps})
- return env.get_template(template)
class Files(dict):
- def __init__(self, includedFiles, ignoredFiles, process=None):
- self.includedFiles = includedFiles
- self.ignoredFiles = ignoredFiles
- self.process = process
-
- def __setitem__(self, key, value):
- if self.process:
- value = self.process(key, value)
- dict.__setitem__(self, key, value)
-
- def isIncluded(self, relpath):
- parts = relpath.split('/')
- if not parts[0] in self.includedFiles:
- return False
- for part in parts:
- if part in self.ignoredFiles:
- return False
- return True
-
- def read(self, path, relpath='', skip=()):
- if os.path.isdir(path):
- for file in os.listdir(path):
- name = relpath + ('/' if relpath != '' else '') + file
- if name not in skip and self.isIncluded(name):
- self.read(os.path.join(path, file), name, skip)
- else:
- with open(path, 'rb') as file:
- if relpath in self:
- print >>sys.stderr, 'Warning: File %s defined multiple times' % relpath
- self[relpath] = file.read()
-
- def readMappedFiles(self, mappings):
- for item in mappings:
- target, source = item
-
- # Make sure the file is inside an included directory
- if '/' in target and not self.isIncluded(target):
- continue
- parts = source.split('/')
- path = os.path.join(os.path.dirname(item.source), *parts)
- if os.path.exists(path):
- self.read(path, target)
- else:
- print >>sys.stderr, 'Warning: Mapped file %s doesn\'t exist' % source
-
- def preprocess(self, filenames, params={}):
- import jinja2
- env = jinja2.Environment()
-
- for filename in filenames:
- env.autoescape = os.path.splitext(filename)[1].lower() in ('.html', '.xml')
- template = env.from_string(self[filename].decode('utf-8'))
- self[filename] = template.render(params).encode('utf-8')
-
- def zip(self, outFile, sortKey=None):
- zip = zipfile.ZipFile(outFile, 'w', zipfile.ZIP_DEFLATED)
- names = self.keys()
- names.sort(key=sortKey)
- for name in names:
- zip.writestr(name, self[name])
- zip.close()
-
- def zipToString(self, sortKey=None):
- buffer = StringIO()
- self.zip(buffer, sortKey=sortKey)
- return buffer.getvalue()
+ def __init__(self, includedFiles, ignoredFiles, process=None):
+ self.includedFiles = includedFiles
+ self.ignoredFiles = ignoredFiles
+ self.process = process
+
+ def __setitem__(self, key, value):
+ if self.process:
+ value = self.process(key, value)
+ dict.__setitem__(self, key, value)
+
+ def isIncluded(self, relpath):
+ parts = relpath.split('/')
+ if not parts[0] in self.includedFiles:
+ return False
+ for part in parts:
+ if part in self.ignoredFiles:
+ return False
+ return True
+
+ def read(self, path, relpath='', skip=()):
+ if os.path.isdir(path):
+ for file in os.listdir(path):
+ name = relpath + ('/' if relpath != '' else '') + file
+ if name not in skip and self.isIncluded(name):
+ self.read(os.path.join(path, file), name, skip)
+ else:
+ with open(path, 'rb') as file:
+ if relpath in self:
+ print >>sys.stderr, 'Warning: File %s defined multiple times' % relpath
+ self[relpath] = file.read()
+
+ def readMappedFiles(self, mappings):
+ for item in mappings:
+ target, source = item
+
+ # Make sure the file is inside an included directory
+ if '/' in target and not self.isIncluded(target):
+ continue
+ parts = source.split('/')
+ path = os.path.join(os.path.dirname(item.source), *parts)
+ if os.path.exists(path):
+ self.read(path, target)
+ else:
+ print >>sys.stderr, 'Warning: Mapped file %s doesn\'t exist' % source
+
+ def preprocess(self, filenames, params={}):
+ import jinja2
+ env = jinja2.Environment()
+
+ for filename in filenames:
+ env.autoescape = os.path.splitext(filename)[1].lower() in ('.html', '.xml')
+ template = env.from_string(self[filename].decode('utf-8'))
+ self[filename] = template.render(params).encode('utf-8')
+
+ def zip(self, outFile, sortKey=None):
+ zip = zipfile.ZipFile(outFile, 'w', zipfile.ZIP_DEFLATED)
+ names = self.keys()
+ names.sort(key=sortKey)
+ for name in names:
+ zip.writestr(name, self[name])
+ zip.close()
+
+ def zipToString(self, sortKey=None):
+ buffer = StringIO()
+ self.zip(buffer, sortKey=sortKey)
+ return buffer.getvalue()
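
A minimal sketch of how a packager drives the Files container (directory names hypothetical):

    files = Files(includedFiles={'lib', 'skin'}, ignoredFiles={'.hg', '.git'})
    files.read('/path/to/extension')  # collect everything under included dirs
    files['lib/main.js']              # raw bytes, keyed by zip-relative path
    data = files.zipToString()        # the packed extension as a single string
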
diff --git a/packagerChrome.py b/packagerChrome.py
index 93a7e31..e4b19de 100644
--- a/packagerChrome.py
+++ b/packagerChrome.py
@@ -17,354 +17,370 @@ from packager import readMetadata, getMetadataPath, getDefaultFileName, getBuild
defaultLocale = 'en_US'
+
def getIgnoredFiles(params):
- return {'store.description'}
+ return {'store.description'}
+
def getPackageFiles(params):
- result = set(('_locales', 'icons', 'jquery-ui', 'lib', 'skin', 'ui', 'ext'))
+ result = set(('_locales', 'icons', 'jquery-ui', 'lib', 'skin', 'ui', 'ext'))
+
+ if params['devenv']:
+ result.add('qunit')
- if params['devenv']:
- result.add('qunit')
+ baseDir = params['baseDir']
+ for file in os.listdir(baseDir):
+ if file.endswith('.js') or file.endswith('.html') or file.endswith('.xml'):
+ result.add(file)
+ return result
- baseDir = params['baseDir']
- for file in os.listdir(baseDir):
- if file.endswith('.js') or file.endswith('.html') or file.endswith('.xml'):
- result.add(file)
- return result
def processFile(path, data, params):
- # We don't change anything yet; this function currently only exists here so
- # that it can be overridden if necessary.
- return data
+ # We don't change anything yet; this function currently only exists here so
+ # that it can be overridden if necessary.
+ return data
-def makeIcons(files, filenames):
- try:
- from PIL import Image
- except ImportError:
- import Image
- icons = {}
- for filename in filenames:
- width, height = Image.open(StringIO(files[filename])).size
- if(width != height):
- print >>sys.stderr, 'Warning: %s size is %ix%i, icon should be square' % (filename, width, height)
- icons[width] = filename
- return icons
-def createScriptPage(params, template_name, script_option):
- template = getTemplate(template_name, autoEscape=True)
- return template.render(
- basename=params['metadata'].get('general', 'basename'),
- scripts=params['metadata'].get(*script_option).split()
- ).encode('utf-8')
+def makeIcons(files, filenames):
+ try:
+ from PIL import Image
+ except ImportError:
+ import Image
+ icons = {}
+ for filename in filenames:
+ width, height = Image.open(StringIO(files[filename])).size
+ if(width != height):
+ print >>sys.stderr, 'Warning: %s size is %ix%i, icon should be square' % (filename, width, height)
+ icons[width] = filename
+ return icons
-def createManifest(params, files):
- template = getTemplate('manifest.json.tmpl')
- templateData = dict(params)
-
- baseDir = templateData['baseDir']
- metadata = templateData['metadata']
-
- for opt in ('browserAction', 'pageAction'):
- if not metadata.has_option('general', opt):
- continue
-
- icons = metadata.get('general', opt).split()
- if not icons:
- continue
-
- if len(icons) == 1:
- # ... = icon.png
- icon, popup = icons[0], None
- elif len(icons) == 2:
- # ... = icon.png popup.html
- icon, popup = icons
- else:
- # ... = icon-19.png icon-38.png popup.html
- popup = icons.pop()
- icon = makeIcons(files, icons)
- templateData[opt] = {'icon': icon, 'popup': popup}
+def createScriptPage(params, template_name, script_option):
+ template = getTemplate(template_name, autoEscape=True)
+ return template.render(
+ basename=params['metadata'].get('general', 'basename'),
+ scripts=params['metadata'].get(*script_option).split()
+ ).encode('utf-8')
- if metadata.has_option('general', 'icons'):
- templateData['icons'] = makeIcons(files,
- metadata.get('general', 'icons').split())
- if metadata.has_option('general', 'permissions'):
- templateData['permissions'] = metadata.get('general', 'permissions').split()
+def createManifest(params, files):
+ template = getTemplate('manifest.json.tmpl')
+ templateData = dict(params)
+
+ baseDir = templateData['baseDir']
+ metadata = templateData['metadata']
+
+ for opt in ('browserAction', 'pageAction'):
+ if not metadata.has_option('general', opt):
+ continue
+
+ icons = metadata.get('general', opt).split()
+ if not icons:
+ continue
+
+ if len(icons) == 1:
+ # ... = icon.png
+ icon, popup = icons[0], None
+ elif len(icons) == 2:
+ # ... = icon.png popup.html
+ icon, popup = icons
+ else:
+ # ... = icon-19.png icon-38.png popup.html
+ popup = icons.pop()
+ icon = makeIcons(files, icons)
+
+ templateData[opt] = {'icon': icon, 'popup': popup}
+
+ if metadata.has_option('general', 'icons'):
+ templateData['icons'] = makeIcons(files,
+ metadata.get('general', 'icons').split())
+
+ if metadata.has_option('general', 'permissions'):
+ templateData['permissions'] = metadata.get('general', 'permissions').split()
+
+ if metadata.has_option('general', 'optionalPermissions'):
+ templateData['optionalPermissions'] = metadata.get(
+ 'general', 'optionalPermissions').split()
+
+ if metadata.has_option('general', 'backgroundScripts'):
+ templateData['backgroundScripts'] = metadata.get(
+ 'general', 'backgroundScripts').split()
+ if params['devenv']:
+ templateData['backgroundScripts'].append('devenvPoller__.js')
+
+ if metadata.has_option('general', 'webAccessible') and metadata.get('general', 'webAccessible') != '':
+ templateData['webAccessible'] = metadata.get('general',
+ 'webAccessible').split()
+
+ if metadata.has_section('contentScripts'):
+ contentScripts = []
+ for run_at, scripts in metadata.items('contentScripts'):
+ if scripts == '':
+ continue
+ contentScripts.append({
+ 'matches': ['http://*/*', 'https://*/*'],
+ 'js': scripts.split(),
+ 'run_at': run_at,
+ 'all_frames': True,
+ 'match_about_blank': True,
+ })
+ templateData['contentScripts'] = contentScripts
+
+ manifest = template.render(templateData)
+
+ # Normalize JSON structure
+ licenseComment = re.compile(r'/\*.*?\*/', re.S)
+ data = json.loads(re.sub(licenseComment, '', manifest, 1))
+ if '_dummy' in data:
+ del data['_dummy']
+ manifest = json.dumps(data, sort_keys=True, indent=2)
+
+ return manifest.encode('utf-8')
- if metadata.has_option('general', 'optionalPermissions'):
- templateData['optionalPermissions'] = metadata.get(
- 'general', 'optionalPermissions').split()
-
- if metadata.has_option('general', 'backgroundScripts'):
- templateData['backgroundScripts'] = metadata.get(
- 'general', 'backgroundScripts').split()
- if params['devenv']:
- templateData['backgroundScripts'].append('devenvPoller__.js')
-
- if metadata.has_option('general', 'webAccessible') and metadata.get('general', 'webAccessible') != '':
- templateData['webAccessible'] = metadata.get('general',
- 'webAccessible').split()
-
- if metadata.has_section('contentScripts'):
- contentScripts = []
- for run_at, scripts in metadata.items('contentScripts'):
- if scripts == '':
- continue
- contentScripts.append({
- 'matches': ['http://*/*', 'https://*/*'],
- 'js': scripts.split(),
- 'run_at': run_at,
- 'all_frames': True,
- 'match_about_blank': True,
- })
- templateData['contentScripts'] = contentScripts
-
- manifest = template.render(templateData)
-
- # Normalize JSON structure
- licenseComment = re.compile(r'/\*.*?\*/', re.S)
- data = json.loads(re.sub(licenseComment, '', manifest, 1))
- if '_dummy' in data:
- del data['_dummy']
- manifest = json.dumps(data, sort_keys=True, indent=2)
-
- return manifest.encode('utf-8')
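
The three accepted value shapes for browserAction/pageAction, as a hypothetical metadata excerpt:

    # [general]
    # pageAction = icon.png                               (icon only)
    # browserAction = icon.png popup.html                 (icon + popup)
    # browserAction = icon-19.png icon-38.png popup.html  (sized icons + popup)
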
def createInfoModule(params):
- template = getTemplate('chromeInfo.js.tmpl')
- return template.render(params).encode('utf-8');
+ template = getTemplate('chromeInfo.js.tmpl')
+ return template.render(params).encode('utf-8')
+
def convertJS(params, files):
- from jshydra.abp_rewrite import doRewrite
+ from jshydra.abp_rewrite import doRewrite
- for item in params['metadata'].items('convert_js'):
- file, sources = item
- baseDir = os.path.dirname(item.source)
+ for item in params['metadata'].items('convert_js'):
+ file, sources = item
+ baseDir = os.path.dirname(item.source)
- # Make sure the file is inside an included directory
- if '/' in file and not files.isIncluded(file):
- continue
+ # Make sure the file is inside an included directory
+ if '/' in file and not files.isIncluded(file):
+ continue
- sourceFiles = sources.split()
- args = []
- try:
- argsStart = sourceFiles.index('--arg')
- args = sourceFiles[argsStart + 1:]
- sourceFiles = sourceFiles[0:argsStart]
- except ValueError:
- pass
+ sourceFiles = sources.split()
+ args = []
+ try:
+ argsStart = sourceFiles.index('--arg')
+ args = sourceFiles[argsStart + 1:]
+ sourceFiles = sourceFiles[0:argsStart]
+ except ValueError:
+ pass
+
+ # Source files of the conversion shouldn't be part of the build
+ for sourceFile in sourceFiles:
+ if sourceFile in files:
+ del files[sourceFile]
- # Source files of the conversion shouldn't be part of the build
- for sourceFile in sourceFiles:
- if sourceFile in files:
- del files[sourceFile]
+ sourceFiles = map(lambda f: os.path.abspath(os.path.join(baseDir, f)), sourceFiles)
+ files[file] = doRewrite(sourceFiles, args)
- sourceFiles = map(lambda f: os.path.abspath(os.path.join(baseDir, f)), sourceFiles)
- files[file] = doRewrite(sourceFiles, args)
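As the code above shows, a [convert_js] source list may end with a '--arg' separator followed by options handed to the rewrite script rather than treated as input files. For a hypothetical entry 'lib/foo.js lib/bar.js --arg module=true':

sourceFiles = 'lib/foo.js lib/bar.js --arg module=true'.split()
args = []
try:
    argsStart = sourceFiles.index('--arg')
    args = sourceFiles[argsStart + 1:]      # ['module=true']
    sourceFiles = sourceFiles[0:argsStart]  # ['lib/foo.js', 'lib/bar.js']
except ValueError:
    pass
print sourceFiles, args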
def toJson(data):
- return json.dumps(
- data, ensure_ascii=False, sort_keys=True,
- indent=2, separators=(',', ': ')
- ).encode('utf-8') + '\n'
+ return json.dumps(
+ data, ensure_ascii=False, sort_keys=True,
+ indent=2, separators=(',', ': ')
+ ).encode('utf-8') + '\n'
+
def importGeckoLocales(params, files):
- import localeTools
-
- # FIXME: localeTools doesn't use real Chrome locales; it uses a dash as
- # separator instead.
- convert_locale_code = lambda code: code.replace('-', '_')
-
- # We need to map Chrome locales to Gecko locales. Start by mapping Chrome
- # locales to themselves, merely with the dash as separator.
- locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales}
-
- # Convert values to Crowdin locales first (use Chrome => Crowdin mapping).
- for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems():
- locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale
-
- # Now convert values to Gecko locales (use Gecko => Crowdin mapping).
- reverse_mapping = {v: k for k, v in locale_mapping.iteritems()}
- for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems():
- if crowdin_locale in reverse_mapping:
- locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale
-
- for target, source in locale_mapping.iteritems():
- targetFile = '_locales/%s/messages.json' % target
- if not targetFile in files:
- continue
-
- for item in params['metadata'].items('import_locales'):
- fileName, keys = item
- parts = map(lambda n: source if n == '*' else n, fileName.split('/'))
- sourceFile = os.path.join(os.path.dirname(item.source), *parts)
- incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete')
- if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker):
- continue
-
- data = json.loads(files[targetFile].decode('utf-8'))
-
- try:
- if sourceFile.endswith('.json'):
- with io.open(sourceFile, 'r', encoding='utf-8') as handle:
- sourceData = {k: v['message'] for k, v in json.load(handle).iteritems()}
- else:
- sourceData = localeTools.readFile(sourceFile)
-
- # Resolve wildcard imports
- if keys == '*' or keys == '=*':
- importList = sourceData.keys()
- importList = filter(lambda k: not k.startswith('_'), importList)
- if keys == '=*':
- importList = map(lambda k: '=' + k, importList)
- keys = ' '.join(importList)
-
- for stringID in keys.split():
- noMangling = False
- if stringID.startswith('='):
- stringID = stringID[1:]
- noMangling = True
-
- if stringID in sourceData:
- if noMangling:
- key = re.sub(r'\W', '_', stringID)
- else:
- key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID)
- if key in data:
- print 'Warning: locale string %s defined multiple times' % key
-
- # Remove access keys
- value = sourceData[stringID]
- match = re.search(r'^(.*?)\s*\(&.\)$', value)
- if match:
- value = match.group(1)
- else:
- index = value.find("&")
- if index >= 0:
- value = value[0:index] + value[index + 1:]
- data[key] = {'message': value}
- except Exception, e:
- print 'Warning: error importing locale data from %s: %s' % (sourceFile, e)
-
- files[targetFile] = toJson(data)
+ import localeTools
+
+ # FIXME: localeTools doesn't use real Chrome locales; it uses a dash as
+ # separator instead.
+ convert_locale_code = lambda code: code.replace('-', '_')
+
+ # We need to map Chrome locales to Gecko locales. Start by mapping Chrome
+ # locales to themselves, merely with the dash as separator.
+ locale_mapping = {convert_locale_code(l): l for l in localeTools.chromeLocales}
+
+ # Convert values to Crowdin locales first (use Chrome => Crowdin mapping).
+ for chrome_locale, crowdin_locale in localeTools.langMappingChrome.iteritems():
+ locale_mapping[convert_locale_code(chrome_locale)] = crowdin_locale
+
+ # Now convert values to Gecko locales (use Gecko => Crowdin mapping).
+ reverse_mapping = {v: k for k, v in locale_mapping.iteritems()}
+ for gecko_locale, crowdin_locale in localeTools.langMappingGecko.iteritems():
+ if crowdin_locale in reverse_mapping:
+ locale_mapping[reverse_mapping[crowdin_locale]] = gecko_locale
+
+ for target, source in locale_mapping.iteritems():
+ targetFile = '_locales/%s/messages.json' % target
+ if not targetFile in files:
+ continue
+
+ for item in params['metadata'].items('import_locales'):
+ fileName, keys = item
+ parts = map(lambda n: source if n == '*' else n, fileName.split('/'))
+ sourceFile = os.path.join(os.path.dirname(item.source), *parts)
+ incompleteMarker = os.path.join(os.path.dirname(sourceFile), '.incomplete')
+ if not os.path.exists(sourceFile) or os.path.exists(incompleteMarker):
+ continue
+
+ data = json.loads(files[targetFile].decode('utf-8'))
+
+ try:
+ if sourceFile.endswith('.json'):
+ with io.open(sourceFile, 'r', encoding='utf-8') as handle:
+ sourceData = {k: v['message'] for k, v in json.load(handle).iteritems()}
+ else:
+ sourceData = localeTools.readFile(sourceFile)
+
+ # Resolve wildcard imports
+ if keys == '*' or keys == '=*':
+ importList = sourceData.keys()
+ importList = filter(lambda k: not k.startswith('_'), importList)
+ if keys == '=*':
+ importList = map(lambda k: '=' + k, importList)
+ keys = ' '.join(importList)
+
+ for stringID in keys.split():
+ noMangling = False
+ if stringID.startswith('='):
+ stringID = stringID[1:]
+ noMangling = True
+
+ if stringID in sourceData:
+ if noMangling:
+ key = re.sub(r'\W', '_', stringID)
+ else:
+ key = re.sub(r'\..*', '', parts[-1]) + '_' + re.sub(r'\W', '_', stringID)
+ if key in data:
+ print 'Warning: locale string %s defined multiple times' % key
+
+ # Remove access keys
+ value = sourceData[stringID]
+ match = re.search(r'^(.*?)\s*\(&.\)$', value)
+ if match:
+ value = match.group(1)
+ else:
+ index = value.find("&")
+ if index >= 0:
+ value = value[0:index] + value[index + 1:]
+ data[key] = {'message': value}
+ except Exception, e:
+ print 'Warning: error importing locale data from %s: %s' % (sourceFile, e)
+
+ files[targetFile] = toJson(data)
+
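The access key handling above covers both Gecko labeling conventions: a trailing "(&X)" marker, as used e.g. in East Asian locales, and an ampersand embedded directly in the label. Pulled out into a standalone helper (hypothetical, for illustration only):

import re

def stripAccessKey(value):
    # Trailing "(&X)" marker form first, embedded "&" form second.
    match = re.search(r'^(.*?)\s*\(&.\)$', value)
    if match:
        return match.group(1)
    index = value.find('&')
    if index >= 0:
        return value[0:index] + value[index + 1:]
    return value

print stripAccessKey(u'\u30d5\u30a1\u30a4\u30eb(&F)')  # -> u'\u30d5\u30a1\u30a4\u30eb'
print stripAccessKey(u'&File')                         # -> u'File'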
def truncate(text, length_limit):
- if len(text) <= length_limit:
- return text
- return text[:length_limit - 1].rstrip() + u"\u2026"
+ if len(text) <= length_limit:
+ return text
+ return text[:length_limit - 1].rstrip() + u"\u2026"
+
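truncate() keeps a message within a hard character limit and marks the cut with a Unicode ellipsis, which the Chrome Web Store limits below (45/132/12 characters for name, description and short_name) rely on. With a hypothetical short_name message:

print truncate(u'Adblock Plus - free ad blocker', 12)
# -> u'Adblock Plu\u2026' (11 characters of text plus the ellipsis)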
def fixTranslationsForCWS(files):
- # Chrome Web Store requires messages used in manifest.json to be present in
- # all languages. It also enforces length limits for extension names and
- # descriptions.
- defaults = {}
- data = json.loads(files['_locales/%s/messages.json' % defaultLocale])
- for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']):
- name = match.group(1)
- defaults[name] = data[name]
-
- limits = {}
- manifest = json.loads(files['manifest.json'])
- for key, limit in (('name', 45), ('description', 132), ('short_name', 12)):
- match = re.search(r'__MSG_(\S+)__', manifest.get(key, ""))
- if match:
- limits[match.group(1)] = limit
-
- for filename in files:
- if not filename.startswith('_locales/') or not filename.endswith('/messages.json'):
- continue
-
- data = json.loads(files[filename])
- for name, info in defaults.iteritems():
- data.setdefault(name, info)
- for name, limit in limits.iteritems():
- if name in data:
- data[name]['message'] = truncate(data[name]['message'], limit)
- files[filename] = toJson(data)
+ # Chrome Web Store requires messages used in manifest.json to be present in
+ # all languages. It also enforces length limits for extension names and
+ # descriptions.
+ defaults = {}
+ data = json.loads(files['_locales/%s/messages.json' % defaultLocale])
+ for match in re.finditer(r'__MSG_(\S+)__', files['manifest.json']):
+ name = match.group(1)
+ defaults[name] = data[name]
+
+ limits = {}
+ manifest = json.loads(files['manifest.json'])
+ for key, limit in (('name', 45), ('description', 132), ('short_name', 12)):
+ match = re.search(r'__MSG_(\S+)__', manifest.get(key, ""))
+ if match:
+ limits[match.group(1)] = limit
+
+ for filename in files:
+ if not filename.startswith('_locales/') or not filename.endswith('/messages.json'):
+ continue
+
+ data = json.loads(files[filename])
+ for name, info in defaults.iteritems():
+ data.setdefault(name, info)
+ for name, limit in limits.iteritems():
+ if name in data:
+ data[name]['message'] = truncate(data[name]['message'], limit)
+ files[filename] = toJson(data)
+
def signBinary(zipdata, keyFile):
- import M2Crypto
- if not os.path.exists(keyFile):
- M2Crypto.RSA.gen_key(1024, 65537, callback=lambda x: None).save_key(keyFile, cipher=None)
- key = M2Crypto.EVP.load_key(keyFile)
- key.sign_init()
- key.sign_update(zipdata)
- return key.final()
+ import M2Crypto
+ if not os.path.exists(keyFile):
+ M2Crypto.RSA.gen_key(1024, 65537, callback=lambda x: None).save_key(keyFile, cipher=None)
+ key = M2Crypto.EVP.load_key(keyFile)
+ key.sign_init()
+ key.sign_update(zipdata)
+ return key.final()
+
def getPublicKey(keyFile):
- import M2Crypto
- return M2Crypto.EVP.load_key(keyFile).as_der()
+ import M2Crypto
+ return M2Crypto.EVP.load_key(keyFile).as_der()
+
def writePackage(outputFile, pubkey, signature, zipdata):
- if isinstance(outputFile, basestring):
- file = open(outputFile, 'wb')
- else:
- file = outputFile
- if pubkey != None and signature != None:
- file.write(struct.pack('<4sIII', 'Cr24', 2, len(pubkey), len(signature)))
- file.write(pubkey)
- file.write(signature)
- file.write(zipdata)
+ if isinstance(outputFile, basestring):
+ file = open(outputFile, 'wb')
+ else:
+ file = outputFile
+ if pubkey != None and signature != None:
+ file.write(struct.pack('<4sIII', 'Cr24', 2, len(pubkey), len(signature)))
+ file.write(pubkey)
+ file.write(signature)
+ file.write(zipdata)
+
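writePackage() emits the CRX version 2 container: the magic number 'Cr24', the format version, the public key and signature lengths as little-endian 32-bit integers, then the key, the signature and the ZIP data. A sketch of the corresponding reader (hypothetical, not part of buildtools):

import struct

def readPackage(path):
    with open(path, 'rb') as file:
        magic, version, pubkeyLen, sigLen = struct.unpack('<4sIII', file.read(16))
        if magic != 'Cr24' or version != 2:
            raise ValueError('Not a CRX v2 package')
        pubkey = file.read(pubkeyLen)
        signature = file.read(sigLen)
        zipdata = file.read()
    return pubkey, signature, zipdata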
def createBuild(baseDir, type='chrome', outFile=None, buildNum=None, releaseBuild=False, keyFile=None, devenv=False):
- metadata = readMetadata(baseDir, type)
- version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)
-
- if outFile == None:
- outFile = getDefaultFileName(metadata, version, 'crx' if keyFile else 'zip')
-
- params = {
- 'type': type,
- 'baseDir': baseDir,
- 'releaseBuild': releaseBuild,
- 'version': version,
- 'devenv': devenv,
- 'metadata': metadata,
- }
-
- mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
- files = Files(getPackageFiles(params), getIgnoredFiles(params),
- process=lambda path, data: processFile(path, data, params))
-
- files.readMappedFiles(mapped)
- files.read(baseDir, skip=[opt for opt, _ in mapped])
-
- if metadata.has_section('convert_js'):
- convertJS(params, files)
-
- if metadata.has_section('preprocess'):
- files.preprocess(
- [f for f, _ in metadata.items('preprocess')],
- {'needsExt': True}
- )
-
- if metadata.has_section('import_locales'):
- importGeckoLocales(params, files)
-
- files['manifest.json'] = createManifest(params, files)
- if type == 'chrome':
- fixTranslationsForCWS(files)
-
- if devenv:
- import buildtools
- import random
- files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js')
- files['devenvVersion__'] = str(random.random())
-
- if (metadata.has_option('general', 'backgroundScripts') and
- 'lib/info.js' in metadata.get('general', 'backgroundScripts').split() and
- 'lib/info.js' not in files):
- files['lib/info.js'] = createInfoModule(params)
-
- if metadata.has_option('general', 'testScripts'):
- files['qunit/index.html'] = createScriptPage(params, 'testIndex.html.tmpl',
- ('general', 'testScripts'))
-
- zipdata = files.zipToString()
- signature = None
- pubkey = None
- if keyFile != None:
- signature = signBinary(zipdata, keyFile)
- pubkey = getPublicKey(keyFile)
- writePackage(outFile, pubkey, signature, zipdata)
+ metadata = readMetadata(baseDir, type)
+ version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)
+
+ if outFile == None:
+ outFile = getDefaultFileName(metadata, version, 'crx' if keyFile else 'zip')
+
+ params = {
+ 'type': type,
+ 'baseDir': baseDir,
+ 'releaseBuild': releaseBuild,
+ 'version': version,
+ 'devenv': devenv,
+ 'metadata': metadata,
+ }
+
+ mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
+ files = Files(getPackageFiles(params), getIgnoredFiles(params),
+ process=lambda path, data: processFile(path, data, params))
+
+ files.readMappedFiles(mapped)
+ files.read(baseDir, skip=[opt for opt, _ in mapped])
+
+ if metadata.has_section('convert_js'):
+ convertJS(params, files)
+
+ if metadata.has_section('preprocess'):
+ files.preprocess(
+ [f for f, _ in metadata.items('preprocess')],
+ {'needsExt': True}
+ )
+
+ if metadata.has_section('import_locales'):
+ importGeckoLocales(params, files)
+
+ files['manifest.json'] = createManifest(params, files)
+ if type == 'chrome':
+ fixTranslationsForCWS(files)
+
+ if devenv:
+ import buildtools
+ import random
+ files.read(os.path.join(buildtools.__path__[0], 'chromeDevenvPoller__.js'), relpath='devenvPoller__.js')
+ files['devenvVersion__'] = str(random.random())
+
+ if (metadata.has_option('general', 'backgroundScripts') and
+ 'lib/info.js' in metadata.get('general', 'backgroundScripts').split() and
+ 'lib/info.js' not in files):
+ files['lib/info.js'] = createInfoModule(params)
+
+ if metadata.has_option('general', 'testScripts'):
+ files['qunit/index.html'] = createScriptPage(params, 'testIndex.html.tmpl',
+ ('general', 'testScripts'))
+
+ zipdata = files.zipToString()
+ signature = None
+ pubkey = None
+ if keyFile != None:
+ signature = signBinary(zipdata, keyFile)
+ pubkey = getPublicKey(keyFile)
+ writePackage(outFile, pubkey, signature, zipdata)
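End to end, createBuild() reads the metadata, collects and preprocesses the files, generates manifest.json and finally zips (or CRX-signs) the result. A typical invocation, mirroring how releaseAutomation.py calls it further down, with hypothetical paths:

import buildtools.packagerChrome as packagerChrome

# keyFile is optional; passing one turns the .zip into a signed .crx.
packagerChrome.createBuild(
    '/path/to/adblockplus', type='chrome',
    outFile='adblockplus.crx',
    releaseBuild=True, keyFile='/path/to/key.pem'
)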
diff --git a/packagerGecko.py b/packagerGecko.py
index c9a156e..987ee15 100644
--- a/packagerGecko.py
+++ b/packagerGecko.py
@@ -21,352 +21,372 @@ import packager
from packager import readMetadata, getMetadataPath, getDefaultFileName, getBuildVersion, getTemplate, Files
KNOWN_APPS = {
- 'conkeror': '{a79fe89b-6662-4ff4-8e88-09950ad4dfde}',
- 'emusic': 'dlm@emusic.com',
- 'fennec': '{a23983c0-fd0e-11dc-95ff-0800200c9a66}',
- 'fennec2': '{aa3c5121-dab2-40e2-81ca-7ea25febc110}',
- 'firefox': '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
- 'midbrowser': '{aa5ca914-c309-495d-91cf-3141bbb04115}',
- 'prism': 'prism@developer.mozilla.org',
- 'seamonkey': '{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}',
- 'songbird': 'songbird@songbirdnest.com',
- 'thunderbird': '{3550f703-e582-4d05-9a08-453d09bdfdc6}',
- 'toolkit': 'toolkit@mozilla.org',
- 'adblockbrowser': '{55aba3ac-94d3-41a8-9e25-5c21fe874539}',
+ 'conkeror': '{a79fe89b-6662-4ff4-8e88-09950ad4dfde}',
+ 'emusic': 'dlm@emusic.com',
+ 'fennec': '{a23983c0-fd0e-11dc-95ff-0800200c9a66}',
+ 'fennec2': '{aa3c5121-dab2-40e2-81ca-7ea25febc110}',
+ 'firefox': '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
+ 'midbrowser': '{aa5ca914-c309-495d-91cf-3141bbb04115}',
+ 'prism': 'prism@developer.mozilla.org',
+ 'seamonkey': '{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}',
+ 'songbird': 'songbird@songbirdnest.com',
+ 'thunderbird': '{3550f703-e582-4d05-9a08-453d09bdfdc6}',
+ 'toolkit': 'toolkit@mozilla.org',
+ 'adblockbrowser': '{55aba3ac-94d3-41a8-9e25-5c21fe874539}',
}
defaultLocale = 'en-US'
+
def getChromeDir(baseDir):
- return os.path.join(baseDir, 'chrome')
+ return os.path.join(baseDir, 'chrome')
+
def getLocalesDir(baseDir):
- return os.path.join(getChromeDir(baseDir), 'locale')
+ return os.path.join(getChromeDir(baseDir), 'locale')
+
def getChromeSubdirs(baseDir, locales):
- result = {}
- chromeDir = getChromeDir(baseDir)
- for subdir in ('content', 'skin'):
- result[subdir] = os.path.join(chromeDir, subdir)
- for locale in locales:
- result['locale/%s' % locale] = os.path.join(chromeDir, 'locale', locale)
- return result
+ result = {}
+ chromeDir = getChromeDir(baseDir)
+ for subdir in ('content', 'skin'):
+ result[subdir] = os.path.join(chromeDir, subdir)
+ for locale in locales:
+ result['locale/%s' % locale] = os.path.join(chromeDir, 'locale', locale)
+ return result
+
def getPackageFiles(params):
- result = set(('chrome', 'components', 'modules', 'lib', 'resources', 'chrome.manifest', 'icon.png', 'icon64.png',))
+ result = set(('chrome', 'components', 'modules', 'lib', 'resources', 'chrome.manifest', 'icon.png', 'icon64.png',))
+
+ baseDir = params['baseDir']
+ for file in os.listdir(baseDir):
+ if file.endswith('.js') or file.endswith('.xml'):
+ result.add(file)
+ return result
- baseDir = params['baseDir']
- for file in os.listdir(baseDir):
- if file.endswith('.js') or file.endswith('.xml'):
- result.add(file)
- return result
def getIgnoredFiles(params):
- return {'.incomplete', 'meta.properties'}
+ return {'.incomplete', 'meta.properties'}
+
def archive_path(path, baseDir):
- return '/'.join(os.path.split(os.path.relpath(path, baseDir)))
+ return '/'.join(os.path.split(os.path.relpath(path, baseDir)))
+
def isValidLocale(localesDir, dir, includeIncomplete=False):
- if re.search(r'[^\w\-]', dir):
- return False
- curLocaleDir = os.path.join(localesDir, dir)
- if not os.path.isdir(curLocaleDir):
- return False
- if len(os.listdir(curLocaleDir)) == 0:
- return False
- if not includeIncomplete and os.path.exists(os.path.join(localesDir, dir, '.incomplete')):
- return False
- return True
+ if re.search(r'[^\w\-]', dir):
+ return False
+ curLocaleDir = os.path.join(localesDir, dir)
+ if not os.path.isdir(curLocaleDir):
+ return False
+ if len(os.listdir(curLocaleDir)) == 0:
+ return False
+ if not includeIncomplete and os.path.exists(os.path.join(localesDir, dir, '.incomplete')):
+ return False
+ return True
+
def getLocales(baseDir, includeIncomplete=False):
- global defaultLocale
- localesDir = getLocalesDir(baseDir)
- locales = filter(lambda dir: isValidLocale(localesDir, dir, includeIncomplete), os.listdir(localesDir))
- locales.sort(key=lambda x: '!' if x == defaultLocale else x)
- return locales
+ global defaultLocale
+ localesDir = getLocalesDir(baseDir)
+ locales = filter(lambda dir: isValidLocale(localesDir, dir, includeIncomplete), os.listdir(localesDir))
+ locales.sort(key=lambda x: '!' if x == defaultLocale else x)
+ return locales
+
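The sort key above works because '!' sorts before any letter or digit in ASCII, so the default locale moves to the front while the remaining locales stay alphabetical; the same trick later places META-INF/zigbert.rsa first in signed XPI archives. For instance:

locales = ['de', 'en-US', 'ar', 'zh-CN']
locales.sort(key=lambda x: '!' if x == 'en-US' else x)
print locales  # ['en-US', 'ar', 'de', 'zh-CN']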
def processFile(path, data, params):
- if path.endswith('.manifest') and data.find('{{LOCALE}}') >= 0:
- localesRegExp = re.compile(r'^(.*?){{LOCALE}}(.*?){{LOCALE}}(.*)$', re.M)
- replacement = '\n'.join(map(lambda locale: r'\1%s\2%s\3' % (locale, locale), params['locales']))
- data = re.sub(localesRegExp, replacement, data)
+ if path.endswith('.manifest') and data.find('{{LOCALE}}') >= 0:
+ localesRegExp = re.compile(r'^(.*?){{LOCALE}}(.*?){{LOCALE}}(.*)$', re.M)
+ replacement = '\n'.join(map(lambda locale: r'\1%s\2%s\3' % (locale, locale), params['locales']))
+ data = re.sub(localesRegExp, replacement, data)
+
+ return data
- return data
def readLocaleMetadata(baseDir, locales):
- result = {}
-
- # Make sure we always have fallback data even if the default locale isn't part
- # of the build
- locales = list(locales)
- if not defaultLocale in locales:
- locales.append(defaultLocale)
-
- for locale in locales:
- data = SafeConfigParser()
- data.optionxform = str
- try:
- result[locale] = localeTools.readFile(os.path.join(getLocalesDir(baseDir), locale, 'meta.properties'))
- except:
- result[locale] = {}
- return result
+ result = {}
+
+ # Make sure we always have fallback data even if the default locale isn't part
+ # of the build
+ locales = list(locales)
+ if not defaultLocale in locales:
+ locales.append(defaultLocale)
+
+ for locale in locales:
+ data = SafeConfigParser()
+ data.optionxform = str
+ try:
+ result[locale] = localeTools.readFile(os.path.join(getLocalesDir(baseDir), locale, 'meta.properties'))
+ except:
+ result[locale] = {}
+ return result
+
def getContributors(metadata):
- main = []
- additional = set()
- if metadata.has_section('contributors'):
- options = metadata.options('contributors')
- options.sort()
- for option in options:
- value = metadata.get('contributors', option)
- if re.search(r'\D', option):
- match = re.search(r'^\s*(\S+)\s+//([^/\s]+)/@(\S+)\s*$', value)
- if not match:
- print >>sys.stderr, 'Warning: unrecognized contributor location "%s"\n' % value
- continue
- baseDir = os.path.dirname(metadata.option_source('contributors', option))
- parts = match.group(1).split('/')
- dom = minidom.parse(os.path.join(baseDir, *parts))
- tags = dom.getElementsByTagName(match.group(2))
- for tag in tags:
- if tag.hasAttribute(match.group(3)):
- for name in re.split(r'\s*,\s*', tag.getAttribute(match.group(3))):
- additional.add(name)
- else:
- main.append(value)
- return main + sorted(additional, key=unicode.lower)
+ main = []
+ additional = set()
+ if metadata.has_section('contributors'):
+ options = metadata.options('contributors')
+ options.sort()
+ for option in options:
+ value = metadata.get('contributors', option)
+ if re.search(r'\D', option):
+ match = re.search(r'^\s*(\S+)\s+//([^/\s]+)/@(\S+)\s*$', value)
+ if not match:
+ print >>sys.stderr, 'Warning: unrecognized contributor location "%s"\n' % value
+ continue
+ baseDir = os.path.dirname(metadata.option_source('contributors', option))
+ parts = match.group(1).split('/')
+ dom = minidom.parse(os.path.join(baseDir, *parts))
+ tags = dom.getElementsByTagName(match.group(2))
+ for tag in tags:
+ if tag.hasAttribute(match.group(3)):
+ for name in re.split(r'\s*,\s*', tag.getAttribute(match.group(3))):
+ additional.add(name)
+ else:
+ main.append(value)
+ return main + sorted(additional, key=unicode.lower)
+
def initTranslators(localeMetadata):
- for locale in localeMetadata.itervalues():
- if 'translator' in locale:
- locale['translators'] = sorted(map(lambda t: t.strip(), locale['translator'].split(',')), key=unicode.lower)
- else:
- locale['translators'] = []
+ for locale in localeMetadata.itervalues():
+ if 'translator' in locale:
+ locale['translators'] = sorted(map(lambda t: t.strip(), locale['translator'].split(',')), key=unicode.lower)
+ else:
+ locale['translators'] = []
+
def createManifest(params):
- global KNOWN_APPS, defaultLocale
- template = getTemplate('install.rdf.tmpl', autoEscape=True)
- templateData = dict(params)
- templateData['localeMetadata'] = readLocaleMetadata(params['baseDir'], params['locales'])
- initTranslators(templateData['localeMetadata'])
- templateData['KNOWN_APPS'] = KNOWN_APPS
- templateData['defaultLocale'] = defaultLocale
- return template.render(templateData).encode('utf-8')
+ global KNOWN_APPS, defaultLocale
+ template = getTemplate('install.rdf.tmpl', autoEscape=True)
+ templateData = dict(params)
+ templateData['localeMetadata'] = readLocaleMetadata(params['baseDir'], params['locales'])
+ initTranslators(templateData['localeMetadata'])
+ templateData['KNOWN_APPS'] = KNOWN_APPS
+ templateData['defaultLocale'] = defaultLocale
+ return template.render(templateData).encode('utf-8')
+
def importLocales(params, files):
- SECTION = 'import_locales'
- if not params['metadata'].has_section(SECTION):
- return
+ SECTION = 'import_locales'
+ if not params['metadata'].has_section(SECTION):
+ return
+
+ import localeTools
- import localeTools
+ for locale in params['locales']:
+ for item in params['metadata'].items(SECTION):
+ path, keys = item
+ parts = [locale if p == '*' else p for p in path.split('/')]
+ source = os.path.join(os.path.dirname(item.source), *parts)
+ if not os.path.exists(source):
+ continue
- for locale in params['locales']:
- for item in params['metadata'].items(SECTION):
- path, keys = item
- parts = [locale if p == '*' else p for p in path.split('/')]
- source = os.path.join(os.path.dirname(item.source), *parts)
- if not os.path.exists(source):
- continue
+ with io.open(source, 'r', encoding='utf-8') as handle:
+ data = json.load(handle)
- with io.open(source, 'r', encoding='utf-8') as handle:
- data = json.load(handle)
+ target_name = os.path.splitext(os.path.basename(source))[0] + '.properties'
+ target = archive_path(os.path.join(getLocalesDir(params['baseDir']), locale, target_name), params['baseDir'])
- target_name = os.path.splitext(os.path.basename(source))[0] + '.properties'
- target = archive_path(os.path.join(getLocalesDir(params['baseDir']), locale, target_name), params['baseDir'])
+ files[target] = ''
+ for key, value in sorted(data.items()):
+ message = value['message']
+ files[target] += localeTools.generateStringEntry(key, message, target).encode('utf-8')
- files[target] = ''
- for key, value in sorted(data.items()):
- message = value['message']
- files[target] += localeTools.generateStringEntry(key, message, target).encode('utf-8')
def fixupLocales(params, files):
- global defaultLocale
-
- # Read in default locale data; it might not be included in package files
- defaultLocaleDir = os.path.join(getLocalesDir(params['baseDir']), defaultLocale)
- reference_files = Files(getPackageFiles(params), getIgnoredFiles(params))
- reference_files.read(defaultLocaleDir, archive_path(defaultLocaleDir, params['baseDir']))
- reference_params = dict(params)
- reference_params['locales'] = [defaultLocale]
- importLocales(reference_params, reference_files)
-
- reference = {}
- for path, data in reference_files.iteritems():
- filename = path.split('/')[-1]
- data = localeTools.parseString(data.decode('utf-8'), filename)
- if data:
- reference[filename] = data
-
- for locale in params['locales']:
- for file in reference.iterkeys():
- path = 'chrome/locale/%s/%s' % (locale, file)
- if path in files:
- data = localeTools.parseString(files[path].decode('utf-8'), path)
- for key, value in reference[file].iteritems():
- if not key in data:
- files[path] += localeTools.generateStringEntry(key, value, path).encode('utf-8')
- else:
- files[path] = reference[file]['_origData'].encode('utf-8')
+ global defaultLocale
+
+ # Read in default locale data; it might not be included in package files
+ defaultLocaleDir = os.path.join(getLocalesDir(params['baseDir']), defaultLocale)
+ reference_files = Files(getPackageFiles(params), getIgnoredFiles(params))
+ reference_files.read(defaultLocaleDir, archive_path(defaultLocaleDir, params['baseDir']))
+ reference_params = dict(params)
+ reference_params['locales'] = [defaultLocale]
+ importLocales(reference_params, reference_files)
+
+ reference = {}
+ for path, data in reference_files.iteritems():
+ filename = path.split('/')[-1]
+ data = localeTools.parseString(data.decode('utf-8'), filename)
+ if data:
+ reference[filename] = data
+
+ for locale in params['locales']:
+ for file in reference.iterkeys():
+ path = 'chrome/locale/%s/%s' % (locale, file)
+ if path in files:
+ data = localeTools.parseString(files[path].decode('utf-8'), path)
+ for key, value in reference[file].iteritems():
+ if not key in data:
+ files[path] += localeTools.generateStringEntry(key, value, path).encode('utf-8')
+ else:
+ files[path] = reference[file]['_origData'].encode('utf-8')
+
def processJSONFiles(params, files):
- prefix = 'lib/'
- for name, content in files.iteritems():
- if name.startswith(prefix) and name.endswith('.json'):
- params['jsonRequires'][name[len(prefix):]] = json.loads(content)
- for name in params['jsonRequires'].iterkeys():
- del files[prefix + name]
+ prefix = 'lib/'
+ for name, content in files.iteritems():
+ if name.startswith(prefix) and name.endswith('.json'):
+ params['jsonRequires'][name[len(prefix):]] = json.loads(content)
+ for name in params['jsonRequires'].iterkeys():
+ del files[prefix + name]
+
def addMissingFiles(params, files):
- templateData = {
- 'hasChrome': False,
- 'hasChromeRequires': False,
- 'hasShutdownHandlers': False,
- 'hasXMLHttpRequest': False,
- 'chromeWindows': [],
- 'requires': set(),
- 'jsonRequires': params['jsonRequires'],
- 'metadata': params['metadata'],
- 'multicompartment': params['multicompartment'],
- 'applications': dict((v, k) for k, v in KNOWN_APPS.iteritems()),
- }
-
- def checkScript(name):
- content = files[name]
- for match in re.finditer(r'(?:^|\s)require\(\s*"([\w\-]+)"\s*\)', content):
- templateData['requires'].add(match.group(1))
- if name.startswith('chrome/content/'):
- templateData['hasChromeRequires'] = True
- if name.startswith('lib/') and re.search(r'\bXMLHttpRequest\b', content):
- templateData['hasXMLHttpRequest'] = True
- if not '/' in name or name.startswith('lib/'):
- if re.search(r'(?:^|\s)onShutdown\.', content):
- templateData['hasShutdownHandlers'] = True
-
- for name, content in files.iteritems():
- if name == 'chrome.manifest':
- templateData['hasChrome'] = True
- elif name.endswith('.js'):
- checkScript(name)
- elif name.endswith('.xul'):
- match = re.search(r'<(?:window|dialog)\s[^>]*\bwindowtype="([^">]+)"', content)
- if match:
- templateData['chromeWindows'].append(match.group(1))
-
- while True:
- missing = []
- for module in templateData['requires']:
- moduleFile = 'lib/' + module + '.js'
- if not moduleFile in files:
- import buildtools
- path = os.path.join(buildtools.__path__[0], moduleFile)
- if os.path.exists(path):
- missing.append((path, moduleFile))
- if not len(missing):
- break
- for path, moduleFile in missing:
- files.read(path, moduleFile)
- checkScript(moduleFile)
-
- template = getTemplate('bootstrap.js.tmpl')
- files['bootstrap.js'] = template.render(templateData).encode('utf-8')
+ templateData = {
+ 'hasChrome': False,
+ 'hasChromeRequires': False,
+ 'hasShutdownHandlers': False,
+ 'hasXMLHttpRequest': False,
+ 'chromeWindows': [],
+ 'requires': set(),
+ 'jsonRequires': params['jsonRequires'],
+ 'metadata': params['metadata'],
+ 'multicompartment': params['multicompartment'],
+ 'applications': dict((v, k) for k, v in KNOWN_APPS.iteritems()),
+ }
+
+ def checkScript(name):
+ content = files[name]
+ for match in re.finditer(r'(?:^|\s)require\(\s*"([\w\-]+)"\s*\)', content):
+ templateData['requires'].add(match.group(1))
+ if name.startswith('chrome/content/'):
+ templateData['hasChromeRequires'] = True
+ if name.startswith('lib/') and re.search(r'\bXMLHttpRequest\b', content):
+ templateData['hasXMLHttpRequest'] = True
+ if not '/' in name or name.startswith('lib/'):
+ if re.search(r'(?:^|\s)onShutdown\.', content):
+ templateData['hasShutdownHandlers'] = True
+
+ for name, content in files.iteritems():
+ if name == 'chrome.manifest':
+ templateData['hasChrome'] = True
+ elif name.endswith('.js'):
+ checkScript(name)
+ elif name.endswith('.xul'):
+ match = re.search(r'<(?:window|dialog)\s[^>]*\bwindowtype="([^">]+)"', content)
+ if match:
+ templateData['chromeWindows'].append(match.group(1))
+
+ while True:
+ missing = []
+ for module in templateData['requires']:
+ moduleFile = 'lib/' + module + '.js'
+ if not moduleFile in files:
+ import buildtools
+ path = os.path.join(buildtools.__path__[0], moduleFile)
+ if os.path.exists(path):
+ missing.append((path, moduleFile))
+ if not len(missing):
+ break
+ for path, moduleFile in missing:
+ files.read(path, moduleFile)
+ checkScript(moduleFile)
+
+ template = getTemplate('bootstrap.js.tmpl')
+ files['bootstrap.js'] = template.render(templateData).encode('utf-8')
+
def signFiles(files, keyFile):
- import M2Crypto
- manifest = []
- signature = []
-
- def getDigest(data):
- md5 = hashlib.md5()
- md5.update(data)
- sha1 = hashlib.sha1()
- sha1.update(data)
- return 'Digest-Algorithms: MD5 SHA1\nMD5-Digest: %s\nSHA1-Digest: %s\n' % (base64.b64encode(md5.digest()), base64.b64encode(sha1.digest()))
-
- def addSection(manifestData, signaturePrefix):
- manifest.append(manifestData)
- signatureData = ''
- if signaturePrefix:
- signatureData += signaturePrefix
- signatureData += getDigest(manifestData)
- signature.append(signatureData)
-
- addSection('Manifest-Version: 1.0\n', 'Signature-Version: 1.0\n')
- fileNames = files.keys()
- fileNames.sort()
- for fileName in fileNames:
- addSection('Name: %s\n%s' % (fileName, getDigest(files[fileName])), 'Name: %s\n' % fileName)
- files['META-INF/manifest.mf'] = '\n'.join(manifest)
- files['META-INF/zigbert.sf'] = '\n'.join(signature)
-
- keyHandle = open(keyFile, 'rb')
- keyData = keyHandle.read()
- keyHandle.close()
- stack = M2Crypto.X509.X509_Stack()
- first = True
- for match in re.finditer(r'-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----', keyData, re.S):
- if first:
- # Skip first certificate
- first = False
- else:
- stack.push(M2Crypto.X509.load_cert_string(match.group(0)))
-
- mime = M2Crypto.SMIME.SMIME()
- mime.load_key(keyFile)
- mime.set_x509_stack(stack)
- signature = mime.sign(M2Crypto.BIO.MemoryBuffer(files['META-INF/zigbert.sf'].encode('utf-8')), M2Crypto.SMIME.PKCS7_DETACHED | M2Crypto.SMIME.PKCS7_BINARY)
-
- buffer = M2Crypto.BIO.MemoryBuffer()
- signature.write_der(buffer)
- files['META-INF/zigbert.rsa'] = buffer.read()
+ import M2Crypto
+ manifest = []
+ signature = []
+
+ def getDigest(data):
+ md5 = hashlib.md5()
+ md5.update(data)
+ sha1 = hashlib.sha1()
+ sha1.update(data)
+ return 'Digest-Algorithms: MD5 SHA1\nMD5-Digest: %s\nSHA1-Digest: %s\n' % (base64.b64encode(md5.digest()), base64.b64encode(sha1.digest()))
+
+ def addSection(manifestData, signaturePrefix):
+ manifest.append(manifestData)
+ signatureData = ''
+ if signaturePrefix:
+ signatureData += signaturePrefix
+ signatureData += getDigest(manifestData)
+ signature.append(signatureData)
+
+ addSection('Manifest-Version: 1.0\n', 'Signature-Version: 1.0\n')
+ fileNames = files.keys()
+ fileNames.sort()
+ for fileName in fileNames:
+ addSection('Name: %s\n%s' % (fileName, getDigest(files[fileName])), 'Name: %s\n' % fileName)
+ files['META-INF/manifest.mf'] = '\n'.join(manifest)
+ files['META-INF/zigbert.sf'] = '\n'.join(signature)
+
+ keyHandle = open(keyFile, 'rb')
+ keyData = keyHandle.read()
+ keyHandle.close()
+ stack = M2Crypto.X509.X509_Stack()
+ first = True
+ for match in re.finditer(r'-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----', keyData, re.S):
+ if first:
+ # Skip first certificate
+ first = False
+ else:
+ stack.push(M2Crypto.X509.load_cert_string(match.group(0)))
+
+ mime = M2Crypto.SMIME.SMIME()
+ mime.load_key(keyFile)
+ mime.set_x509_stack(stack)
+ signature = mime.sign(M2Crypto.BIO.MemoryBuffer(files['META-INF/zigbert.sf'].encode('utf-8')), M2Crypto.SMIME.PKCS7_DETACHED | M2Crypto.SMIME.PKCS7_BINARY)
+
+ buffer = M2Crypto.BIO.MemoryBuffer()
+ signature.write_der(buffer)
+ files['META-INF/zigbert.rsa'] = buffer.read()
+
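signFiles() produces the classic XPI signing triple: META-INF/manifest.mf with per-file digests, META-INF/zigbert.sf with digests of those manifest sections, and a detached PKCS#7 signature in META-INF/zigbert.rsa, which the sortKey in createBuild below places first in the archive. A sketch of one manifest section in the format emitted by getDigest(), for a hypothetical file:

import base64
import hashlib

data = 'console.log("hello");\n'
print 'Name: lib/main.js'
print 'Digest-Algorithms: MD5 SHA1'
print 'MD5-Digest: %s' % base64.b64encode(hashlib.md5(data).digest())
print 'SHA1-Digest: %s' % base64.b64encode(hashlib.sha1(data).digest())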
def createBuild(baseDir, type="gecko", outFile=None, locales=None, buildNum=None, releaseBuild=False, keyFile=None, multicompartment=False):
- if locales == None:
- locales = getLocales(baseDir)
- elif locales == 'all':
- locales = getLocales(baseDir, True)
-
- metadata = readMetadata(baseDir, type)
- version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)
-
- if outFile == None:
- outFile = getDefaultFileName(metadata, version, 'xpi')
-
- contributors = getContributors(metadata)
-
- params = {
- 'baseDir': baseDir,
- 'locales': locales,
- 'releaseBuild': releaseBuild,
- 'version': version.encode('utf-8'),
- 'metadata': metadata,
- 'contributors': contributors,
- 'multicompartment': multicompartment,
- 'jsonRequires': {},
- }
-
- mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
- skip = [opt for opt, _ in mapped] + ['chrome']
- files = Files(getPackageFiles(params), getIgnoredFiles(params),
- process=lambda path, data: processFile(path, data, params))
- files['install.rdf'] = createManifest(params)
- files.readMappedFiles(mapped)
- files.read(baseDir, skip=skip)
- for name, path in getChromeSubdirs(baseDir, params['locales']).iteritems():
- if os.path.isdir(path):
- files.read(path, 'chrome/%s' % name, skip=skip)
- importLocales(params, files)
- fixupLocales(params, files)
- processJSONFiles(params, files)
- if not 'bootstrap.js' in files:
- addMissingFiles(params, files)
- if metadata.has_section('preprocess'):
- files.preprocess([f for f, _ in metadata.items('preprocess')])
- if keyFile:
- signFiles(files, keyFile)
- files.zip(outFile, sortKey=lambda x: '!' if x == 'META-INF/zigbert.rsa' else x)
+ if locales == None:
+ locales = getLocales(baseDir)
+ elif locales == 'all':
+ locales = getLocales(baseDir, True)
+
+ metadata = readMetadata(baseDir, type)
+ version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)
+
+ if outFile == None:
+ outFile = getDefaultFileName(metadata, version, 'xpi')
+
+ contributors = getContributors(metadata)
+
+ params = {
+ 'baseDir': baseDir,
+ 'locales': locales,
+ 'releaseBuild': releaseBuild,
+ 'version': version.encode('utf-8'),
+ 'metadata': metadata,
+ 'contributors': contributors,
+ 'multicompartment': multicompartment,
+ 'jsonRequires': {},
+ }
+
+ mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
+ skip = [opt for opt, _ in mapped] + ['chrome']
+ files = Files(getPackageFiles(params), getIgnoredFiles(params),
+ process=lambda path, data: processFile(path, data, params))
+ files['install.rdf'] = createManifest(params)
+ files.readMappedFiles(mapped)
+ files.read(baseDir, skip=skip)
+ for name, path in getChromeSubdirs(baseDir, params['locales']).iteritems():
+ if os.path.isdir(path):
+ files.read(path, 'chrome/%s' % name, skip=skip)
+ importLocales(params, files)
+ fixupLocales(params, files)
+ processJSONFiles(params, files)
+ if not 'bootstrap.js' in files:
+ addMissingFiles(params, files)
+ if metadata.has_section('preprocess'):
+ files.preprocess([f for f, _ in metadata.items('preprocess')])
+ if keyFile:
+ signFiles(files, keyFile)
+ files.zip(outFile, sortKey=lambda x: '!' if x == 'META-INF/zigbert.rsa' else x)
+
def autoInstall(baseDir, type, host, port, multicompartment=False):
- fileBuffer = StringIO()
- createBuild(baseDir, type=type, outFile=fileBuffer, multicompartment=multicompartment)
- urllib.urlopen('http://%s:%s/' % (host, port), data=fileBuffer.getvalue())
+ fileBuffer = StringIO()
+ createBuild(baseDir, type=type, outFile=fileBuffer, multicompartment=multicompartment)
+ urllib.urlopen('http://%s:%s/' % (host, port), data=fileBuffer.getvalue())
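autoInstall() builds the XPI entirely in memory and POSTs it to a local HTTP endpoint, the protocol understood by the Extension Auto-Installer add-on. A sketch of a typical call, assuming the add-on's default port 8888:

import buildtools.packagerGecko as packagerGecko

# Pushes a fresh development build into a running browser; assumes the
# Extension Auto-Installer add-on is listening on localhost:8888.
packagerGecko.autoInstall('/path/to/adblockplus', 'gecko', 'localhost', 8888)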
diff --git a/packagerSafari.py b/packagerSafari.py
index cb4ab29..6549652 100644
--- a/packagerSafari.py
+++ b/packagerSafari.py
@@ -13,259 +13,267 @@ from urlparse import urlparse
from packager import readMetadata, getDefaultFileName, getBuildVersion, getTemplate, Files
from packagerChrome import convertJS, importGeckoLocales, getIgnoredFiles, getPackageFiles, defaultLocale, createScriptPage
+
def processFile(path, data, params):
- return data
+ return data
+
def createManifest(params, files):
- template = getTemplate('Info.plist.tmpl', autoEscape=True)
- metadata = params['metadata']
- catalog = json.loads(files['_locales/%s/messages.json' % defaultLocale])
+ template = getTemplate('Info.plist.tmpl', autoEscape=True)
+ metadata = params['metadata']
+ catalog = json.loads(files['_locales/%s/messages.json' % defaultLocale])
- def parse_section(section, depth=1):
- result = {}
+ def parse_section(section, depth=1):
+ result = {}
- if not metadata.has_section(section):
- return result
+ if not metadata.has_section(section):
+ return result
- for opt in metadata.options(section):
- bits = opt.split('_', depth)
- key = bits.pop().replace('_', ' ').title()
- val = metadata.get(section, opt)
+ for opt in metadata.options(section):
+ bits = opt.split('_', depth)
+ key = bits.pop().replace('_', ' ').title()
+ val = metadata.get(section, opt)
- try:
- val = int(val)
- except ValueError:
- try:
- val = float(val)
- except ValueError:
- pass
+ try:
+ val = int(val)
+ except ValueError:
+ try:
+ val = float(val)
+ except ValueError:
+ pass
- reduce(lambda d, x: d.setdefault(x, {}), bits, result)[key] = val
+ reduce(lambda d, x: d.setdefault(x, {}), bits, result)[key] = val
- return result
+ return result
+
+ def get_optional(*args):
+ try:
+ return metadata.get(*args)
+ except ConfigParser.Error:
+ return None
+
+ allowedDomains = set()
+ allowAllDomains = False
+ allowSecurePages = False
+
+ for perm in metadata.get('general', 'permissions').split():
+ if perm == '<all_urls>':
+ allowAllDomains = True
+ allowSecurePages = True
+ continue
+
+ url = urlparse(perm)
+
+ if url.scheme == 'https':
+ allowSecurePages = True
+ elif url.scheme != 'http':
+ continue
+
+ if '*' in url.hostname:
+ allowAllDomains = True
+ continue
+
+ allowedDomains.add(url.hostname)
+
+ return template.render(
+ basename=metadata.get('general', 'basename'),
+ version=params['version'],
+ releaseBuild=params['releaseBuild'],
+ name=catalog['name']['message'],
+ description=catalog['description']['message'],
+ author=get_optional('general', 'author'),
+ homepage=get_optional('general', 'homepage'),
+ updateURL=get_optional('general', 'updateURL'),
+ allowedDomains=allowedDomains,
+ allowAllDomains=allowAllDomains,
+ allowSecurePages=allowSecurePages,
+ startScripts=(get_optional('contentScripts', 'document_start') or '').split(),
+ endScripts=(get_optional('contentScripts', 'document_end') or '').split(),
+ menus=parse_section('menus', 2),
+ toolbarItems=parse_section('toolbar_items'),
+ popovers=parse_section('popovers'),
+ developerIdentifier=params.get('developerIdentifier')
+ ).encode('utf-8')
- def get_optional(*args):
- try:
- return metadata.get(*args)
- except ConfigParser.Error:
- return None
-
- allowedDomains = set()
- allowAllDomains = False
- allowSecurePages = False
-
- for perm in metadata.get('general', 'permissions').split():
- if perm == '<all_urls>':
- allowAllDomains = True
- allowSecurePages = True
- continue
-
- url = urlparse(perm)
-
- if url.scheme == 'https':
- allowSecurePages = True
- elif url.scheme != 'http':
- continue
-
- if '*' in url.hostname:
- allowAllDomains = True
- continue
-
- allowedDomains.add(url.hostname)
-
- return template.render(
- basename=metadata.get('general', 'basename'),
- version=params['version'],
- releaseBuild=params['releaseBuild'],
- name=catalog['name']['message'],
- description=catalog['description']['message'],
- author=get_optional('general', 'author'),
- homepage=get_optional('general', 'homepage'),
- updateURL=get_optional('general', 'updateURL'),
- allowedDomains=allowedDomains,
- allowAllDomains=allowAllDomains,
- allowSecurePages=allowSecurePages,
- startScripts=(get_optional('contentScripts', 'document_start') or '').split(),
- endScripts=(get_optional('contentScripts', 'document_end') or '').split(),
- menus=parse_section('menus', 2),
- toolbarItems=parse_section('toolbar_items'),
- popovers=parse_section('popovers'),
- developerIdentifier=params.get('developerIdentifier')
- ).encode('utf-8')
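parse_section() turns underscore-separated option names into nested dictionaries for the Info.plist template: all but the last depth components become nesting levels, and the final component is title-cased into a key. A standalone illustration of the reduce/setdefault idiom, with a hypothetical option name:

result = {}
bits = 'default_image'.split('_', 1)  # hypothetical option, depth=1
key = bits.pop().replace('_', ' ').title()
reduce(lambda d, x: d.setdefault(x, {}), bits, result)[key] = 'icon.png'
print result  # {'default': {'Image': 'icon.png'}}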
def createInfoModule(params):
- template = getTemplate('safariInfo.js.tmpl')
- return template.render(params).encode('utf-8')
+ template = getTemplate('safariInfo.js.tmpl')
+ return template.render(params).encode('utf-8')
+
def fixAbsoluteUrls(files):
- for filename, content in files.iteritems():
- if os.path.splitext(filename)[1].lower() == '.html':
- files[filename] = re.sub(
- r'(<[^<>]*?\b(?:href|src)\s*=\s*["\']?)\/+',
- r'\1' + '/'.join(['..'] * filename.count('/') + ['']),
- content, flags=re.S | re.I
- )
+ for filename, content in files.iteritems():
+ if os.path.splitext(filename)[1].lower() == '.html':
+ files[filename] = re.sub(
+ r'(<[^<>]*?\b(?:href|src)\s*=\s*["\']?)\/+',
+ r'\1' + '/'.join(['..'] * filename.count('/') + ['']),
+ content, flags=re.S | re.I
+ )
+
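fixAbsoluteUrls() makes root-relative href/src attributes resolve inside the .safariextension bundle by prepending one '../' per directory level of the referencing file. For example:

filename = 'qunit/index.html'
prefix = '/'.join(['..'] * filename.count('/') + [''])
print prefix  # '../', so "/skin/logo.png" becomes "../skin/logo.png"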
def get_certificates_and_key(keyfile):
- import M2Crypto
+ import M2Crypto
+
+ certs = []
+ bio = M2Crypto.BIO.openfile(keyfile)
- certs = []
- bio = M2Crypto.BIO.openfile(keyfile)
+ try:
+ key = M2Crypto.RSA.load_key_bio(bio)
+ bio.reset()
+ while True:
+ try:
+ certs.append(M2Crypto.X509.load_cert_bio(bio))
+ except M2Crypto.X509.X509Error:
+ break
+ finally:
+ bio.close()
- try:
- key = M2Crypto.RSA.load_key_bio(bio)
- bio.reset()
- while True:
- try:
- certs.append(M2Crypto.X509.load_cert_bio(bio))
- except M2Crypto.X509.X509Error:
- break
- finally:
- bio.close()
+ return certs, key
- return certs, key
def get_developer_identifier(certs):
- for cert in certs:
- subject = cert.get_subject()
- for entry in subject.get_entries_by_nid(subject.nid['CN']):
- m = re.match(r'Safari Developer: \((.*?)\)', entry.get_data().as_text())
- if m:
- return m.group(1)
+ for cert in certs:
+ subject = cert.get_subject()
+ for entry in subject.get_entries_by_nid(subject.nid['CN']):
+ m = re.match(r'Safari Developer: \((.*?)\)', entry.get_data().as_text())
+ if m:
+ return m.group(1)
+
+ raise Exception('No Safari developer certificate found in chain')
- raise Exception('No Safari developer certificate found in chain')
def createSignedXarArchive(outFile, files, certs, key):
- import subprocess
- import tempfile
- import shutil
- import M2Crypto
-
- # write files to temporary directory and create a xar archive
- dirname = tempfile.mkdtemp()
- try:
- for filename, contents in files.iteritems():
- path = os.path.join(dirname, filename)
-
- try:
- os.makedirs(os.path.dirname(path))
- except OSError:
- pass
-
- with open(path, 'wb') as file:
- file.write(contents)
-
- subprocess.check_output(
- ['xar', '-czf', os.path.abspath(outFile), '--distribution'] + os.listdir(dirname),
- cwd=dirname
- )
- finally:
- shutil.rmtree(dirname)
-
- certificate_filenames = []
- try:
- # write each certificate in DER format to a separate
- # temporary file so that they can be passed to xar
- for cert in certs:
- fd, filename = tempfile.mkstemp()
- try:
- certificate_filenames.append(filename)
- os.write(fd, cert.as_der())
- finally:
- os.close(fd)
+ import subprocess
+ import tempfile
+ import shutil
+ import M2Crypto
+
+ # write files to temporary directory and create a xar archive
+ dirname = tempfile.mkdtemp()
+ try:
+ for filename, contents in files.iteritems():
+ path = os.path.join(dirname, filename)
+
+ try:
+ os.makedirs(os.path.dirname(path))
+ except OSError:
+ pass
+
+ with open(path, 'wb') as file:
+ file.write(contents)
- # add certificates and placeholder signature
- # to the xar archive, and get data to sign
- fd, digestinfo_filename = tempfile.mkstemp()
- os.close(fd)
+ subprocess.check_output(
+ ['xar', '-czf', os.path.abspath(outFile), '--distribution'] + os.listdir(dirname),
+ cwd=dirname
+ )
+ finally:
+ shutil.rmtree(dirname)
+
+ certificate_filenames = []
try:
- subprocess.check_call(
- [
- 'xar', '--sign', '-f', outFile,
- '--digestinfo-to-sign', digestinfo_filename,
- '--sig-size', str(len(key.private_encrypt('', M2Crypto.RSA.pkcs1_padding)))
- ] + [
- arg for cert in certificate_filenames for arg in ('--cert-loc', cert)
- ]
- )
-
- with open(digestinfo_filename, 'rb') as file:
- digestinfo = file.read()
+ # write each certificate in DER format to a separate
+ # temporary file so that they can be passed to xar
+ for cert in certs:
+ fd, filename = tempfile.mkstemp()
+ try:
+ certificate_filenames.append(filename)
+ os.write(fd, cert.as_der())
+ finally:
+ os.close(fd)
+
+ # add certificates and placeholder signature
+ # to the xar archive, and get data to sign
+ fd, digestinfo_filename = tempfile.mkstemp()
+ os.close(fd)
+ try:
+ subprocess.check_call(
+ [
+ 'xar', '--sign', '-f', outFile,
+ '--digestinfo-to-sign', digestinfo_filename,
+ '--sig-size', str(len(key.private_encrypt('', M2Crypto.RSA.pkcs1_padding)))
+ ] + [
+ arg for cert in certificate_filenames for arg in ('--cert-loc', cert)
+ ]
+ )
+
+ with open(digestinfo_filename, 'rb') as file:
+ digestinfo = file.read()
+ finally:
+ os.unlink(digestinfo_filename)
finally:
- os.unlink(digestinfo_filename)
- finally:
- for filename in certificate_filenames:
- os.unlink(filename)
-
- # sign data and inject signature into xar archive
- fd, signature_filename = tempfile.mkstemp()
- try:
+ for filename in certificate_filenames:
+ os.unlink(filename)
+
+ # sign data and inject signature into xar archive
+ fd, signature_filename = tempfile.mkstemp()
try:
- os.write(fd, key.private_encrypt(
- digestinfo,
- M2Crypto.RSA.pkcs1_padding
- ))
+ try:
+ os.write(fd, key.private_encrypt(
+ digestinfo,
+ M2Crypto.RSA.pkcs1_padding
+ ))
+ finally:
+ os.close(fd)
+
+ subprocess.check_call(['xar', '--inject-sig', signature_filename, '-f', outFile])
finally:
- os.close(fd)
+ os.unlink(signature_filename)
- subprocess.check_call(['xar', '--inject-sig', signature_filename, '-f', outFile])
- finally:
- os.unlink(signature_filename)
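The --sig-size value passed to xar above deserves a note: with PKCS#1 padding, RSA output is always exactly as long as the key modulus, so private_encrypt('') on an empty string yields the byte length the real signature will have. A standalone check:

import M2Crypto

# For a 2048-bit key this prints 256; the output size depends only on
# the modulus, not on the input length.
key = M2Crypto.RSA.gen_key(2048, 65537, callback=lambda x: None)
print len(key.private_encrypt('', M2Crypto.RSA.pkcs1_padding))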
def createBuild(baseDir, type, outFile=None, buildNum=None, releaseBuild=False, keyFile=None, devenv=False):
- metadata = readMetadata(baseDir, type)
- version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)
-
- if not outFile:
- outFile = getDefaultFileName(metadata, version, 'safariextz' if keyFile else 'zip')
-
- params = {
- 'type': type,
- 'baseDir': baseDir,
- 'releaseBuild': releaseBuild,
- 'version': version,
- 'devenv': devenv,
- 'metadata': metadata,
- }
-
- mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
- files = Files(getPackageFiles(params), getIgnoredFiles(params),
- process=lambda path, data: processFile(path, data, params))
- files.readMappedFiles(mapped)
- files.read(baseDir, skip=[opt for opt, _ in mapped])
-
- if metadata.has_section('convert_js'):
- convertJS(params, files)
-
- if metadata.has_section('preprocess'):
- files.preprocess(
- [f for f, _ in metadata.items('preprocess')],
- {'needsExt': True}
- )
-
- if metadata.has_section('import_locales'):
- importGeckoLocales(params, files)
-
- if metadata.has_option('general', 'testScripts'):
- files['qunit/index.html'] = createScriptPage(params, 'testIndex.html.tmpl',
- ('general', 'testScripts'))
-
- if keyFile:
- certs, key = get_certificates_and_key(keyFile)
- params['developerIdentifier'] = get_developer_identifier(certs)
-
- files['lib/info.js'] = createInfoModule(params)
- files['background.html'] = createScriptPage(params, 'background.html.tmpl',
- ('general', 'backgroundScripts'))
- files['Info.plist'] = createManifest(params, files)
-
- fixAbsoluteUrls(files)
-
- dirname = metadata.get('general', 'basename') + '.safariextension'
- for filename in files.keys():
- files[os.path.join(dirname, filename)] = files.pop(filename)
-
- if not devenv and keyFile:
- createSignedXarArchive(outFile, files, certs, key)
- else:
- files.zip(outFile)
+ metadata = readMetadata(baseDir, type)
+ version = getBuildVersion(baseDir, metadata, releaseBuild, buildNum)
+
+ if not outFile:
+ outFile = getDefaultFileName(metadata, version, 'safariextz' if keyFile else 'zip')
+
+ params = {
+ 'type': type,
+ 'baseDir': baseDir,
+ 'releaseBuild': releaseBuild,
+ 'version': version,
+ 'devenv': devenv,
+ 'metadata': metadata,
+ }
+
+ mapped = metadata.items('mapping') if metadata.has_section('mapping') else []
+ files = Files(getPackageFiles(params), getIgnoredFiles(params),
+ process=lambda path, data: processFile(path, data, params))
+ files.readMappedFiles(mapped)
+ files.read(baseDir, skip=[opt for opt, _ in mapped])
+
+ if metadata.has_section('convert_js'):
+ convertJS(params, files)
+
+ if metadata.has_section('preprocess'):
+ files.preprocess(
+ [f for f, _ in metadata.items('preprocess')],
+ {'needsExt': True}
+ )
+
+ if metadata.has_section('import_locales'):
+ importGeckoLocales(params, files)
+
+ if metadata.has_option('general', 'testScripts'):
+ files['qunit/index.html'] = createScriptPage(params, 'testIndex.html.tmpl',
+ ('general', 'testScripts'))
+
+ if keyFile:
+ certs, key = get_certificates_and_key(keyFile)
+ params['developerIdentifier'] = get_developer_identifier(certs)
+
+ files['lib/info.js'] = createInfoModule(params)
+ files['background.html'] = createScriptPage(params, 'background.html.tmpl',
+ ('general', 'backgroundScripts'))
+ files['Info.plist'] = createManifest(params, files)
+
+ fixAbsoluteUrls(files)
+
+ dirname = metadata.get('general', 'basename') + '.safariextension'
+ for filename in files.keys():
+ files[os.path.join(dirname, filename)] = files.pop(filename)
+
+ if not devenv and keyFile:
+ createSignedXarArchive(outFile, files, certs, key)
+ else:
+ files.zip(outFile)
diff --git a/publicSuffixListUpdater.py b/publicSuffixListUpdater.py
index 57b8b96..5f25655 100644
--- a/publicSuffixListUpdater.py
+++ b/publicSuffixListUpdater.py
@@ -15,45 +15,48 @@ import os
import urllib
import json
+
def urlopen(url, attempts=3):
- """
- Tries to open a particular URL, retries on failure.
- """
- for i in range(attempts):
- try:
- return urllib.urlopen(url)
- except IOError, e:
- error = e
- time.sleep(5)
- raise error
+ """
+ Tries to open a particular URL, retries on failure.
+ """
+ for i in range(attempts):
+ try:
+ return urllib.urlopen(url)
+ except IOError, e:
+ error = e
+ time.sleep(5)
+ raise error
+
def getPublicSuffixList():
- """
- Downloads the public suffix list and parses it into a suffix map.
- """
- suffixes = {};
- url = 'http://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_names.dat?raw=1'
- resource = urlopen(url)
-
- for line in resource:
- line = line.rstrip()
- if line.startswith("//") or "." not in line:
- continue
- if line.startswith('*.'):
- suffixes[line[2:]] = 2
- elif line.startswith('!'):
- suffixes[line[1:]] = 0
- else:
- suffixes[line] = 1
-
- return suffixes
+ """
+ Downloads the public suffix list and parses it into a suffix map.
+ """
+ suffixes = {}
+ url = 'http://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_names.dat?raw=1'
+ resource = urlopen(url)
+
+ for line in resource:
+ line = line.rstrip()
+ if line.startswith("//") or "." not in line:
+ continue
+ if line.startswith('*.'):
+ suffixes[line[2:]] = 2
+ elif line.startswith('!'):
+ suffixes[line[1:]] = 0
+ else:
+ suffixes[line] = 1
+
+ return suffixes
+
def updatePSL(baseDir):
- """
- Writes the current public suffix list to a JS file in JSON format.
- """
-
- psl = getPublicSuffixList()
- file = open(os.path.join(baseDir, 'lib', 'publicSuffixList.js'), 'w')
- print >>file, 'var publicSuffixes = ' + json.dumps(psl, sort_keys=True, indent=4, separators=(',', ': ')) + ';'
- file.close()
+ """
+ Writes the current public suffix list to a JS file in JSON format.
+ """
+
+ psl = getPublicSuffixList()
+ file = open(os.path.join(baseDir, 'lib', 'publicSuffixList.js'), 'w')
+ print >>file, 'var publicSuffixes = ' + json.dumps(psl, sort_keys=True, indent=4, separators=(',', ': ')) + ';'
+ file.close()
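In the generated map the value of a matching rule encodes how many extra labels, counted in front of the rule, make up the registrable domain: 1 for plain rules, 2 for wildcard ('*.') rules and 0 for exception ('!') rules. A hypothetical consumer, assuming those semantics:

def getBaseDomain(hostname, suffixes):
    parts = hostname.split('.')
    for i in range(len(parts)):
        candidate = '.'.join(parts[i:])
        if candidate in suffixes:
            return '.'.join(parts[max(i - suffixes[candidate], 0):])
    return '.'.join(parts[-2:])  # implicit '*' rule: last label is a suffix

print getBaseDomain('www.example.co.uk', {'uk': 1, 'co.uk': 1})
# -> 'example.co.uk'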
diff --git a/releaseAutomation.py b/releaseAutomation.py
index bffe8b6..3b1e277 100644
--- a/releaseAutomation.py
+++ b/releaseAutomation.py
@@ -4,108 +4,116 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-import os, re, codecs, subprocess, tarfile, json
+import os
+import re
+import codecs
+import subprocess
+import tarfile
+import json
+
def get_dependencies(prefix, repos):
- from ensure_dependencies import read_deps, safe_join
- repo = repos[prefix]
- deps = read_deps(repo)
- if deps:
- for subpath in deps:
- if subpath.startswith('_'):
- continue
- depprefix = prefix + subpath + '/'
- deppath = safe_join(repo, subpath)
- repos[depprefix] = deppath
- get_dependencies(depprefix, repos)
+ from ensure_dependencies import read_deps, safe_join
+ repo = repos[prefix]
+ deps = read_deps(repo)
+ if deps:
+ for subpath in deps:
+ if subpath.startswith('_'):
+ continue
+ depprefix = prefix + subpath + '/'
+ deppath = safe_join(repo, subpath)
+ repos[depprefix] = deppath
+ get_dependencies(depprefix, repos)
+
def create_sourcearchive(repo, output):
- with tarfile.open(output, mode='w:gz') as archive:
- repos = {'': repo}
- get_dependencies('', repos)
- for prefix, path in repos.iteritems():
- process = subprocess.Popen(['hg', 'archive', '-R', path, '-t', 'tar', '-S', '-'], stdout=subprocess.PIPE)
- try:
- with tarfile.open(fileobj=process.stdout, mode='r|') as repoarchive:
- for fileinfo in repoarchive:
- if os.path.basename(fileinfo.name) in ('.hgtags', '.hgignore'):
- continue
- filedata = repoarchive.extractfile(fileinfo)
- fileinfo.name = re.sub(r'^[^/]+/', prefix, fileinfo.name)
- archive.addfile(fileinfo, filedata)
- finally:
- process.stdout.close()
- process.wait()
+ with tarfile.open(output, mode='w:gz') as archive:
+ repos = {'': repo}
+ get_dependencies('', repos)
+ for prefix, path in repos.iteritems():
+ process = subprocess.Popen(['hg', 'archive', '-R', path, '-t', 'tar', '-S', '-'], stdout=subprocess.PIPE)
+ try:
+ with tarfile.open(fileobj=process.stdout, mode='r|') as repoarchive:
+ for fileinfo in repoarchive:
+ if os.path.basename(fileinfo.name) in ('.hgtags', '.hgignore'):
+ continue
+ filedata = repoarchive.extractfile(fileinfo)
+ fileinfo.name = re.sub(r'^[^/]+/', prefix, fileinfo.name)
+ archive.addfile(fileinfo, filedata)
+ finally:
+ process.stdout.close()
+ process.wait()
+
def run(baseDir, type, version, keyFiles, downloadsRepo):
- if type == "gecko":
- import buildtools.packagerGecko as packager
- elif type == "chrome":
- import buildtools.packagerChrome as packager
-
- # Replace version number in metadata file "manually", ConfigParser will mess
- # up the order of lines.
- metadata = packager.readMetadata(baseDir, type)
- with open(metadata.option_source("general", "version"), 'r+b') as file:
- rawMetadata = file.read()
- rawMetadata = re.sub(
- r'^(\s*version\s*=\s*).*', r'\g<1>%s' % version,
- rawMetadata, flags=re.I | re.M
- )
-
- file.seek(0)
- file.write(rawMetadata)
- file.truncate()
-
- # Read extension name from locale data
- import buildtools.packagerGecko as packagerGecko
- if type == "gecko":
- locales_base = baseDir
- else:
- # This is somewhat of a hack but reading out locale import config here would be too much
- locales_base = os.path.join(baseDir, "adblockplus")
-
- locales = packagerGecko.readLocaleMetadata(locales_base, [packagerGecko.defaultLocale])
- extensionName = locales[packagerGecko.defaultLocale]['name']
-
- # Now commit the change and tag it
- subprocess.check_call(['hg', 'commit', '-R', baseDir, '-m', 'Releasing %s %s' % (extensionName, version)])
- subprocess.check_call(['hg', 'tag', '-R', baseDir, '-f', version])
-
- # Create a release build
- downloads = []
- if type == "gecko":
- keyFile = keyFiles[0] if keyFiles else None
- metadata = packager.readMetadata(baseDir, type)
- buildPath = os.path.join(downloadsRepo, packager.getDefaultFileName(metadata, version, 'xpi'))
- packager.createBuild(baseDir, type=type, outFile=buildPath, releaseBuild=True, keyFile=keyFile)
- downloads.append(buildPath)
- elif type == "chrome":
- # We actually have to create three different builds: signed and unsigned
- # Chrome builds (the latter for Chrome Web Store), and a signed Safari build.
+ if type == "gecko":
+ import buildtools.packagerGecko as packager
+ elif type == "chrome":
+ import buildtools.packagerChrome as packager
+
+    # Replace the version number in the metadata file "manually"; ConfigParser
+    # would mess up the order of lines.
metadata = packager.readMetadata(baseDir, type)
- buildPath = os.path.join(downloadsRepo, packager.getDefaultFileName(metadata, version, 'crx'))
- packager.createBuild(baseDir, type=type, outFile=buildPath, releaseBuild=True, keyFile=keyFiles[0])
- downloads.append(buildPath)
-
- buildPathUnsigned = os.path.join(baseDir, packager.getDefaultFileName(metadata, version, 'zip'))
- packager.createBuild(baseDir, type=type, outFile=buildPathUnsigned, releaseBuild=True, keyFile=None)
-
- import buildtools.packagerSafari as packagerSafari
- metadataSafari = packagerSafari.readMetadata(baseDir, "safari")
- buildPathSafari = os.path.join(downloadsRepo, packagerSafari.getDefaultFileName(metadataSafari, version, 'safariextz'))
- packagerSafari.createBuild(baseDir, type="safari", outFile=buildPathSafari, releaseBuild=True, keyFile=keyFiles[1])
- downloads.append(buildPathSafari)
-
- # Create source archive
- archivePath = os.path.splitext(buildPath)[0] + '-source.tgz'
- create_sourcearchive(baseDir, archivePath)
- downloads.append(archivePath)
-
- # Now add the downloads and commit
- subprocess.check_call(['hg', 'add', '-R', downloadsRepo] + downloads)
- subprocess.check_call(['hg', 'commit', '-R', downloadsRepo, '-m', 'Releasing %s %s' % (extensionName, version)])
-
- # Push all changes
- subprocess.check_call(['hg', 'push', '-R', baseDir])
- subprocess.check_call(['hg', 'push', '-R', downloadsRepo])
+ with open(metadata.option_source("general", "version"), 'r+b') as file:
+ rawMetadata = file.read()
+ rawMetadata = re.sub(
+ r'^(\s*version\s*=\s*).*', r'\g<1>%s' % version,
+ rawMetadata, flags=re.I | re.M
+ )
+
+ file.seek(0)
+ file.write(rawMetadata)
+ file.truncate()
+
+ # Read extension name from locale data
+ import buildtools.packagerGecko as packagerGecko
+ if type == "gecko":
+ locales_base = baseDir
+ else:
+        # This is somewhat of a hack, but reading the locale import config here would be overkill
+ locales_base = os.path.join(baseDir, "adblockplus")
+
+ locales = packagerGecko.readLocaleMetadata(locales_base, [packagerGecko.defaultLocale])
+ extensionName = locales[packagerGecko.defaultLocale]['name']
+
+ # Now commit the change and tag it
+ subprocess.check_call(['hg', 'commit', '-R', baseDir, '-m', 'Releasing %s %s' % (extensionName, version)])
+ subprocess.check_call(['hg', 'tag', '-R', baseDir, '-f', version])
+
+ # Create a release build
+ downloads = []
+ if type == "gecko":
+ keyFile = keyFiles[0] if keyFiles else None
+ metadata = packager.readMetadata(baseDir, type)
+ buildPath = os.path.join(downloadsRepo, packager.getDefaultFileName(metadata, version, 'xpi'))
+ packager.createBuild(baseDir, type=type, outFile=buildPath, releaseBuild=True, keyFile=keyFile)
+ downloads.append(buildPath)
+ elif type == "chrome":
+ # We actually have to create three different builds: signed and unsigned
+ # Chrome builds (the latter for Chrome Web Store), and a signed Safari build.
+ metadata = packager.readMetadata(baseDir, type)
+ buildPath = os.path.join(downloadsRepo, packager.getDefaultFileName(metadata, version, 'crx'))
+ packager.createBuild(baseDir, type=type, outFile=buildPath, releaseBuild=True, keyFile=keyFiles[0])
+ downloads.append(buildPath)
+
+ buildPathUnsigned = os.path.join(baseDir, packager.getDefaultFileName(metadata, version, 'zip'))
+ packager.createBuild(baseDir, type=type, outFile=buildPathUnsigned, releaseBuild=True, keyFile=None)
+
+ import buildtools.packagerSafari as packagerSafari
+ metadataSafari = packagerSafari.readMetadata(baseDir, "safari")
+ buildPathSafari = os.path.join(downloadsRepo, packagerSafari.getDefaultFileName(metadataSafari, version, 'safariextz'))
+ packagerSafari.createBuild(baseDir, type="safari", outFile=buildPathSafari, releaseBuild=True, keyFile=keyFiles[1])
+ downloads.append(buildPathSafari)
+
+ # Create source archive
+ archivePath = os.path.splitext(buildPath)[0] + '-source.tgz'
+ create_sourcearchive(baseDir, archivePath)
+ downloads.append(archivePath)
+
+ # Now add the downloads and commit
+ subprocess.check_call(['hg', 'add', '-R', downloadsRepo] + downloads)
+ subprocess.check_call(['hg', 'commit', '-R', downloadsRepo, '-m', 'Releasing %s %s' % (extensionName, version)])
+
+ # Push all changes
+ subprocess.check_call(['hg', 'push', '-R', baseDir])
+ subprocess.check_call(['hg', 'push', '-R', downloadsRepo])
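
One detail in run() that is easy to miss: the version substitution uses \g<1> rather than \1 because the new version starts with a digit, and \1 immediately followed by e.g. "1.2.3" would be read by the re module as a reference to group 11. A standalone illustration (the sample metadata text below is made up):

    import re

    raw_metadata = '[general]\nversion = 1.0\nbackup = yes\n'
    version = '1.2.3'
    updated = re.sub(
        r'^(\s*version\s*=\s*).*', r'\g<1>%s' % version,
        raw_metadata, flags=re.I | re.M
    )
    # Only the "version" line changes; the ordering and the other
    # options survive, which is why the raw text is edited here
    # instead of round-tripping through ConfigParser.
    assert updated == '[general]\nversion = 1.2.3\nbackup = yes\n'
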
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-mozext/adblock-plus.git