[Pkg-mozext-commits] [adblock-plus] 03/03: Revert "Restore buildtools as of 2.6.9"
David Prévot
taffit at moszumanska.debian.org
Tue Jun 9 19:17:24 UTC 2015
This is an automated email from the git hooks/post-receive script.
taffit pushed a commit to branch master
in repository adblock-plus.
commit 25da8317ed53b6d086126740f0c329bf4ce568c2
Author: David Prévot <taffit at debian.org>
Date: Tue Jun 9 14:22:43 2015 -0400
Revert "Restore buildtools as of 2.6.9"
This reverts commit f0cb4cc639e0c7c442ab9e19ba6a10097e69495c.
Git-Dch: Ignore
---
buildtools/build.py | 14 ++++++---
buildtools/ensure_dependencies.py | 64 ++++++++++++++++++++++++++++++++-------
buildtools/lib/prefs.js | 32 ++++++++++++--------
buildtools/packagerGecko.py | 6 ++--
4 files changed, 85 insertions(+), 31 deletions(-)
diff --git a/buildtools/build.py b/buildtools/build.py
index 3d9483f..4927826 100644
--- a/buildtools/build.py
+++ b/buildtools/build.py
@@ -369,10 +369,14 @@ def generateDocs(baseDir, scriptName, opts, args, type):
return
targetDir = args[0]
- command = ['jsdoc',
- '--destination', targetDir,
- '--access', 'all',
- os.path.join(baseDir, 'lib')]
+ source_dir = os.path.join(baseDir, 'lib')
+ sources = [source_dir]
+
+ # JSDoc struggles with huge objects: https://github.com/jsdoc3/jsdoc/issues/976
+ if type == 'chrome':
+ sources = [os.path.join(source_dir, filename) for filename in os.listdir(source_dir) if filename != 'publicSuffixList.js']
+
+ command = ['jsdoc', '--destination', targetDir, '--access', 'all'] + sources
if any(opt in ('-q', '--quiet') for opt, _ in opts):
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = process.communicate()[1]
@@ -483,7 +487,7 @@ with addCommand(generateDocs, 'docs') as command:
command.description = 'Generate documentation files and write them into the specified directory. This operation requires JsDoc 3 to be installed.'
command.addOption('Suppress JsDoc output', short='q', long='quiet')
command.params = '[options] <directory>'
- command.supportedTypes = ('gecko')
+ command.supportedTypes = ('gecko', 'chrome')
with addCommand(runReleaseAutomation, 'release') as command:
command.shortDescription = 'Run release automation'
diff --git a/buildtools/ensure_dependencies.py b/buildtools/ensure_dependencies.py
index d8af9e7..cec2bbe 100755
--- a/buildtools/ensure_dependencies.py
+++ b/buildtools/ensure_dependencies.py
@@ -34,6 +34,10 @@ A dependencies file should look like this:
buildtools = buildtools hg:016d16f7137b git:f3f8692f82e5
"""
+SKIP_DEPENDENCY_UPDATES = os.environ.get(
+ "SKIP_DEPENDENCY_UPDATES", ""
+).lower() not in ("", "0", "false")
+
class Mercurial():
def istype(self, repodir):
return os.path.exists(os.path.join(repodir, ".hg"))
@@ -79,6 +83,9 @@ class Mercurial():
module = os.path.relpath(target, repo)
_ensure_line_exists(ignore_path, module)
+ def postprocess_url(self, url):
+ return url
+
class Git():
def istype(self, repodir):
return os.path.exists(os.path.join(repodir, ".git"))
@@ -94,7 +101,20 @@ class Git():
return subprocess.check_output(command, cwd=repo).strip()
def pull(self, repo):
+ # Fetch tracked branches, new tags and the list of available remote branches
subprocess.check_call(["git", "fetch", "--quiet", "--all", "--tags"], cwd=repo)
+ # Next we need to ensure all remote branches are tracked
+ newly_tracked = False
+ remotes = subprocess.check_output(["git", "branch", "--remotes"], cwd=repo)
+ for match in re.finditer(r"^\s*(origin/(\S+))$", remotes, re.M):
+ remote, local = match.groups()
+ with open(os.devnull, "wb") as devnull:
+ if subprocess.call(["git", "branch", "--track", local, remote],
+ cwd=repo, stdout=devnull, stderr=devnull) == 0:
+ newly_tracked = True
+ # Finally fetch any newly tracked remote branches
+ if newly_tracked:
+ subprocess.check_call(["git", "fetch", "--quiet", "origin"], cwd=repo)
def update(self, repo, rev):
subprocess.check_call(["git", "checkout", "--quiet", rev], cwd=repo)
@@ -104,6 +124,12 @@ class Git():
exclude_file = os.path.join(repo, ".git", "info", "exclude")
_ensure_line_exists(exclude_file, module)
+ def postprocess_url(self, url):
+ # Handle alternative syntax of SSH URLs
+ if "@" in url and ":" in url and not urlparse.urlsplit(url).scheme:
+ return "ssh://" + url.replace(":", "/", 1)
+ return url
+
repo_types = OrderedDict((
("hg", Mercurial()),
("git", Git()),
@@ -158,7 +184,7 @@ def read_deps(repodir):
def safe_join(path, subpath):
# This has been inspired by Flask's safe_join() function
- forbidden = set([os.sep, os.altsep]) - set([posixpath.sep, None])
+ forbidden = {os.sep, os.altsep} - {posixpath.sep, None}
if any(sep in subpath for sep in forbidden):
raise Exception("Illegal directory separator in dependency path %s" % subpath)
@@ -179,6 +205,11 @@ def ensure_repo(parentrepo, target, roots, sourcename):
if os.path.exists(target):
return
+ if SKIP_DEPENDENCY_UPDATES:
+ logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
+ "%s not cloned", target)
+ return
+
parenttype = get_repo_type(parentrepo)
type = None
for key in roots:
@@ -187,10 +218,14 @@ def ensure_repo(parentrepo, target, roots, sourcename):
if type is None:
raise Exception("No valid source found to create %s" % target)
- if os.path.exists(roots[type]):
- url = os.path.join(roots[type], sourcename)
+ postprocess_url = repo_types[type].postprocess_url
+ root = postprocess_url(roots[type])
+ sourcename = postprocess_url(sourcename)
+
+ if os.path.exists(root):
+ url = os.path.join(root, sourcename)
else:
- url = urlparse.urljoin(roots[type], sourcename)
+ url = urlparse.urljoin(root, sourcename)
logging.info("Cloning repository %s into %s" % (url, target))
repo_types[type].clone(url, target)
@@ -214,15 +249,21 @@ def update_repo(target, revisions):
return
resolved_revision = repo_types[type].get_revision_id(target, revision)
- if not resolved_revision:
- logging.info("Revision %s is unknown, downloading remote changes" % revision)
- repo_types[type].pull(target)
- resolved_revision = repo_types[type].get_revision_id(target, revision)
- if not resolved_revision:
- raise Exception("Failed to resolve revision %s" % revision)
-
current_revision = repo_types[type].get_revision_id(target)
+
if resolved_revision != current_revision:
+ if SKIP_DEPENDENCY_UPDATES:
+ logging.warning("SKIP_DEPENDENCY_UPDATES environment variable set, "
+ "%s not checked out to %s", target, revision)
+ return
+
+ if not resolved_revision:
+ logging.info("Revision %s is unknown, downloading remote changes" % revision)
+ repo_types[type].pull(target)
+ resolved_revision = repo_types[type].get_revision_id(target, revision)
+ if not resolved_revision:
+ raise Exception("Failed to resolve revision %s" % revision)
+
logging.info("Updating repository %s to revision %s" % (target, resolved_revision))
repo_types[type].update(target, resolved_revision)
@@ -234,6 +275,7 @@ def resolve_deps(repodir, level=0, self_update=True, overrideroots=None, skipdep
return
if level >= 10:
logging.warning("Too much subrepository nesting, ignoring %s" % repo)
+ return
if overrideroots is not None:
config["_root"] = overrideroots
diff --git a/buildtools/lib/prefs.js b/buildtools/lib/prefs.js
index ab1cc5c..ff5e181 100644
--- a/buildtools/lib/prefs.js
+++ b/buildtools/lib/prefs.js
@@ -8,29 +8,37 @@ Cu.import("resource://gre/modules/XPCOMUtils.jsm");
let {addonRoot, addonName} = require("info");
let branchName = "extensions." + addonName + ".";
let branch = Services.prefs.getBranch(branchName);
+let preconfiguredBranch =
+ Services.prefs.getBranch(branchName + "preconfigured.");
let ignorePrefChanges = false;
function init()
{
// Load default preferences and set up properties for them
let defaultBranch = Services.prefs.getDefaultBranch(branchName);
- let scope =
+
+ let request = new XMLHttpRequest();
+ request.open("GET", addonRoot + "defaults/prefs.json", false);
+ request.responseType = "json";
+ request.send();
+
+ let defaults = request.response.defaults;
+ let preconfigurable = new Set(request.response.preconfigurable);
+ for (let pref in defaults)
{
- pref: function(pref, value)
+ let value = defaults[pref];
+ let [getter, setter] = typeMap[typeof value];
+ if (preconfigurable.has(pref))
{
- if (pref.substr(0, branchName.length) != branchName)
+ try
{
- Cu.reportError(new Error("Ignoring default preference " + pref + ", wrong branch."));
- return;
+ value = getter(preconfiguredBranch, pref);
}
- pref = pref.substr(branchName.length);
-
- let [getter, setter] = typeMap[typeof value];
- setter(defaultBranch, pref, value);
- defineProperty(pref, false, getter, setter);
+ catch (e) {}
}
- };
- Services.scriptloader.loadSubScript(addonRoot + "defaults/prefs.js", scope);
+ setter(defaultBranch, pref, value);
+ defineProperty(pref, false, getter, setter);
+ }
// Add preference change observer
try
diff --git a/buildtools/packagerGecko.py b/buildtools/packagerGecko.py
index 72ec7ec..748bf28 100644
--- a/buildtools/packagerGecko.py
+++ b/buildtools/packagerGecko.py
@@ -32,6 +32,7 @@ KNOWN_APPS = {
'songbird': 'songbird at songbirdnest.com',
'thunderbird': '{3550f703-e582-4d05-9a08-453d09bdfdc6}',
'toolkit': 'toolkit at mozilla.org',
+ 'adblockbrowser': '{55aba3ac-94d3-41a8-9e25-5c21fe874539}',
}
defaultLocale = 'en-US'
@@ -230,9 +231,6 @@ def addMissingFiles(params, files):
templateData['hasChromeRequires'] = True
if name.startswith('lib/') and re.search(r'\bXMLHttpRequest\b', content):
templateData['hasXMLHttpRequest'] = True
- if name == 'defaults/prefs.js':
- if re.search(r'\.currentVersion"', content):
- templateData['hasVersionPref'] = True
if not '/' in name or name.startswith('lib/'):
if re.search(r'(?:^|\s)onShutdown\.', content):
templateData['hasShutdownHandlers'] = True
@@ -240,6 +238,8 @@ def addMissingFiles(params, files):
for name, content in files.iteritems():
if name == 'chrome.manifest':
templateData['hasChrome'] = True
+ elif name == 'defaults/prefs.json':
+ templateData['hasVersionPref'] = 'currentVersion' in json.loads(content).get('defaults', {})
elif name.endswith('.js'):
checkScript(name)
elif name.endswith('.xul'):
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-mozext/adblock-plus.git
More information about the Pkg-mozext-commits
mailing list