[Pkg-bazaar-commits] r126 ./bzr-builddeb/people/jdw/merge_upstream: Start work on import_dsc, which creates a branch from a set of source packages.
James Westby
jw+debian at jameswestby.net
Sun Jun 24 21:21:25 UTC 2007
------------------------------------------------------------
revno: 126
committer: James Westby <jw+debian at jameswestby.net>
branch nick: merge_upstream
timestamp: Sun 2007-06-24 22:21:25 +0100
message:
Start work on import_dsc, which creates a branch from a set of source packages.
The set of uploaded source packages contains some history about the package,
and can be used to create a branch for packaging.
This is the start of code to do that. It can currently import the first
source package.
added:
import_dsc.py
patches.py
specs/import-dsc
tests/test_import_dsc.py
modified:
tests/__init__.py
-------------- next part --------------
=== added file 'import_dsc.py'
--- a/import_dsc.py 1970-01-01 00:00:00 +0000
+++ b/import_dsc.py 2007-06-24 21:21:25 +0000
@@ -0,0 +1,153 @@
+# import_dsc.py -- Import a series of .dsc files.
+# Copyright (C) 2007 James Westby <jw+debian at jameswestby.net>
+#
+# This file is part of bzr-builddeb.
+#
+# bzr-builddeb is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# bzr-builddeb is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with bzr-builddeb; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+
+import gzip
+from StringIO import StringIO
+import os
+
+import deb822
+from debian_bundle.debian_support import Version
+
+from bzrlib.bzrdir import BzrDir
+from bzrlib.errors import FileExists
+from bzrlib import generate_ids
+from bzrlib.transform import TreeTransform
+
+from bzrlib.plugins.bzrtools.upstream_import import (import_tar,
+ common_directory,
+ )
+
+import patches
+
def _dsc_sorter(dscname1, dscname2):
    """Comparison function ordering .dsc files by ascending package version.

    Reads the Version field out of each .dsc so that
    dsc_files.sort(cmp=_dsc_sorter) places the oldest upload first.

    :param dscname1: path to the first .dsc file.
    :param dscname2: path to the second .dsc file.
    :return: negative, zero or positive as dscname1's version is less than,
        equal to, or greater than dscname2's.
    """
    f1 = open(dscname1)
    try:
        dsc1 = deb822.Dsc(f1)
    finally:
        f1.close()
    f2 = open(dscname2)
    try:
        dsc2 = deb822.Dsc(f2)
    finally:
        f2.close()
    v1 = Version(dsc1['Version'])
    v2 = Version(dsc2['Version'])
    # The previous 'return v1 > v2' yielded only 1 or 0 - never a negative
    # value - so sort() saw "less than" pairs as "equal" and could leave the
    # list mis-ordered.  cmp() provides the full -1/0/1 comparator contract.
    return cmp(v1, v2)
+
+
def import_orig(tree, dsc):
    """Import the .orig.tar.gz listed in a .dsc into the working tree.

    Scans the Files entries of the .dsc and, for each upstream tarball
    found, imports its contents into tree and commits the result.

    :param tree: the working tree to import into.
    :param dsc: a deb822.Dsc describing the files of the upload.
    """
    for entry in dsc['Files']:
        name = entry['name']
        if not name.endswith('.orig.tar.gz'):
            continue
        tarball = open(name, 'rb')
        try:
            import_tar(tree, tarball)
            message = 'import upstream from %s' % os.path.basename(name)
            tree.commit(message)
        finally:
            tarball.close()
+
+
def import_diff(tree, dsc):
    """Apply the .diff.gz listed in a .dsc to the tree and commit it.

    The diff is parsed and applied file-by-file through a TreeTransform,
    creating, modifying or deleting versioned files as required, and the
    result is committed as the packaging revision.

    :param tree: the working tree to apply the diff to.
    :param dsc: a deb822.Dsc describing the files of the upload.
    """
    for file_details in dsc['Files']:
        diffname = file_details['name']
        if diffname.endswith('.diff.gz'):
            f = gzip.GzipFile(diffname, 'rb')
            try:
                tt = TreeTransform(tree)
                implied_parents = set()
                def add_implied_parents(path):
                    # Record every ancestor directory of path so they can
                    # be created/versioned once all files are processed.
                    parent = os.path.dirname(path)
                    if parent == '':
                        return
                    if parent in implied_parents:
                        return
                    implied_parents.add(parent)
                    add_implied_parents(parent)
                patch_list = patches.parse_patches(f)
                oldfiles = [patch.oldname for patch in patch_list]
                newfiles = [patch.newname for patch in patch_list]
                # Strip the common leading directory (e.g. "package-0.1/")
                # from both sides of every patch.
                oldprefix = common_directory(oldfiles)
                newprefix = common_directory(newfiles)
                for patch in patch_list:
                    oldfilename = patch.oldname
                    newfilename = patch.newname
                    if oldprefix is not None:
                        oldfilename = oldfilename[len(oldprefix)+1:]
                    oldfilename = oldfilename.rstrip('/')
                    if oldfilename == '':
                        continue
                    if newprefix is not None:
                        newfilename = newfilename[len(newprefix)+1:]
                    newfilename = newfilename.rstrip('/')
                    if newfilename == '':
                        continue
                    oldid = tree.path2id(oldfilename)
                    if oldid is not None:
                        oldtext = StringIO(tree.get_file_text(oldid))
                    else:
                        oldtext = []
                    trans_id = tt.trans_id_tree_path(oldfilename)
                    # StringIO stringifies the Patch object, handing
                    # iter_patched the patch text as a file-like object.
                    newtext = list(patches.iter_patched(oldtext,
                                                        StringIO(patch)))
                    if newtext == []:
                        # The patch removed every line: delete the file.
                        tt.delete_versioned(trans_id)
                    else:
                        if oldid is not None:
                            tt.delete_contents(trans_id)
                        tt.create_file(newtext, trans_id)
                        if tt.tree_file_id(trans_id) is None:
                            name = os.path.basename(newfilename.rstrip('/'))
                            file_id = generate_ids.gen_file_id(name)
                            tt.version_file(file_id, trans_id)
                        add_implied_parents(newfilename)
                for path in implied_parents:
                    trans_id = tt.trans_id_tree_path(path)
                    if tree.path2id(path) is None:
                        tt.create_directory(trans_id)
                    if tt.tree_file_id(trans_id) is None:
                        # Bug fix: this previously reused 'name' left over
                        # from the loop above, so every implied directory
                        # got a file id generated from the wrong basename.
                        file_id = generate_ids.gen_file_id(
                                os.path.basename(path))
                        tt.version_file(file_id, trans_id)
                tt.apply()
                tree.commit('merge packaging changes from %s' % \
                            (os.path.basename(diffname)))
            finally:
                f.close()
+
+
def import_dsc(target_dir, dsc_files):
    """Create a bzr branch in target_dir from a series of source packages.

    :param target_dir: directory to create the branch in; must not exist.
    :param dsc_files: list of paths to the .dsc files to import.
    :raises FileExists: if target_dir already exists.
    """
    if os.path.exists(target_dir):
        raise FileExists(target_dir)
    os.mkdir(target_dir)
    branch = BzrDir.create_branch_convenience(target_dir)
    tree = branch.bzrdir.open_workingtree()
    tree.lock_write()
    try:
        # Import oldest version first so history builds up in order.
        dsc_files.sort(cmp=_dsc_sorter)
        for dscname in dsc_files:
            dsc_file = open(dscname)
            try:
                parsed = deb822.Dsc(dsc_file)
            finally:
                dsc_file.close()
            import_orig(tree, parsed)
            import_diff(tree, parsed)
    finally:
        tree.unlock()
+
=== added file 'patches.py'
--- a/patches.py 1970-01-01 00:00:00 +0000
+++ b/patches.py 2007-06-24 21:21:25 +0000
@@ -0,0 +1,418 @@
+# Copyright (C) 2004 - 2006 Aaron Bentley, Canonical Ltd
+# <aaron.bentley at utoronto.ca>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
class PatchSyntax(Exception):
    """Base class for all errors raised while parsing patch text."""

    def __init__(self, msg):
        Exception.__init__(self, msg)
+
+
class MalformedPatchHeader(PatchSyntax):
    """Raised when the ---/+++ header of a patch cannot be parsed."""

    def __init__(self, desc, line):
        self.desc = desc
        self.line = line
        PatchSyntax.__init__(
            self, "Malformed patch header. %s\n%r" % (desc, line))
+
+
class MalformedHunkHeader(PatchSyntax):
    """Raised when a hunk's @@ header line cannot be parsed."""

    def __init__(self, desc, line):
        self.desc = desc
        self.line = line
        PatchSyntax.__init__(
            self, "Malformed hunk header. %s\n%r" % (desc, line))
+
+
class MalformedLine(PatchSyntax):
    """Raised when a hunk body line has an unrecognised prefix."""

    def __init__(self, desc, line):
        self.desc = desc
        self.line = line
        # Use %r for the offending line, matching MalformedPatchHeader and
        # MalformedHunkHeader; %s printed the raw line (with its trailing
        # newline) and spread the message across extra lines.
        msg = "Malformed line. %s\n%r" % (self.desc, self.line)
        PatchSyntax.__init__(self, msg)
+
+
class PatchConflict(Exception):
    """Raised when a patch's context does not match the original text."""

    def __init__(self, line_no, orig_line, patch_line):
        stripped_orig = orig_line.rstrip('\n')
        stripped_patch = str(patch_line).rstrip('\n')
        message = ('Text contents mismatch at line %d. Original has "%s",'
                   ' but patch says it should be "%s"'
                   % (line_no, stripped_orig, stripped_patch))
        Exception.__init__(self, message)
+
+
def get_patch_names(iter_lines):
    """Read the "--- "/"+++ " header lines from an iterator of patch lines.

    :param iter_lines: iterator over the lines of the patch text.
    :return: (orig_name, mod_name) tuple of the filenames in the header.
    :raises MalformedPatchHeader: if either header line is missing or does
        not start with the expected prefix.
    """
    try:
        line = iter_lines.next()
        if not line.startswith("--- "):
            raise MalformedPatchHeader("No orig name", line)
        else:
            # The name runs up to the first tab (timestamps follow it).
            tab = line.find("\t")
            orig_name = line[4:tab].rstrip("\n")
    except StopIteration:
        raise MalformedPatchHeader("No orig line", "")
    try:
        line = iter_lines.next()
        if not line.startswith("+++ "):
            # Raise the specific header error (a PatchSyntax subclass,
            # so existing callers catching PatchSyntax still work),
            # matching the handling of the "--- " line above.
            raise MalformedPatchHeader("No mod name", line)
        else:
            tab = line.find("\t")
            mod_name = line[4:tab].rstrip("\n")
    except StopIteration:
        raise MalformedPatchHeader("No mod line", "")
    return (orig_name, mod_name)
+
+
def parse_range(textrange):
    """Parse a patch range, handling the "1" special-case.

    A bare position such as "7" is shorthand for a range of 1 line.

    :param textrange: The text to parse
    :type textrange: str
    :return: the position and range, as a tuple
    :rtype: (int, int)
    """
    pieces = textrange.split(',')
    if len(pieces) == 1:
        pos, length = pieces[0], "1"
    else:
        pos, length = pieces
    return (int(pos), int(length))
+
+
def hunk_from_header(line):
    """Parse a hunk header ("@@ -pos,range +pos,range @@") into a Hunk.

    :param line: the header line, including its trailing newline.
    :return: a new Hunk with the parsed positions/ranges and no lines.
    :raises MalformedHunkHeader: if the line is not a well-formed header.
    """
    if not line.startswith("@@") or not line.endswith("@@\n") \
        or not len(line) > 4:
        raise MalformedHunkHeader("Does not start and end with @@.", line)
    try:
        # Strip "@@ " and " @@\n", leaving "-pos,range +pos,range".
        (orig, mod) = line[3:-4].split(" ")
    except Exception, e:
        raise MalformedHunkHeader(str(e), line)
    if not orig.startswith('-') or not mod.startswith('+'):
        raise MalformedHunkHeader("Positions don't start with + or -.", line)
    try:
        (orig_pos, orig_range) = parse_range(orig[1:])
        (mod_pos, mod_range) = parse_range(mod[1:])
    except Exception, e:
        raise MalformedHunkHeader(str(e), line)
    if mod_range < 0 or orig_range < 0:
        raise MalformedHunkHeader("Hunk range is negative", line)
    return Hunk(orig_pos, orig_range, mod_pos, mod_range)
+
+
class HunkLine:
    """A single line within a hunk, rendered with a leading character."""

    def __init__(self, contents):
        # contents is the line's text without its +/-/space prefix.
        self.contents = contents

    def get_str(self, leadchar):
        """Return the line as patch text, prefixed with leadchar.

        A line that does not end in a newline is terminated with the
        NO_NL marker so the missing newline survives a round-trip.
        """
        # An unreachable special case ("... and False") guarding
        # newline-only context lines has been removed; it could never
        # trigger and changed nothing.
        if not self.contents.endswith('\n'):
            terminator = '\n' + NO_NL
        else:
            terminator = ''
        return leadchar + self.contents + terminator
+
+
class ContextLine(HunkLine):
    """A hunk line present in both versions; rendered with a space."""

    def __init__(self, contents):
        HunkLine.__init__(self, contents)

    def __str__(self):
        return self.get_str(" ")
+
+
class InsertLine(HunkLine):
    """A hunk line added by the patch; rendered with '+'."""

    def __init__(self, contents):
        HunkLine.__init__(self, contents)

    def __str__(self):
        return self.get_str("+")
+
+
class RemoveLine(HunkLine):
    """A hunk line removed by the patch; rendered with '-'."""

    def __init__(self, contents):
        HunkLine.__init__(self, contents)

    def __str__(self):
        return self.get_str("-")
+
# Marker diff emits after a line that lacked a terminating newline.
NO_NL = '\\ No newline at end of file\n'

__pychecker__ = "no-returnvalues"

def parse_line(line):
    """Classify one line of hunk body text.

    Returns a ContextLine, InsertLine or RemoveLine according to the
    line's prefix, the NO_NL marker itself when given one, and raises
    MalformedLine for anything else.
    """
    if line.startswith("\n"):
        return ContextLine(line)
    prefix = line[:1]
    rest = line[1:]
    if prefix == " ":
        return ContextLine(rest)
    if prefix == "+":
        return InsertLine(rest)
    if prefix == "-":
        return RemoveLine(rest)
    if line == NO_NL:
        return NO_NL
    raise MalformedLine("Unknown line type", line)

__pychecker__ = ""
+
+
class Hunk:
    """One hunk of a patch: an @@ header plus the hunk's body lines.

    orig_pos/orig_range describe the hunk's extent in the original file,
    mod_pos/mod_range its extent in the modified file; lines holds
    HunkLine instances in file order.
    """

    def __init__(self, orig_pos, orig_range, mod_pos, mod_range):
        self.orig_pos = orig_pos
        self.orig_range = orig_range
        self.mod_pos = mod_pos
        self.mod_range = mod_range
        self.lines = []

    def get_header(self):
        # Reconstruct the "@@ -... +... @@" header from the stored ranges.
        return "@@ -%s +%s @@\n" % (self.range_str(self.orig_pos,
                                                   self.orig_range),
                                    self.range_str(self.mod_pos,
                                                   self.mod_range))

    def range_str(self, pos, range):
        """Return a file range, special-casing for 1-line files.

        :param pos: The position in the file
        :type pos: int
        :param range: The range in the file
        :type range: int
        :return: a string in the format 1,4 except when range == pos == 1
        """
        if range == 1:
            return "%i" % pos
        else:
            return "%i,%i" % (pos, range)

    def __str__(self):
        # Header followed by each line rendered with its +/-/space prefix.
        lines = [self.get_header()]
        for line in self.lines:
            lines.append(str(line))
        return "".join(lines)

    def shift_to_mod(self, pos):
        """Return how far line pos of the original file moves in the
        modified file, or None if this hunk removed that line.

        NOTE(review): pos is compared against orig_pos-1, which suggests a
        0-based position against 1-based hunk headers - confirm with callers.
        """
        if pos < self.orig_pos-1:
            # Entirely before this hunk: unaffected.
            return 0
        elif pos > self.orig_pos+self.orig_range:
            # Entirely after this hunk: shifted by the hunk's net growth.
            return self.mod_range - self.orig_range
        else:
            return self.shift_to_mod_lines(pos)

    def shift_to_mod_lines(self, pos):
        # Walk the hunk's lines, accumulating the insert/remove balance
        # until the position of interest has been passed.
        assert (pos >= self.orig_pos-1 and pos <= self.orig_pos+self.orig_range)
        position = self.orig_pos-1
        shift = 0
        for line in self.lines:
            if isinstance(line, InsertLine):
                shift += 1
            elif isinstance(line, RemoveLine):
                if position == pos:
                    # The queried line itself was removed by this hunk.
                    return None
                shift -= 1
                position += 1
            elif isinstance(line, ContextLine):
                position += 1
            if position > pos:
                break
        return shift
+
+
def iter_hunks(iter_lines):
    """Yield Hunk objects parsed from an iterator of patch lines.

    Expects iter_lines to be positioned after the ---/+++ header lines;
    each "@@" header starts a new hunk whose body is consumed here.
    """
    hunk = None
    for line in iter_lines:
        if line == "\n":
            # Blank separator: flush the pending hunk, if any.
            if hunk is not None:
                yield hunk
                hunk = None
            continue
        if hunk is not None:
            yield hunk
        hunk = hunk_from_header(line)
        orig_size = 0
        mod_size = 0
        # Consume body lines until both sides' ranges are satisfied.
        while orig_size < hunk.orig_range or mod_size < hunk.mod_range:
            hunk_line = parse_line(iter_lines.next())
            hunk.lines.append(hunk_line)
            if isinstance(hunk_line, (RemoveLine, ContextLine)):
                orig_size += 1
            if isinstance(hunk_line, (InsertLine, ContextLine)):
                mod_size += 1
    # Flush the final hunk once the input is exhausted.
    if hunk is not None:
        yield hunk
+
+
class Patch:
    """A parsed single-file patch: old/new names plus a list of Hunks."""

    def __init__(self, oldname, newname):
        self.oldname = oldname
        self.newname = newname
        self.hunks = []

    def __str__(self):
        # Full patch text: header followed by every hunk in order.
        ret = self.get_header()
        ret += "".join([str(h) for h in self.hunks])
        return ret

    def get_header(self):
        # The "---"/"+++" name lines of the patch.
        return "--- %s\n+++ %s\n" % (self.oldname, self.newname)

    def stats_str(self):
        """Return a string of patch statistics"""
        removes = 0
        inserts = 0
        for hunk in self.hunks:
            for line in hunk.lines:
                if isinstance(line, InsertLine):
                    inserts+=1;
                elif isinstance(line, RemoveLine):
                    removes+=1;
        return "%i inserts, %i removes in %i hunks" % \
            (inserts, removes, len(self.hunks))

    def pos_in_mod(self, position):
        """Map a line position in the original file to the modified file.

        :return: the shifted position, or None if the line was removed
            by one of the hunks.
        """
        newpos = position
        for hunk in self.hunks:
            shift = hunk.shift_to_mod(position)
            if shift is None:
                return None
            newpos += shift
        return newpos

    def iter_inserted(self):
        """Iterates through inserted lines

        :return: Pair of line number, line
        :rtype: iterator of (int, InsertLine)
        """
        for hunk in self.hunks:
            # mod_pos is 1-based; pos tracks the 0-based modified-file line.
            pos = hunk.mod_pos - 1;
            for line in hunk.lines:
                if isinstance(line, InsertLine):
                    yield (pos, line)
                    pos += 1
                if isinstance(line, ContextLine):
                    pos += 1
+
+
def parse_patch(iter_lines):
    """Parse the text of a single-file patch into a Patch object."""
    (orig_name, mod_name) = get_patch_names(iter_lines)
    result = Patch(orig_name, mod_name)
    result.hunks.extend(iter_hunks(iter_lines))
    return result
+
+
def iter_file_patch(iter_lines):
    """Split a multi-file patch into per-file lists of lines.

    Yields one list of lines (the ---/+++ header plus its hunks) for each
    file the patch touches, skipping "=== ", "*** ", "#" and "diff " noise.
    """
    saved_lines = []
    orig_range = 0
    for line in iter_lines:
        if line.startswith('=== ') or line.startswith('*** '):
            continue
        if line.startswith('#'):
            continue
        if line.startswith('diff '):
            continue
        elif orig_range > 0:
            # Still inside a hunk body: count down original-side lines so a
            # content line starting with "--- " is not mistaken for a header.
            if line.startswith('-') or line.startswith(' '):
                orig_range -= 1
        elif line.startswith('--- '):
            # Start of the next file's patch: emit what we gathered so far.
            if len(saved_lines) > 0:
                yield saved_lines
            saved_lines = []
        elif line.startswith('@@'):
            hunk = hunk_from_header(line)
            orig_range = hunk.orig_range
        saved_lines.append(line)
    if len(saved_lines) > 0:
        yield saved_lines
+
+
def iter_lines_handle_nl(iter_lines):
    r"""
    Iterates through lines, ensuring that lines that originally had no
    terminating \n are produced without one. This transformation may be
    applied at any point up until hunk line parsing, and is safe to apply
    repeatedly.
    """
    # Buffer one line so a NO_NL marker can strip its predecessor's newline.
    last_line = None
    for line in iter_lines:
        if line == NO_NL:
            # The marker refers to the previous line: drop its newline and
            # swallow the marker itself.
            assert last_line.endswith('\n')
            last_line = last_line[:-1]
            line = None
        if last_line is not None:
            yield last_line
        last_line = line
    if last_line is not None:
        yield last_line
+
+
def parse_patches(iter_lines):
    """Parse a (possibly multi-file) patch text into a list of Patch objects."""
    normalized = iter_lines_handle_nl(iter_lines)
    return [parse_patch(iter(lines)) for lines in iter_file_patch(normalized)]
+
+
def difference_index(atext, btext):
    """Find the index of the first character that differs between two texts.

    :param atext: The first text
    :type atext: str
    :param btext: The second text
    :type btext: str
    :return: The index, or None if there are no differences within the range
    :rtype: int or NoneType
    """
    # Only compare up to the length of the shorter text.
    limit = min(len(atext), len(btext))
    for i in range(limit):
        if atext[i] != btext[i]:
            return i
    return None
+
+
def iter_patched(orig_lines, patch_lines):
    """Iterate through a series of lines with a patch applied.
    This handles a single file, and does exact, not fuzzy patching.

    :param orig_lines: iterable of the original file's lines (may be None
        or empty when the patch creates the file).
    :param patch_lines: iterable of the patch text's lines.
    :raises PatchConflict: if a context or removed line does not exactly
        match the original text.
    """
    if orig_lines is not None:
        orig_lines = orig_lines.__iter__()
    seen_patch = []
    patch_lines = iter_lines_handle_nl(patch_lines.__iter__())
    # Skip over the ---/+++ header; only the hunks matter here.
    get_patch_names(patch_lines)
    line_no = 1
    for hunk in iter_hunks(patch_lines):
        # Pass through unchanged lines up to the start of this hunk.
        while line_no < hunk.orig_pos:
            orig_line = orig_lines.next()
            yield orig_line
            line_no += 1
        for hunk_line in hunk.lines:
            # Accumulate the patch text seen so far for error reporting.
            seen_patch.append(str(hunk_line))
            if isinstance(hunk_line, InsertLine):
                yield hunk_line.contents
            elif isinstance(hunk_line, (ContextLine, RemoveLine)):
                orig_line = orig_lines.next()
                if orig_line != hunk_line.contents:
                    # Exact matching: any divergence is a conflict.
                    raise PatchConflict(line_no, orig_line, "".join(seen_patch))
                if isinstance(hunk_line, ContextLine):
                    yield orig_line
                else:
                    # Removed lines are consumed but not yielded.
                    assert isinstance(hunk_line, RemoveLine)
                line_no += 1
    # Anything after the last hunk is unchanged.
    if orig_lines is not None:
        for line in orig_lines:
            yield line
=== added file 'specs/import-dsc'
--- a/specs/import-dsc 1970-01-01 00:00:00 +0000
+++ b/specs/import-dsc 2007-06-24 21:21:25 +0000
@@ -0,0 +1,71 @@
+Building a remote branch
+========================
+
+Status: Draft
+
+Aim
+---
+
+When a package has not been maintained in a VCS before, the history information
+is in the source packages. There will generally be a series of these for each
+package.
+
+The plugin should support creating a branch from these files.
+
+It would also be good if the past versions could be obtained from
+snapshot.debian.net or similar.
+
+Design
+------
+
+If there are multiple .dscs provided then they should be sorted into ascending
+version number, and then each should be imported in turn. If it has an
+.orig.tar.gz then it should be imported first on to the upstream branch,
+adding a tag for use with new-upstream-release-handling. Then the .diff.gz
+should be applied, and the result committed on the packaging branch as a merge
+with the upstream.
+
+If there is no .orig.tar.gz the tree should be set at the corresponding
+upstream import and the diff applied on to the tree. However the commit should
+have a single parent of the last commit on the packaging branch.
+
+Each revision on the packaging branch should be tagged to match the tag
+scheme used for uploads.
+
+The revision graph will evolve like this, starting from an empty branch.
+
+ upstream
+
+ packaging
+
+Import the first upstream
+
+ upstream ---1
+
+ packaging
+
+Add the packaging diff
+
+ upstream ---1
+ \
+ packaging 1.1
+
+Add the next packaging diff.
+
+ upstream ---1
+ \
+ packaging 1.1---1.2
+
+Then import the next upstream, using the diff to provide the merge commit.
+
+ upstream ---1-----------2
+ \ \
+ packaging 1.1---1.2---2.1
+
+and continue on like that until all is imported.
+
+There should be no conflicts, as the merge commits aren't done as merges,
+the second parent (along the packaging branch) is just added to represent the
+history. In reality the tree of that commit is just the result of applying the
+.diff.gz to the .orig.tar.gz, i.e. the package.
+
=== added file 'tests/test_import_dsc.py'
--- a/tests/test_import_dsc.py 1970-01-01 00:00:00 +0000
+++ b/tests/test_import_dsc.py 2007-06-24 21:21:25 +0000
@@ -0,0 +1,148 @@
+# test_import_dsc.py -- Test importing .dsc files.
+# Copyright (C) 2007 James Westby <jw+debian at jameswestby.net>
+#
+# This file is part of bzr-builddeb.
+#
+# bzr-builddeb is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# bzr-builddeb is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with bzr-builddeb; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+
+import os
+import shutil
+import tarfile
+
+from bzrlib.errors import FileExists
+from bzrlib.tests import TestCaseWithTransport
+from bzrlib.workingtree import WorkingTree
+
+from import_dsc import import_dsc
+
def write_to_file(filename, contents):
    """Create (or truncate) filename and write contents into it."""
    out = open(filename, 'wb')
    try:
        out.write(contents)
    finally:
        out.close()
+
class TestImportDsc(TestCaseWithTransport):
    """Tests for import_dsc, building a real source package on disk."""

    # Fixture filenames shared by the helpers and tests below.
    basedir = 'package'
    target = 'target'
    orig_1 = 'package_0.1.orig.tar.gz'
    diff_1 = 'package_0.1-1.diff.gz'
    dsc_1 = 'package_0.1-1.dsc'

    def make_base_package(self):
        # Minimal upstream source tree.
        os.mkdir(self.basedir)
        write_to_file(os.path.join(self.basedir, 'README'), 'hello\n')
        write_to_file(os.path.join(self.basedir, 'CHANGELOG'), 'version 1\n')
        write_to_file(os.path.join(self.basedir, 'Makefile'), 'bad command\n')

    def make_orig_1(self):
        # Tar the base package up as the upstream tarball.
        self.make_base_package()
        tar = tarfile.open(self.orig_1, 'w:gz')
        try:
            tar.add(self.basedir)
        finally:
            tar.close()

    def make_diff_1(self):
        # Build a .diff.gz that adds debian/ and fixes the Makefile.
        diffdir = 'package-0.1'
        shutil.copytree(self.basedir, diffdir)
        os.mkdir(os.path.join(diffdir, 'debian'))
        write_to_file(os.path.join(diffdir, 'debian', 'changelog'),
                      'version 1-1\n')
        write_to_file(os.path.join(diffdir, 'Makefile'), 'good command\n')
        # NOTE(review): shells out; assumes diff and gzip are on PATH.
        os.system('diff -Nru %s %s | gzip -9 - > %s' % (self.basedir, diffdir,
                                                        self.diff_1))
        shutil.rmtree(diffdir)

    def make_dsc_1(self):
        self.make_orig_1()
        self.make_diff_1()
        # The checksums/sizes below are placeholders; import_dsc does not
        # appear to validate them.
        write_to_file(self.dsc_1, """Format: 1.0
Source: package
Version: 0.1-1
Binary: package
Maintainer: maintainer <maint at maint.org>
Architecture: any
Standards-Version: 3.7.2
Build-Depends: debhelper (>= 5.0.0)
Files:
 8636a3e8ae81664bac70158503aaf53a 1328218 %s
 1acd97ad70445afd5f2a64858296f21c 20709 %s
""" % (self.orig_1, self.diff_1))

    def import_dsc_1(self):
        # Build the fixture package and run the import under test.
        self.make_dsc_1()
        import_dsc(self.target, [self.dsc_1])

    def test_import_dsc_target_extant(self):
        # Importing into an already-existing directory must fail.
        os.mkdir(self.target)
        write_to_file('package_0.1.dsc', '')
        self.assertRaises(FileExists, import_dsc, self.target, ['package_0.1.dsc'])

    def test_import_one_dsc_tree(self):
        # The resulting working tree contains the patched contents.
        self.import_dsc_1()
        self.failUnlessExists(self.target)
        tree = WorkingTree.open_containing(self.target)[0]
        tree.lock_read()
        expected_inv = ['README', 'CHANGELOG', 'Makefile', 'debian/',
                        'debian/changelog']
        try:
            self.check_inventory_shape(tree.inventory, expected_inv)
        finally:
            tree.unlock()
        for path in expected_inv:
            self.failUnlessExists(os.path.join(self.target, path))
        f = open(os.path.join(self.target, 'Makefile'))
        try:
            contents = f.read()
        finally:
            f.close()
        self.assertEqual(contents, 'good command\n')
        f = open(os.path.join(self.target, 'debian', 'changelog'))
        try:
            contents = f.read()
        finally:
            f.close()
        self.assertEqual(contents, 'version 1-1\n')
        # The tree should be clean (no uncommitted changes) after import.
        self.assertEqual(tree.changes_from(tree.basis_tree()).has_changed(),
                         False)

    def test_import_one_dsc_history(self):
        # Two revisions: the upstream import, then the packaging merge.
        self.import_dsc_1()
        tree = WorkingTree.open_containing(self.target)[0]
        rh = tree.branch.revision_history()
        self.assertEqual(len(rh), 2)
        msg = tree.branch.repository.get_revision(rh[0]).message
        self.assertEqual(msg, 'import upstream from %s' % self.orig_1)
        msg = tree.branch.repository.get_revision(rh[1]).message
        self.assertEqual(msg, 'merge packaging changes from %s' % self.diff_1)
        # The delta between the two revisions is exactly the .diff.gz:
        # debian/ added, Makefile modified, nothing removed or renamed.
        changes = tree.changes_from(tree.branch.repository.revision_tree(rh[0]))
        added = changes.added
        self.assertEqual(len(added), 2)
        self.assertEqual(added[0][0], 'debian')
        self.assertEqual(added[0][2], 'directory')
        self.assertEqual(added[1][0], 'debian/changelog')
        self.assertEqual(added[1][2], 'file')
        self.assertEqual(len(changes.removed), 0)
        self.assertEqual(len(changes.renamed), 0)
        modified = changes.modified
        self.assertEqual(len(modified), 1)
        self.assertEqual(modified[0][0], 'Makefile')
        self.assertEqual(modified[0][2], 'file')
        self.assertEqual(modified[0][3], True)
        self.assertEqual(modified[0][4], False)
+
=== modified file 'tests/__init__.py'
--- a/tests/__init__.py 2007-06-23 18:15:14 +0000
+++ b/tests/__init__.py 2007-06-24 21:21:25 +0000
@@ -135,6 +135,7 @@
testmod_names = [
'test_builder',
'test_config',
+ 'test_import_dsc',
'test_repack_tarball_extra',
'test_util',
]
More information about the Pkg-bazaar-commits
mailing list