[debhelper-devel] [debhelper] 04/04: dh_compress: Run in parallel
Niels Thykier
nthykier at moszumanska.debian.org
Sun Jun 4 19:15:13 UTC 2017
This is an automated email from the git hooks/post-receive script.
nthykier pushed a commit to branch parallel_dh_tools
in repository debhelper.
commit 58bacc6122b3ab2c2ff54be864145c5c7bbd8787
Author: Niels Thykier <niels at thykier.net>
Date: Sun Jun 4 19:09:05 2017 +0000
dh_compress: Run in parallel
---
dh_compress | 270 ++++++++++++++++++++++++++++++------------------------------
1 file changed, 133 insertions(+), 137 deletions(-)
diff --git a/dh_compress b/dh_compress
index dfb076e..49ca526 100755
--- a/dh_compress
+++ b/dh_compress
@@ -77,154 +77,150 @@ Debian policy, version 3.0
init();
-my $olddir;
-
-foreach my $package (@{$dh{DOPACKAGES}}) {
- my $tmp=tmpdir($package);
-
- my $compress=pkgfile($package,"compress");
-
- # Run the file name gathering commands from within the directory
- # structure that will be affected.
- next unless -d $tmp;
- $olddir = getcwd() if not defined $olddir;
- verbose_print("cd $tmp");
- chdir($tmp) || error("Can't cd to $tmp: $!");
-
- # Figure out what files to compress.
- my @files;
- # First of all, deal with any files specified right on the command line.
- if (($package eq $dh{FIRSTPACKAGE} || $dh{PARAMS_ALL}) && @ARGV) {
- push @files, map { s{^/+}{}; $_ } @ARGV;
- }
- if ($compress) {
- # The compress file is a sh script that outputs the files to be compressed
- # (typically using find).
- warning("$compress is deprecated; use -X or avoid calling dh_compress instead");
- push @files, split(/\n/,`sh $olddir/$compress 2>/dev/null`);
- }
- else {
- # Note that all the excludes of odd things like _z
- # are because gzip refuses to compress such files, assuming
- # they are zip files. I looked at the gzip source to get the
- # complete list of such extensions: ".gz", ".z", ".taz",
- # ".tgz", "-gz", "-z", "_z"
- push @files, split(/\n/,`
- find usr/share/info usr/share/man -type f ! -iname "*.gz" \\
- ! -iname "*.gif" ! -iname "*.png" ! -iname "*.jpg" \\
- ! -iname "*.jpeg" \\
- 2>/dev/null || true;
- find usr/share/doc \\
- \\( -type d -name _sources -prune -false \\) -o \\
- -type f \\( -size +4k -o -name "changelog*" -o -name "NEWS*" \\) \\
- \\( -name changelog.html -o ! -iname "*.htm*" \\) \\
- ! -iname "*.xhtml" \\
- ! -iname "*.gif" ! -iname "*.png" ! -iname "*.jpg" \\
- ! -iname "*.jpeg" ! -iname "*.gz" ! -iname "*.taz" \\
- ! -iname "*.tgz" ! -iname "*.z" ! -iname "*.bz2" \\
- ! -iname "*-gz" ! -iname "*-z" ! -iname "*_z" \\
- ! -iname "*.epub" ! -iname "*.jar" ! -iname "*.zip" \\
- ! -iname "*.odg" ! -iname "*.odp" ! -iname "*.odt" \\
- ! -iname ".htaccess" ! -iname "*.css" \\
- ! -iname "*.xz" ! -iname "*.lz" ! -iname "*.lzma" \\
- ! -iname "*.svg" ! -iname "*.svgz" ! -iname "*.js" \\
- ! -name "index.sgml" ! -name "objects.inv" ! -name "*.map" \\
- ! -name "*.devhelp2" \\
- ! -name "copyright" 2>/dev/null || true;
- find usr/share/fonts/X11 -type f -name "*.pcf" 2>/dev/null || true;
- `);
- }
+on_each_pkg_in_parallel {
+ my $olddir;
+
+ foreach my $package (@_) {
+ my $tmp=tmpdir($package);
+
+ my $compress=pkgfile($package,"compress");
+
+ # Run the file name gathering commands from within the directory
+ # structure that will be affected.
+ next unless -d $tmp;
+ $olddir = getcwd() if not defined $olddir;
+ verbose_print("cd $tmp");
+ chdir($tmp) || error("Can't cd to $tmp: $!");
+
+ # Figure out what files to compress.
+ my @files;
+ # First of all, deal with any files specified right on the command line.
+ if (($package eq $dh{FIRSTPACKAGE} || $dh{PARAMS_ALL}) && @ARGV) {
+ push @files, map { s{^/+}{}; $_ } @ARGV;
+ }
+ if ($compress) {
+ # The compress file is a sh script that outputs the files to be compressed
+ # (typically using find).
+ warning("$compress is deprecated; use -X or avoid calling dh_compress instead");
+ push @files, split(/\n/,`sh $olddir/$compress 2>/dev/null`);
+ } else {
+ # Note that all the excludes of odd things like _z
+ # are because gzip refuses to compress such files, assuming
+ # they are zip files. I looked at the gzip source to get the
+ # complete list of such extensions: ".gz", ".z", ".taz",
+ # ".tgz", "-gz", "-z", "_z"
+ push @files, split(/\n/,`
+ find usr/share/info usr/share/man -type f ! -iname "*.gz" \\
+ ! -iname "*.gif" ! -iname "*.png" ! -iname "*.jpg" \\
+ ! -iname "*.jpeg" \\
+ 2>/dev/null || true;
+ find usr/share/doc \\
+ \\( -type d -name _sources -prune -false \\) -o \\
+ -type f \\( -size +4k -o -name "changelog*" -o -name "NEWS*" \\) \\
+ \\( -name changelog.html -o ! -iname "*.htm*" \\) \\
+ ! -iname "*.xhtml" \\
+ ! -iname "*.gif" ! -iname "*.png" ! -iname "*.jpg" \\
+ ! -iname "*.jpeg" ! -iname "*.gz" ! -iname "*.taz" \\
+ ! -iname "*.tgz" ! -iname "*.z" ! -iname "*.bz2" \\
+ ! -iname "*-gz" ! -iname "*-z" ! -iname "*_z" \\
+ ! -iname "*.epub" ! -iname "*.jar" ! -iname "*.zip" \\
+ ! -iname "*.odg" ! -iname "*.odp" ! -iname "*.odt" \\
+ ! -iname ".htaccess" ! -iname "*.css" \\
+ ! -iname "*.xz" ! -iname "*.lz" ! -iname "*.lzma" \\
+ ! -iname "*.svg" ! -iname "*.svgz" ! -iname "*.js" \\
+ ! -name "index.sgml" ! -name "objects.inv" ! -name "*.map" \\
+ ! -name "*.devhelp2" \\
+ ! -name "copyright" 2>/dev/null || true;
+ find usr/share/fonts/X11 -type f -name "*.pcf" 2>/dev/null || true;
+ `);
+ }
- # Exclude files from compression.
- if (@files && defined($dh{EXCLUDE}) && $dh{EXCLUDE}) {
- my @new=();
- foreach (@files) {
- my $ok=1;
- foreach my $x (@{$dh{EXCLUDE}}) {
- if (/\Q$x\E/) {
- $ok='';
- last;
+ # Exclude files from compression.
+ if (@files && defined($dh{EXCLUDE}) && $dh{EXCLUDE}) {
+ my @new=();
+ foreach (@files) {
+ my $ok=1;
+ foreach my $x (@{$dh{EXCLUDE}}) {
+ if (/\Q$x\E/) {
+ $ok='';
+ last;
+ }
}
+ push @new,$_ if $ok;
}
- push @new,$_ if $ok;
+ @files=@new;
}
- @files=@new;
- }
-
- # Look for files with hard links. If we are going to compress both,
- # we can preserve the hard link across the compression and save
- # space in the end.
- my @f=();
- my %hardlinks;
- my %seen;
- foreach (@files) {
- my ($dev, $inode, undef, $nlink)=stat($_);
- if (defined $nlink && $nlink > 1) {
- if (! $seen{"$inode.$dev"}) {
- $seen{"$inode.$dev"}=$_;
+
+ # Look for files with hard links. If we are going to compress both,
+ # we can preserve the hard link across the compression and save
+ # space in the end.
+ my (@f, %hardlinks, %seen);
+ foreach (@files) {
+ my ($dev, $inode, undef, $nlink)=stat($_);
+ if (defined $nlink && $nlink > 1) {
+ if (! $seen{"$inode.$dev"}) {
+ $seen{"$inode.$dev"}=$_;
+ push @f, $_;
+ } else {
+ # This is a hardlink.
+ $hardlinks{$_}=$seen{"$inode.$dev"};
+ }
+ } else {
push @f, $_;
}
- else {
- # This is a hardlink.
- $hardlinks{$_}=$seen{"$inode.$dev"};
- }
}
- else {
- push @f, $_;
+
+ # normalize file names and remove duplicates
+ my $norm_from_dir = $tmp;
+ if ($norm_from_dir !~ m{^/}) {
+ $norm_from_dir = "${olddir}/${tmp}";
+ }
+ my $resolved = abs_path($norm_from_dir)
+ or error("Cannot resolve $norm_from_dir: $!");
+ my @normalized = normalize_paths($norm_from_dir, $resolved, $tmp, @f);
+ my %uniq_f; @uniq_f{@normalized} = ();
+ @f = sort keys %uniq_f;
+
+ # do it
+ if (@f) {
+ # Make executables not be anymore.
+ xargs(\@f,"chmod","a-x");
+ xargs(\@f,"gzip","-9nf");
}
- }
- # normalize file names and remove duplicates
- my $norm_from_dir = $tmp;
- if ($norm_from_dir !~ m{^/}) {
- $norm_from_dir = "${olddir}/${tmp}";
- }
- my $resolved = abs_path($norm_from_dir)
- or error("Cannot resolve $norm_from_dir: $!");
- my @normalized = normalize_paths($norm_from_dir, $resolved, $tmp, @f);
- my %uniq_f; @uniq_f{@normalized} = ();
- @f = sort keys %uniq_f;
-
- # do it
- if (@f) {
- # Make executables not be anymore.
- xargs(\@f,"chmod","a-x");
-
- xargs(\@f,"gzip","-9nf");
- }
-
- # Now change over any files we can that used to be hard links so
- # they are again.
- foreach (keys %hardlinks) {
- # Remove old file.
- doit("rm","-f","$_");
- # Make new hardlink.
- doit("ln","$hardlinks{$_}.gz","$_.gz");
- }
+ # Now change over any files we can that used to be hard links so
+ # they are again.
+ foreach (keys %hardlinks) {
+ # Remove old file.
+ doit("rm","-f","$_");
+ # Make new hardlink.
+ doit("ln","$hardlinks{$_}.gz","$_.gz");
+ }
- verbose_print("cd '$olddir'");
- chdir($olddir);
-
- # Fix up symlinks that were pointing to the uncompressed files.
- my %links = map { chomp; $_ => 1 } `find $tmp -type l`;
- my $changed;
- # Keep looping through looking for broken links until no more
- # changes are made. This is done in case there are links pointing
- # to links, pointing to compressed files.
- do {
- $changed = 0;
- foreach my $link (keys %links) {
- my ($directory) = $link =~ m:(.*)/:;
- my $linkval = readlink($link);
- if (! -e "$directory/$linkval" && -e "$directory/$linkval.gz") {
- doit("rm","-f",$link);
- doit("ln","-sf","$linkval.gz","$link.gz");
- delete $links{$link};
- $changed++;
+ verbose_print("cd '$olddir'");
+ chdir($olddir);
+
+ # Fix up symlinks that were pointing to the uncompressed files.
+ my %links = map { chomp; $_ => 1 } `find $tmp -type l`;
+ my $changed;
+ # Keep looping through looking for broken links until no more
+ # changes are made. This is done in case there are links pointing
+ # to links, pointing to compressed files.
+ do {
+ $changed = 0;
+ foreach my $link (keys %links) {
+ my ($directory) = $link =~ m:(.*)/:;
+ my $linkval = readlink($link);
+ if (! -e "$directory/$linkval" && -e "$directory/$linkval.gz") {
+ doit("rm","-f",$link);
+ doit("ln","-sf","$linkval.gz","$link.gz");
+ delete $links{$link};
+ $changed++;
+ }
}
- }
- } while $changed;
-}
+ } while $changed;
+ }
+};
sub normalize_paths {
my ($cwd, $cwd_resolved, $tmp, @paths) = @_;
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debhelper/debhelper.git
More information about the debhelper-devel
mailing list