[debhelper-devel] [debhelper] 02/02: Extract a find_hardlinks function

Niels Thykier nthykier at moszumanska.debian.org
Sat Oct 21 11:25:58 UTC 2017


This is an automated email from the git hooks/post-receive script.

nthykier pushed a commit to branch master
in repository debhelper.

commit ae55a1d2684e836b4aa89333a04ce2c41c5e9939
Author: Niels Thykier <niels at thykier.net>
Date:   Sat Oct 21 10:45:17 2017 +0000

    Extract a find_hardlinks function
    
    Signed-off-by: Niels Thykier <niels at thykier.net>
---
 dh_compress                    | 23 +++++------------------
 lib/Debian/Debhelper/Dh_Lib.pm | 28 +++++++++++++++++++++++++++-
 2 files changed, 32 insertions(+), 19 deletions(-)

diff --git a/dh_compress b/dh_compress
index 3858c46..2c8061e 100755
--- a/dh_compress
+++ b/dh_compress
@@ -156,21 +156,8 @@ on_pkgs_in_parallel {
 		# Look for files with hard links. If we are going to compress both,
 		# we can preserve the hard link across the compression and save
 		# space in the end.
-		my (@f, %hardlinks, %seen);
-		foreach (@files) {
-			my ($dev, $inode, undef, $nlink)=stat($_);
-			if (defined $nlink && $nlink > 1) {
-				if (! $seen{"$inode.$dev"}) {
-					$seen{"$inode.$dev"}=$_;
-					push @f, $_;
-				} else {
-					# This is a hardlink.
-					$hardlinks{$_}=$seen{"$inode.$dev"};
-				}
-			} else {
-				push @f, $_;
-			}
-		}
+		my ($unique_files, $hardlinks) = find_hardlinks(@files);
+		my @f = @{$unique_files};
 
 		# normalize file names and remove duplicates
 		my $norm_from_dir = $tmp;
@@ -179,7 +166,7 @@ on_pkgs_in_parallel {
 		}
 		my $resolved = abs_path($norm_from_dir)
 			or error("Cannot resolve $norm_from_dir: $!");
-		my @normalized = normalize_paths($norm_from_dir, $resolved, $tmp, @f);
+		my @normalized = normalize_paths($norm_from_dir, $resolved, $tmp, @{$unique_files});
 		my %uniq_f; @uniq_f{@normalized} = ();
 		@f = sort keys %uniq_f;
 
@@ -192,11 +179,11 @@ on_pkgs_in_parallel {
 
 		# Now change over any files we can that used to be hard links so
 		# they are again.
-		foreach (keys %hardlinks) {
+		foreach (keys %{$hardlinks}) {
 			# Remove old file.
 			rm_files($_);
 			# Make new hardlink.
-			doit("ln","-f","$hardlinks{$_}.gz","$_.gz");
+			doit("ln", "-f", "$hardlinks->{$_}.gz", "$_.gz");
 		}
 
 		verbose_print("cd '$olddir'");
diff --git a/lib/Debian/Debhelper/Dh_Lib.pm b/lib/Debian/Debhelper/Dh_Lib.pm
index 54a545d..195effe 100644
--- a/lib/Debian/Debhelper/Dh_Lib.pm
+++ b/lib/Debian/Debhelper/Dh_Lib.pm
@@ -65,7 +65,7 @@ our (@EXPORT, %dh);
 	    &glob_expand_error_handler_silently_ignore DH_BUILTIN_VERSION
 	    &print_and_complex_doit &default_sourcedir &qx_cmd
 	    &compute_doc_main_package &is_so_or_exec_elf_file
-	    &assert_opt_is_known_package &dbgsym_tmpdir
+	    &assert_opt_is_known_package &dbgsym_tmpdir &find_hardlinks
 );
 
 # The Makefile changes this if debhelper is installed in a PREFIX.
@@ -1991,6 +1991,32 @@ sub on_pkgs_in_parallel(&) {
 	goto \&on_items_in_parallel;
 }
 
+# Given a list of files, find all hardlinked files and return:
+# 1: a list of unique files (no file in this list is hardlinked to any other file in the list)
+# 2: a map where each key is the name of a hardlink and its value is the name chosen as that
+#    file's representative in the list of unique files.
+#
+# This can be used to relink hard links after modifying one of them.
+sub find_hardlinks {
+	my (@all_files) = @_;
+	my (%seen, %hardlinks, @unique_files);
+	for my $file (@all_files) {
+		my ($dev, $inode, undef, $nlink)=stat($file);
+		if (defined $nlink && $nlink > 1) {
+			if (! $seen{"$inode.$dev"}) {
+				$seen{"$inode.$dev"}=$file;
+				push(@unique_files, $file);
+			} else {
+				# This is a hardlink.
+				$hardlinks{$file}=$seen{"$inode.$dev"};
+			}
+		} else {
+			push(@unique_files, $file);
+		}
+	}
+	return (\@unique_files, \%hardlinks);
+}
+
 sub on_items_in_parallel {
 	my ($pkgs_ref, $code) = @_;
 	my @pkgs = @{$pkgs_ref};

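For reference, a minimal, hypothetical sketch of how a caller might use the new
helper outside dh_compress. It assumes Debian::Debhelper::Dh_Lib is importable and
invokes gzip directly via doit(); dh_compress itself batches the compression, but
the compress-then-relink pattern is the same as in the hunks above.

    #!/usr/bin/perl
    use strict;
    use warnings;
    use Debian::Debhelper::Dh_Lib;  # exports find_hardlinks, doit, rm_files

    my @files = @ARGV;

    # Partition the input: one representative per hardlink group, plus a map
    # from each remaining hardlink name to its representative.
    my ($unique_files, $hardlinks) = find_hardlinks(@files);

    # Compress only the representatives (and any non-hardlinked files).
    doit('gzip', '-9nf', @{$unique_files}) if @{$unique_files};

    # Re-create each hardlink against its compressed representative.
    for my $link (sort keys %{$hardlinks}) {
        rm_files($link);
        doit('ln', '-f', "$hardlinks->{$link}.gz", "$link.gz");
    }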
-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debhelper/debhelper.git