[findimagedupes] 06/08: Restore original upstream to use quilt 3.0 format

Andreas Tille tille at debian.org
Sat Jul 12 22:54:12 UTC 2014


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository findimagedupes.

commit 2a770f74a65ef5d32f2101ef6e711484aad96b47
Author: Andreas Tille <tille at debian.org>
Date:   Sun Jul 13 00:41:22 2014 +0200

    Restore original upstream to use quilt 3.0 format
---
 C.pm           | 116 ---------------------------------------------------------
 Makefile.PL    |   6 ---
 findimagedupes | 111 ++++++++++++++++++++++++++++++++++++++++++++++++++++--
 3 files changed, 108 insertions(+), 125 deletions(-)
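
With Debian source format 3.0 (quilt), Debian-specific changes are carried as patches under debian/patches, so this commit drops the packaging-side split of the bit-comparison code into a prebuilt findimagedupes::C module (C.pm plus its Inline::MakeMaker Makefile.PL) and restores the upstream layout, where the script keeps the C source in its own DATA section and Inline::C compiles and caches it on first use. A minimal, self-contained sketch of that Inline "DATA" pattern follows; the function name and values are made up for illustration and are not part of the package:

#!/usr/bin/perl
use strict;
use warnings;

# The C source lives after __DATA__ under an __C__ marker; Inline::C
# compiles it on first run, caches the shared object, and binds the
# C function as an ordinary Perl sub.
use Inline C => 'DATA';

print count_bits(0xFF0F), "\n";   # prints 12

__DATA__

__C__

/* simple bit count, compiled by Inline::C at first run */
int count_bits (unsigned int v) {
    int n = 0;
    while (v) { n += v & 1; v >>= 1; }
    return n;
}

Later runs reuse the cached object and skip the compile step; the diff below points that cache at /usr/local/lib/findimagedupes via the DIRECTORY option, as upstream does.
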

diff --git a/C.pm b/C.pm
deleted file mode 100644
index 1dbffad..0000000
--- a/C.pm
+++ /dev/null
@@ -1,116 +0,0 @@
-package findimagedupes::C;
-
-use Inline
-        C => 'DATA',
-        NAME => 'findimagedupes::C',
-        VERSION => '0.01',
-;
-
-our $VERSION = '0.01';
-1;
-
-__DATA__
-
-__C__
-
-/* efficient bit-comparison */
-
-#include <stdint.h>
-#include <string.h>
-
-#define LOOKUP_SIZE 65536
-#define FP_CHUNKS 16
-
-typedef uint16_t FP[FP_CHUNKS];
-
-unsigned int simplecountbits (unsigned int i) {
-	unsigned int val = i, res = 0;
-
-	while (val) {
-		res += (val&1);
-		val >>= 1;
-	}
-	return(res);
-}
-
-void diffbits (SV* oldfiles, SV* newfiles, unsigned int threshold, unsigned limit) {
-	FP *the_data, *a, *b;
-	unsigned int lookup[LOOKUP_SIZE];
-	unsigned int i, j, k, m, bits, old, new;
-	HV *oldhash;
-	HE *oldhash_entry;
-	HV *newhash;
-	HE *newhash_entry;
-	unsigned int numkeys = 0;
-	SV *sv_val;
-	Inline_Stack_Vars;
-
-	if ((threshold<0) || (threshold>256)) {
-		croak("ridiculous threshold specified");
-	}
-
-	/* pack fingerprints into C array */
-	/* partly lifted from Inline::C-Cookbook */
-
-	if (! SvROK(newfiles)) {
-		croak("newfiles is not a reference");
-	}
-	newhash = (HV *)SvRV(newfiles);
-	new = hv_iterinit(newhash);
-
-	if (! SvROK(oldfiles)) {
-		croak("oldfiles is not a reference");
-	}
-	oldhash = (HV *)SvRV(oldfiles);
-	old = hv_iterinit(oldhash);
-
-	numkeys = new+old;
-	if (numkeys<2) {
-		/* minor optimization: return without doing anything */
-		/* malloc(0) could be bad... */
-		Inline_Stack_Void;
-	}
-	the_data = (FP *)malloc(numkeys*sizeof(FP));
-	if (!the_data) {
-		croak("malloc failed");
-	}
-
-	for (i = 0; i<new; i++) {
-		newhash_entry = hv_iternext(newhash);
-		sv_val = hv_iterval(newhash, newhash_entry);
-		memcpy(the_data+i, SvPV(sv_val, PL_na), sizeof(FP));
-	}
-	for (i = new; i<numkeys; i++) {
-		oldhash_entry = hv_iternext(oldhash);
-		sv_val = hv_iterval(oldhash, oldhash_entry);
-		memcpy(the_data+i, SvPV(sv_val, PL_na), sizeof(FP));
-	}
-
-	/* initialise lookup table */
-	/* XXX: fast enough? could optimise more or compile-in a static table */
-	for (i=0; i<LOOKUP_SIZE; i++) {
-		lookup[i] = simplecountbits(i);
-	}
-
-	/* look for matches */
-	Inline_Stack_Reset;
-	for (a=the_data, i=0, m=(limit>0 ? new : numkeys-1); i<m; a++, i++) {
-		for (b=a+1, j=i+1; j<numkeys; b++, j++) {
-			for (bits=0, k=0; k<FP_CHUNKS; k++) {
-				bits += lookup[(*a)[k]^(*b)[k]];
-				if (bits > threshold) goto abortmatch;
-			}
-			/* if (bits <= threshold) */ {
-				Inline_Stack_Push(sv_2mortal(newSViv(i)));
-				Inline_Stack_Push(sv_2mortal(newSViv(j)));
-				Inline_Stack_Push(sv_2mortal(newSViv(bits)));
-			}
-abortmatch:;
-		}
-	}
-	Inline_Stack_Done;
-
-	/* clean up */
-	free(the_data);
-}
-
diff --git a/Makefile.PL b/Makefile.PL
deleted file mode 100644
index 83df31d..0000000
--- a/Makefile.PL
+++ /dev/null
@@ -1,6 +0,0 @@
-use Inline::MakeMaker;
-WriteInlineMakefile(
-    'NAME'		=> 'findimagedupes::C',
-    'VERSION_FROM' => 'C.pm',
-    'EXE_FILES' => [ 'findimagedupes' ],
-);
diff --git a/findimagedupes b/findimagedupes
index ca6afd5..489a7a0 100755
--- a/findimagedupes
+++ b/findimagedupes
@@ -34,8 +34,10 @@ use Graphics::Magick;
 use MIME::Base64;
 use Pod::Usage;
 
-use lib "/usr/lib/findimagedupes/lib";
-use findimagedupes::C;
+use Inline
+	C => 'DATA',
+	NAME => 'findimagedupes',
+	DIRECTORY => '/usr/local/lib/findimagedupes';
 
 # ----------------------------------------------------------------------
 #
@@ -509,7 +511,7 @@ sub fingerprint {
 }
 
 sub finddupes {
-	my @matches = findimagedupes::C::diffbits(\%fpcache, \%filelist, $threshold, $add);
+	my @matches = diffbits(\%fpcache, \%filelist, $threshold, $add);
 
 	my (%set, %ptr, %val);
 
@@ -988,3 +990,106 @@ algorithm.
 
 =cut
 
+__C__
+
+/* efficient bit-comparison */
+
+#include <stdint.h>
+#include <string.h>
+
+#define LOOKUP_SIZE 65536
+#define FP_CHUNKS 16
+
+typedef uint16_t FP[FP_CHUNKS];
+
+unsigned int simplecountbits (unsigned int i) {
+	unsigned int val = i, res = 0;
+
+	while (val) {
+		res += (val&1);
+		val >>= 1;
+	}
+	return(res);
+}
+
+void diffbits (SV* oldfiles, SV* newfiles, unsigned int threshold, unsigned limit) {
+	FP *the_data, *a, *b;
+	unsigned int lookup[LOOKUP_SIZE];
+	unsigned int i, j, k, m, bits, old, new;
+	HV *oldhash;
+	HE *oldhash_entry;
+	HV *newhash;
+	HE *newhash_entry;
+	unsigned int numkeys = 0;
+	SV *sv_val;
+	Inline_Stack_Vars;
+
+	if ((threshold<0) || (threshold>256)) {
+		croak("ridiculous threshold specified");
+	}
+
+	/* pack fingerprints into C array */
+	/* partly lifted from Inline::C-Cookbook */
+
+	if (! SvROK(newfiles)) {
+		croak("newfiles is not a reference");
+	}
+	newhash = (HV *)SvRV(newfiles);
+	new = hv_iterinit(newhash);
+
+	if (! SvROK(oldfiles)) {
+		croak("oldfiles is not a reference");
+	}
+	oldhash = (HV *)SvRV(oldfiles);
+	old = hv_iterinit(oldhash);
+
+	numkeys = new+old;
+	if (numkeys<2) {
+		/* minor optimization: return without doing anything */
+		/* malloc(0) could be bad... */
+		Inline_Stack_Void;
+	}
+	the_data = (FP *)malloc(numkeys*sizeof(FP));
+	if (!the_data) {
+		croak("malloc failed");
+	}
+
+	for (i = 0; i<new; i++) {
+		newhash_entry = hv_iternext(newhash);
+		sv_val = hv_iterval(newhash, newhash_entry);
+		memcpy(the_data+i, SvPV(sv_val, PL_na), sizeof(FP));
+	}
+	for (i = new; i<numkeys; i++) {
+		oldhash_entry = hv_iternext(oldhash);
+		sv_val = hv_iterval(oldhash, oldhash_entry);
+		memcpy(the_data+i, SvPV(sv_val, PL_na), sizeof(FP));
+	}
+
+	/* initialise lookup table */
+	/* XXX: fast enough? could optimise more or compile-in a static table */
+	for (i=0; i<LOOKUP_SIZE; i++) {
+		lookup[i] = simplecountbits(i);
+	}
+
+	/* look for matches */
+	Inline_Stack_Reset;
+	for (a=the_data, i=0, m=(limit>0 ? new : numkeys-1); i<m; a++, i++) {
+		for (b=a+1, j=i+1; j<numkeys; b++, j++) {
+			for (bits=0, k=0; k<FP_CHUNKS; k++) {
+				bits += lookup[(*a)[k]^(*b)[k]];
+				if (bits > threshold) goto abortmatch;
+			}
+			/* if (bits <= threshold) */ {
+				Inline_Stack_Push(sv_2mortal(newSViv(i)));
+				Inline_Stack_Push(sv_2mortal(newSViv(j)));
+				Inline_Stack_Push(sv_2mortal(newSViv(bits)));
+			}
+abortmatch:;
+		}
+	}
+	Inline_Stack_Done;
+
+	/* clean up */
+	free(the_data);
+}
+
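
For reference, the pairwise test that diffbits() performs above (XOR two 256-bit fingerprints and count the differing bits against the threshold) can also be written in a few lines of plain Perl using the classic unpack('%32b*', ...) bit-count idiom. The sketch below is illustrative only; the fingerprint values, names, and threshold are invented, and a pure-Perl loop over every pair would be much slower than the C lookup-table version above:

#!/usr/bin/perl
use strict;
use warnings;

# Hamming distance between two equal-length fingerprint strings:
# XOR them bytewise, then count the set bits of the result.
sub hamming_distance {
    my ($fp_a, $fp_b) = @_;
    return unpack('%32b*', $fp_a ^ $fp_b);
}

my $fp1 = "\x00" x 32;               # 256-bit all-zero fingerprint
my $fp2 = ("\x00" x 31) . "\x0f";    # same fingerprint with 4 bits flipped
my $threshold = 25;                  # illustrative bit threshold

my $bits = hamming_distance($fp1, $fp2);
printf "%d bits differ: %s\n", $bits,
    $bits <= $threshold ? "likely duplicates" : "distinct";
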

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/findimagedupes.git


