[Pkg-mono-svn-commits] rev 3776 - in scripts: . pet pet/PET pet/Parse pet/templates
David Paleino
hanska-guest at alioth.debian.org
Sat Nov 22 21:23:33 UTC 2008
Author: hanska-guest
Date: 2008-11-22 21:23:32 +0000 (Sat, 22 Nov 2008)
New Revision: 3776
Added:
scripts/pet/
scripts/pet/CPANTSdump
scripts/pet/PET/
scripts/pet/PET/Archive.pm
scripts/pet/PET/BTS.pm
scripts/pet/PET/Cache.pm
scripts/pet/PET/Classification.pm
scripts/pet/PET/Common.pm
scripts/pet/PET/Config.pm
scripts/pet/PET/DebVersions.pm
scripts/pet/PET/Svn.pm
scripts/pet/PET/Watch.pm
scripts/pet/Parse/
scripts/pet/Parse/DebControl.pm
scripts/pet/README
scripts/pet/debianqa.conf-sample
scripts/pet/fetchdata
scripts/pet/htaccess
scripts/pet/pet
scripts/pet/pet-chlog.cgi
scripts/pet/pet.cgi
scripts/pet/pkginfo.cgi
scripts/pet/rss_upload.cgi
scripts/pet/templates/
scripts/pet/templates/by_category
scripts/pet/templates/by_category.js
scripts/pet/templates/default.css
scripts/pet/templates/pkginfo
scripts/pkg-mono.conf
Log:
First PET commit
Added: scripts/pet/CPANTSdump
===================================================================
--- scripts/pet/CPANTSdump (rev 0)
+++ scripts/pet/CPANTSdump 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,83 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: CPANTSdump 608 2008-08-30 05:31:49Z tincho $
+#
+# Generates data for CPANTS
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2008
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+use PET::Cache;
+use PET::Config;
+use PET::Svn;
+use Getopt::Long;
+
+# Only --help is handled here; "pass_through" leaves unrecognised options
+# on @ARGV so getopt_common() below can consume the shared ones (--conf).
+my $p = new Getopt::Long::Parser;
+$p->configure(qw(no_ignore_case bundling pass_through));
+
+$p->getoptions('help|h|?' => \&help
+ ) or die "Error parsing command-line arguments!\n";
+
+my $opts = getopt_common(0, 1); # No passthru, load config
+
+# Package names come from the SVN repository (PET::Svn); per-package data
+# from the consolidated on-disk cache (PET::Cache).
+my @pkglist = sort(get_pkglist());
+my $data = read_cache(consolidated => "");
+
+# CSV header; the loop below prints one record per package.
+print "debian_pkg, CPAN_dist, CPAN_vers, N_bugs, N_patches\n";
+
+# For each non-native package, derive the CPAN distribution name, the
+# upstream-ish version, and bug/patch counts, then emit one CSV row.
+foreach my $pkg (@pkglist) {
+ next if($data->{svn}{$pkg}{watch_error} and
+ $data->{svn}{$pkg}{watch_error} eq 'Native');
+
+ my $ups_dir = $data->{watch}{$pkg}{upstream_dir};
+ my $ups_url = $data->{watch}{$pkg}{upstream_url};
+ my $dist;
+ # Try to recognise a CPAN dist name from the watch data: first the
+ # search.cpan.org dist page, then a CPAN/backpan mirror path; otherwise
+ # fall back to the raw URL (or "unknown").
+ if($ups_dir
+ and $ups_dir =~ m#http://search.cpan.org/dist/(.+?)(?:/|$|\s)#) {
+ $dist = $1;
+ } elsif($ups_url
+ and $ups_url =~ qr#^(?:(?:http|ftp)://\S*(?:cpan|backpan)\S*)/(?:modules/by-module|(?:by-)?authors/id)\b.*/(.*?)-v?\d#i) {
+ $dist = $1;
+ } else {
+ $dist = $ups_url || "unknown";
+ }
+ my $version = $data->{archive}{$pkg}{most_recent};
+ if($version) {
+ # Try to remove Debian stuff -- Not reliable!
+ $version =~ s/-[^-]+$//; # Debian revision
+ $version =~ s/^.*?://; # Epoch
+ $version =~ s/\+\w+$//; # Common suffixes (dfsg, svn, etc)
+ } else {
+ $version = "not-uploaded";
+ }
+ # patches is an arrayref of patch files when present; bts is a hashref
+ # keyed by bug number.  Either may be missing or non-ref: count 0 then.
+ my $patches = $data->{svn}{$pkg}{patches};
+ if($patches and ref $patches) {
+ $patches = scalar @$patches;
+ } else {
+ $patches = 0;
+ }
+ my $bugs = $data->{bts}{$pkg};
+ if($bugs and ref $bugs) {
+ $bugs = scalar keys %$bugs;
+ } else {
+ $bugs = 0;
+ }
+ # "debian_pkg, CPAN_dist, CPAN_vers, N_bugs, N_patches\n";
+ my @data = ($pkg, $dist, $version, $bugs, $patches);
+ print join(", ", map({ qq("$_") } @data)), "\n";
+}
+
+# Prints usage information and exits successfully.  Installed as the
+# Getopt::Long handler for --help/-h/-?.
+sub help {
+ print <<END;
+Usage:
+ $0 [options]
+
+Options:
+ --help, -h This help.
+ --conf, -c FILE Specifies a configuration file, uses defaults if not
+ present.
+END
+ exit 0;
+}
Property changes on: scripts/pet/CPANTSdump
___________________________________________________________________
Name: svn:executable
+ *
Added: scripts/pet/PET/Archive.pm
===================================================================
--- scripts/pet/PET/Archive.pm (rev 0)
+++ scripts/pet/PET/Archive.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,262 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Archive.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Module for retrieving data from the Debian archive, it reads Source.gz files,
+# and also downloads package lists from the NEW and INCOMING queues.
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package PET::Archive;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(deb_download deb_get deb_get_consolidated);
+
+use PET::Cache;
+use PET::Common;
+use PET::Config '%CFG';
+use PET::Svn;
+use PET::DebVersions;
+use Fcntl qw(:seek);
+use LWP::UserAgent;
+#use IO::Uncompress::Gunzip; # Only in lenny
+use Compress::Zlib ();
+use HTML::TableExtract;
+
+# Shared LWP user agent for every download in this module; honours
+# proxy settings from the environment.
+my $ua = new LWP::UserAgent;
+$ua->timeout(10);
+$ua->env_proxy;
+
+# Module for extracting source package listings from the Debian archive.
+# * If $force, current cache is ignored.
+#
+# Re-generates and returns the cache of consolidated versions (key "archive"),
+# which is keyed on package name and contains:
+# {
+# most_recent => $most_recent_version,
+# testing => $version_in_testing,
+# ....
+# }
+# Refreshes the per-suite "archive" cache (Sources.gz, NEW, incoming) and
+# re-generates the consolidated "archive" view keyed by source package.
+# * If $force, current per-suite caches are ignored and re-downloaded.
+# Returns nothing; results are written through PET::Cache.
+sub deb_download {
+ my $force = shift;
+ my @list = split(/\s*,\s*/, $CFG{archive}{suites});
+ my @ttls = split(/\s*,\s*/, $CFG{archive}{suites_ttl});
+ # Per-suite TTL in minutes, positionally matched with the suite list.
+ my %ttl = map({ $list[$_] => $ttls[$_] } (0..$#list));
+
+ if($CFG{archive}{new_url}) {
+ push @list, "new";
+ $ttl{new} = $CFG{archive}{new_ttl} || 60;
+ }
+ if($CFG{archive}{incoming_url}) {
+ push @list, "incoming";
+ $ttl{incoming} = $CFG{archive}{incoming_ttl} || 60;
+ }
+ my $data = {};
+ unless($force) {
+ $data = read_cache("archive", "", 0);
+ }
+ my $modified;
+ foreach my $src (@list) {
+ # I use find_stamp incorrectly on purpose: so each key acts as a root
+ if($force or ! $data->{$src}
+ or $ttl{$src} * 60 < time - find_stamp($data->{$src}, "")) {
+ info("$src is stale, getting new version") unless($force);
+ my $d;
+ if($src eq "new") {
+ $d = get_new();
+ } elsif($src eq "incoming") {
+ $d = get_incoming();
+ } else {
+ $d = get_sources($src);
+ }
+ if($d) {
+ update_cache("archive", $d, $src, 1, 0);
+ $modified = 1;
+ }
+ }
+ }
+ # Also rebuild if the package list is newer than the consolidated view.
+ $modified ||= (find_stamp(read_cache("consolidated", "pkglist", 0)) >
+ find_stamp(read_cache("consolidated", "archive", 0)));
+ unless($modified) {
+ info("Archive consolidated cache is up-to-date");
+ return;
+ }
+ info("Re-generating archive consolidated hash");
+ my $pkgs = get_pkglist_hashref();
+ # retain lock, we need consistency
+ $data = read_cache("archive", "", 1);
+ my $g = {};
+ foreach my $suite (keys(%$data)) {
+ next unless($ttl{$suite});
+ foreach my $pkg (keys(%{$data->{$suite}})) {
+ next if($pkg =~ m#^/#);
+ next if(%$pkgs and not $pkgs->{$pkg});
+ $g->{$pkg}{$suite} = $data->{$suite}{$pkg}{ver};
+ }
+ }
+ # Hash for comparing equivalent versions in different suites
+ my %src_compare = (
+ oldstable => 1, # not 0, so no need to test defined()
+ sarge => 1,
+ stable => 2,
+ etch => 2,
+ testing => 3,
+ lenny => 3,
+ experimental => 4,
+ incoming => 5,
+ new => 6,
+ unstable => 7,
+ sid => 7,
+ other => 8
+ );
+ # Most recent version wins; ties are broken by suite "rank" above.
+ foreach my $pkg (keys(%$g)) {
+ my @recent = sort( {
+ deb_compare_nofail($g->{$pkg}{$a}, $g->{$pkg}{$b}) or
+ ($src_compare{$a} || $src_compare{other}) <=>
+ ($src_compare{$b} || $src_compare{other})
+ } keys(%{$g->{$pkg}}));
+ $g->{$pkg}{most_recent} = $g->{$pkg}{$recent[-1]};
+ $g->{$pkg}{most_recent_src} = $recent[-1];
+ }
+ # Get control data from unstable only
+ if($data->{unstable}) {
+ foreach my $pkg (keys(%$g)) {
+ my $ctrl = $data->{unstable}{$pkg} or next;
+ # Shallow-copy before deleting "ver": the original hashref is
+ # aliased by the in-memory archive cache (PET::Cache memcache),
+ # and deleting the key in place would corrupt later reads of
+ # the archive cache within this process.
+ $g->{$pkg}{control} = { %$ctrl };
+ delete $g->{$pkg}{control}{ver};
+ }
+ }
+ $g = update_cache("consolidated", $g, "archive", 1, 0);
+ unlock_cache("archive");
+ return;
+}
+# Returns the consolidated hash of versions. Doesn't download anything.
+# $path optionally selects a sub-tree (e.g. a package name) below "archive".
+sub deb_get_consolidated {
+ my $path = shift || "";
+ return read_cache("consolidated", "archive/$path", 0);
+}
+# Returns the hash of versions. Doesn't download anything.
+# The optional argument is a path below the "archive" cache root.
+sub deb_get {
+ return read_cache("archive", shift, 0);
+}
+# Downloads and parses Sources.gz for every configured section of $suite.
+# Returns a hashref keyed by source package; for "unstable" each entry also
+# carries build-deps, maintainer, uploaders and DM-Upload-Allowed.  Returns
+# 0 on any download failure (partial results are discarded).
+sub get_sources {
+ my($suite) = shift;
+ my @sections = split(/\s*,\s*/, $CFG{archive}{sections});
+ my %vers;
+ foreach my $section(@sections) {
+ my $url = $CFG{archive}{mirror} . "/dists/$suite/$section/source/Sources.gz";
+ info("Downloading $url");
+ # "+>" with undef filename creates an anonymous temp file.
+ open(TMP, "+>", undef) or die $!;
+ my $res = $ua->get($url, ":content_cb" => sub {
+ print TMP $_[0] or die $!;
+ });
+ unless($res->is_success()) {
+ warn "Can't download $url: " . $res->message();
+ return 0;
+ }
+ seek(TMP, 0, SEEK_SET) or die "Can't seek: $!\n";
+ my $gz = Compress::Zlib::gzopen(\*TMP, "rb")
+ or die "Can't open compressed file: $!\n";
+
+ # Decompress into a second anonymous temp file.
+ my $data;
+ open($data, "+>", undef) or die $!;
+ my $buffer = " " x 4096;
+ my $bytes;
+ while(($bytes = $gz->gzread($buffer)) > 0) {
+ print $data $buffer;
+ }
+ die $gz->gzerror if($bytes < 0);
+ close TMP;
+ #my $z = new IO::Uncompress::Gunzip(\$data);
+
+ seek($data, 0, SEEK_SET) or die "Can't seek: $!\n";
+ # Blank line as "line" separator, so a "line" is a full record
+ local $/ = "";
+ while(<$data>) {
+ # Unfold RFC822-style continuation lines before matching fields.
+ s/\n\s+//gm;
+ my($pkg) = /^package:\s*(\S+)\s*$/mi or next;
+ my($ver) = /^version:\s*(\S+)\s*$/mi or next;
+ if($suite ne "unstable") {
+ $vers{$pkg} = { ver => $ver };
+ next;
+ }
+ # Extra metadata is only collected for unstable.
+ my($b_d) = /^build-depends:\s*(.+)\s*$/mi;
+ my($b_d_i) = /^build-depends-indep:\s*(.+)\s*$/mi;
+ my($maint) = /^maintainer:\s*(.+)\s*$/mi;
+ my($upldr) = /^uploaders:\s*(.+)\s*$/mi;
+ my $dm = /^dm-upload-allowed:\s*yes\s*$/mi;
+ my(@b_d, @b_d_i, @maint, @upldr);
+ @b_d = split(/\s*,\s*/, $b_d) if($b_d);
+ @b_d_i = split(/\s*,\s*/, $b_d_i) if($b_d_i);
+ @maint = split(/\s*,\s*/, $maint) if($maint);
+ @upldr = split(/\s*,\s*/, $upldr) if($upldr);
+ $vers{$pkg} = {
+ ver => $ver,
+ b_d => \@b_d,
+ b_d_i => \@b_d_i,
+ maintainer => \@maint,
+ uploaders => \@upldr,
+ dm_allowed => $dm
+ };
+ }
+ close $data;
+ }
+ return \%vers;
+}
+# Scrapes the incoming queue listing for .dsc links and returns a hashref
+# of { package => { ver => $highest_version } }.  Returns 0 on download
+# failure.
+sub get_incoming {
+ my $url = $CFG{archive}{incoming_url};
+ info("Downloading $url");
+ my $res = $ua->get($url);
+ unless($res->is_success()) {
+ warn "Can't download $url: " . $res->message();
+ return 0;
+ }
+ my $data = $res->decoded_content();
+ my %vers;
+ # $1 = package, $2 = version; keep only the highest version per package.
+ while($data =~ /<a href="([^_]+)_(.+)\.dsc">/g) {
+ debug("existing $1: $vers{$1}{ver} / $2") if(defined($vers{$1}{ver}));
+ if(!defined $vers{$1}{ver} or deb_compare($2, $vers{$1}{ver}) > 0) {
+ debug("replaced $1: $vers{$1}{ver} -> $2") if(
+ defined($vers{$1}{ver}));
+ $vers{$1}{ver} = $2;
+ }
+ }
+ return \%vers;
+}
+# Scrapes the NEW queue HTML table and returns a hashref of
+# { package => { ver => $highest_version } } for source uploads.
+# Returns 0 on download failure.
+sub get_new {
+ my $url = $CFG{archive}{new_url};
+ info("Downloading $url");
+ my $res = $ua->get($url);
+ unless($res->is_success()) {
+ warn "Can't download $url: " . $res->message();
+ return 0;
+ }
+ my $data = $res->decoded_content();
+ my $te = new HTML::TableExtract( headers => [ qw(
+ Package Version Arch Distribution Age Maintainer Closes
+ ) ]);
+ $te->parse($data);
+ my %vers;
+ foreach my $table ($te->tables) {
+ foreach my $row ($table->rows) {
+ next unless $row->[2] =~ /source/;
+ my $pkg = $row->[0];
+ # The Version cell may list several versions; keep the highest.
+ foreach(split(/\s+/, $row->[1])) {
+ next unless($_);
+ debug("existing $pkg: $vers{$pkg}{ver} / $_") if(
+ defined($vers{$pkg}{ver}));
+ if(!defined $vers{$pkg}{ver} or
+ deb_compare($_, $vers{$pkg}{ver}) > 0) {
+ debug("replaced $pkg: $vers{$pkg}{ver} -> $_") if(
+ defined($vers{$pkg}{ver}));
+ $vers{$pkg}{ver} = $_;
+ }
+ }
+ }
+ }
+ return \%vers;
+}
+1;
Added: scripts/pet/PET/BTS.pm
===================================================================
--- scripts/pet/PET/BTS.pm (rev 0)
+++ scripts/pet/PET/BTS.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,190 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: BTS.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Module for retrieving bugs from the BTS, using the SOAP interface
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package PET::BTS;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(bts_download bts_get bts_get_consolidated);
+
+use PET::Common;
+use PET::Config '%CFG';
+use PET::Cache;
+use PET::Svn;
+use SOAP::Lite;
+
+#my $maint = 'pkg-perl-maintainers at lists.alioth.debian.org';
+
+# Downloads bug status (and usertags) from the BTS via SOAP and updates the
+# "bts" cache, then regenerates the consolidated view via bts_consolidate().
+# * $force ignores cache TTLs; @pkglist restricts the update to those
+#   packages (empty list means all packages in the repo).
+# Returns the updated bts cache hashref (or {} when nothing to do).
+sub bts_download {
+ my($force, @pkglist) = @_;
+ $force ||= 0;
+ debug("bts_download($force, (@pkglist))");
+
+ my $replace = 0;
+
+ my $soap = SOAP::Lite->uri($CFG{bts}{soap_uri})->proxy(
+ $CFG{bts}{soap_proxy});
+
+ my $cdata = read_cache("bts", "", 0);
+ # Stamp 0 means the on-disk cache predates the current data version.
+ if(find_stamp($cdata, "") == 0) {
+ warn("Forcing complete update -- bts cache has old version");
+ $force = 1;
+ @pkglist = ();
+ }
+
+ my @users = split(/\s*,\s*/, $CFG{bts}{usertag_users});
+ my %usertags;
+ if(@users) {
+ if($force
+ or $CFG{bts}{ttl} * 60 < time - find_stamp($cdata, "usertags")
+ or grep({ ! $cdata->{usertags}{$_}} @users)) {
+ info("Scanning usertags");
+ foreach(@users) {
+ $usertags{$_} = $soap->get_usertag($_)->result();
+ }
+ }
+ }
+
+ my @list = ();
+ my $pkginfo = get_pkglist_hashref();
+ if(@pkglist) {
+ # A list of packages to update has been received
+ unless($force) {
+ @pkglist = grep( {
+ $CFG{bts}{ttl} * 60 < time - find_stamp($cdata, $_)
+ } @pkglist);
+ }
+ if(@pkglist) {
+ info("Downloading list of bugs for (", join(", ", @pkglist), ")");
+ @list = @{$soap->get_bugs( src => [ @pkglist ] )->result()};
+ }
+ } elsif($force or $CFG{bts}{ttl} * 60 < time - find_stamp($cdata, "")) {
+ # No list of packages; forced operation or stale cache
+ info("BTS info is stale") unless($force);
+ $replace = 1;
+ @pkglist = keys %$pkginfo;
+ # TODO: could verificate that pkglist and maint = $maint are the same
+ # packages
+ if(@pkglist) {
+ info("Downloading list of bugs of packages in the repo");
+ @list = @{$soap->get_bugs( src => [ @pkglist ] )->result()};
+ } else {
+ # Doesn't make sense to search bugs if we don't have the list
+ # of packages.
+ warn("No packages to look bugs for yet");
+ update_cache("bts", \%usertags, "usertags", 1, 0) if(%usertags);
+ return {};
+ }
+ } # If cache is up-to-date, @list will be empty
+ my $bugs_st = {};
+ if(@list) {
+ info("Downloading status for ", scalar @list, " bugs");
+ $bugs_st = $soap->get_status(@list)->result();
+ }
+
+ # Map binary package names back to their source package.
+ my %binmap;
+ foreach my $src (keys %$pkginfo) {
+ $binmap{$_} = $src foreach(@{$pkginfo->{$src}{binaries} || []});
+ }
+ my %bugs = ();
+ foreach my $bug (keys %$bugs_st) {
+ # Until #458822 is solved, we need to use our own bin -> src mapping
+ my $binname = $bugs_st->{$bug}->{package};
+ # There could be more than one package!
+ $binname =~ s/\s+//g;
+ my @binnames = split(/,/, $binname);
+ my $found = 0;
+ foreach(@binnames) {
+ my $srcname = exists $pkginfo->{$_} ? $_ : $binmap{$_} or next;
+ $bugs{$srcname}{$bug} = $bugs_st->{$bug};
+ $found++;
+ }
+ unless($found) {
+ warn("Can't find source package for $binname in bug #$bug");
+ next;
+ }
+ }
+ $bugs{usertags} = \%usertags if(%usertags);
+ # retain lock, we need consistency
+ $cdata = update_cache("bts", \%bugs, "", $replace, 1);
+ bts_consolidate($cdata, keys %$pkginfo);
+ unlock_cache("bts");
+ return $cdata;
+}
+# Rebuilds the consolidated "bts" view from the raw bug cache: filters out
+# done/ignored bugs, keeps only the interesting fields, and annotates each
+# bug with its usertags.  Writes the result via update_cache() and returns
+# its value.
+sub bts_consolidate {
+ my($bugs, @pkglist) = @_;
+ info("Re-generating consolidated hash");
+
+ # Inverted index of usertags
+ my %usertags;
+ foreach my $user (keys %{$bugs->{usertags} || {}}) {
+ next if($user =~ m#^/#);
+ foreach my $tag (keys %{$bugs->{usertags}{$user} || {}}) {
+ foreach(@{$bugs->{usertags}{$user}{$tag}}) {
+ $usertags{$_} ||= [];
+ push @{$usertags{$_}}, { user => $user, tag => $tag };
+ }
+ }
+ }
+
+ # These only depend on the configuration, so build them once instead of
+ # once per package (they were previously rebuilt inside the loop).
+ # bugs to ignore if keyword present
+ my %ign_keywords = map({ $_ => 1 }
+ split(/\s*,\s*/, $CFG{bts}{ignore_keywords}));
+ # bugs to ignore if of specified severities
+ my %ign_severities = map({ $_ => 1 }
+ split(/\s*,\s*/, $CFG{bts}{ignore_severities}));
+
+ # TODO: Interesting fields:
+ # keywords/tags, severity, subject, forwarded, date
+ my %cbugs;
+ foreach my $pkgname (@pkglist) {
+ $bugs->{$pkgname} ||= {};
+
+ $cbugs{$pkgname} = {};
+ foreach my $bug (keys %{ $bugs->{$pkgname} }) {
+ next unless(ref $bugs->{$pkgname}{$bug});
+ # Remove done bugs
+ next if($bugs->{$pkgname}{$bug}{done});
+ # Remove if severity match
+ next if($ign_severities{$bugs->{$pkgname}{$bug}{severity}});
+ # Remove if keyword match
+ my @keywords = split(/\s+/, $bugs->{$pkgname}{$bug}{keywords});
+ next if(grep({ $ign_keywords{$_} } @keywords));
+ $cbugs{$pkgname}{$bug} = {
+ keywords => $bugs->{$pkgname}{$bug}{keywords},
+ # need to use a new key for compatibility
+ keywordsA => \@keywords,
+ severity => $bugs->{$pkgname}{$bug}{severity},
+ subject => $bugs->{$pkgname}{$bug}{subject},
+ forwarded=> $bugs->{$pkgname}{$bug}{forwarded},
+ };
+ if($usertags{$bug}) {
+ $cbugs{$pkgname}{$bug}{usertags} = $usertags{$bug};
+ foreach(@{$usertags{$bug}}) {
+ $cbugs{$pkgname}{$bug}{keywords} .= " usertag:$_->{tag}";
+ push(@{$cbugs{$pkgname}{$bug}{keywordsA}},
+ "usertag:$_->{tag}");
+ }
+ }
+ }
+ }
+ update_cache("consolidated", \%cbugs, "bts", 1, 0);
+}
+# Returns the hash of bugs. Doesn't download anything.
+# The optional argument is a path below the "bts" cache root.
+sub bts_get {
+ return read_cache("bts", shift, 0);
+}
+# Returns the consolidated hash of bugs. Doesn't download anything.
+# $path optionally selects a sub-tree (e.g. a package name) below "bts".
+sub bts_get_consolidated {
+ my $path = shift || "";
+ return read_cache("consolidated", "bts/$path", 0);
+}
+1;
Added: scripts/pet/PET/Cache.pm
===================================================================
--- scripts/pet/PET/Cache.pm (rev 0)
+++ scripts/pet/PET/Cache.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,260 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Cache.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Routines for handling cache files
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package PET::Cache;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = (qw(
+ dump_cache unlock_cache read_cache update_cache find_stamp ));
+
+use PET::Config '%CFG';
+use PET::Common;
+use Storable qw(store_fd fd_retrieve);
+use Fcntl qw(:seek :flock);
+use File::Path;
+
+my %fd; # Hash of open FDs, to keep locks.
+my %memcache; # Memory cache for repeated requests
+
+# Forward declarations so the subroutines below can reference each other
+# regardless of definition order (the prototypes only affect parsing).
+sub dump_cache($;$);
+sub unlock_cache($);
+sub read_cache($;$$);
+sub update_cache($$;$$$$);
+sub clean_hash($);
+sub clean_hash_recurse($);
+sub dive_hash($;$);
+sub find_stamp($;$);
+sub find_stamp_recurse($$);
+
+# Debugging helper: pretty-prints (Data::Dumper) the contents of $cache,
+# optionally only the sub-tree at $root.  Takes a shared lock while
+# reading; always returns true.
+sub dump_cache($;$) {
+ my($cache, $root) = @_;
+ $root ||= "";
+ $root =~ s{/+$}{};
+
+ if(! defined($fd{$cache})) {
+ mkpath $CFG{common}{cache_dir};
+ open $fd{$cache}, "<", "$CFG{common}{cache_dir}/$cache"
+ or die "Error opening cache: $!\n";
+ flock($fd{$cache}, LOCK_SH) or die "Error locking cache: $!\n";
+ }
+ my $fd = $fd{$cache};
+ seek($fd, 0, SEEK_SET) or die "Can't seek: $!\n";
+ my $data = {};
+ if(-s $fd) {
+ $data = fd_retrieve($fd) or die "Can't read cache: $!\n";
+ }
+ unlock_cache($cache);
+ # Loaded lazily: Data::Dumper is only needed for this debug path.
+ require Data::Dumper;
+ print Data::Dumper::Dumper(dive_hash($data, $root));
+ 1;
+}
+# Releases any pending lock on a cache and closes the file.
+# Returns 0 if the cache was not open; true otherwise.
+sub unlock_cache($) {
+ my $cache = shift;
+ return 0 unless($fd{$cache});
+ debug("Closing $CFG{common}{cache_dir}/$cache");
+ close($fd{$cache});
+ # Set to undef (not delete) so later defined() checks see "closed".
+ $fd{$cache} = undef;
+ 1;
+}
+# Reads a Storable-serialized cache file, with optional locking and an
+# in-memory cache keyed on the file's mtime.
+sub read_cache($;$$) {
+ # * $root specifies a path inside the cache hash.
+ # * If $keep_lock, file is kept open and write-locked until the next
+ # operation.
+ #
+ # In scalar context returns the data as a hashref. In array context also
+ # returns the effective stamp as a second element. The effective
+ # stamp is the value of a "/stamp" key at the same level (or up) as
+ # $root. If there are single elements with newer stamps, they will have
+ # a "/stamp" subkey.
+ my($cache, $root, $keep_lock) = @_;
+ $root ||= "";
+ $keep_lock ||= 0;
+ debug("read_cache($cache, $root, $keep_lock) invoked");
+
+ $root = "/$root";
+ $root =~ s{/+$}{};
+
+ my $file = "$CFG{common}{cache_dir}/$cache";
+ unless(-e $file) {
+ return({}, 0) if(wantarray);
+ return {};
+ }
+ my $use_memcache = 0;
+ if(! defined($fd{$cache})) {
+ mkpath $CFG{common}{cache_dir};
+ if($keep_lock) {
+ debug("Opening $file in RW mode");
+ open $fd{$cache}, "+<", $file or die "Error opening cache: $!\n";
+ flock($fd{$cache}, LOCK_EX) or die "Error locking cache: $!\n";
+ } else {
+ # The memory cache is only valid while the file is unchanged
+ # (-M compares the modification age).
+ if($memcache{$cache} and $memcache{$cache}{mtime} == -M $file) {
+ $use_memcache = 1;
+ } else {
+ debug("Opening $file in R mode");
+ open $fd{$cache}, "<", $file or die "Error opening cache: $!\n";
+ flock($fd{$cache}, LOCK_SH) or die "Error locking cache: $!\n";
+ }
+ }
+ }
+ my $data = {};
+ if($use_memcache) {
+ $data = $memcache{$cache}{data};
+ } else {
+ my $fd = $fd{$cache};
+ seek($fd, 0, SEEK_SET) or die "Can't seek: $!\n";
+ if(-s $fd) {
+ $data = fd_retrieve($fd) or die "Can't read cache: $!\n";
+ }
+ unlock_cache($cache) unless($keep_lock);
+ $memcache{$cache} = {
+ data => $data,
+ mtime => -M $file
+ };
+ }
+ my $rootd = dive_hash($data, $root);
+ return $rootd if(not wantarray);
+ return($rootd, find_stamp($data, $root));
+}
+# Merges or replaces data in a Storable-serialized cache file under an
+# exclusive lock, stamping the updated entries.
+sub update_cache($$;$$$$) {
+ # * $root specifies a path inside the cache hash.
+ # * $data is the data to merge/replace (depending on $replace) in the cache
+ # starting from $root. Note that it's merged at the first level: so
+ # existent data inside a key won't be kept.
+ # * If $keep_lock, file is kept open and write-locked until the next
+ # operation.
+ #
+ # A $stamp is added with key "/stamp", at the $root level if $replace,
+ # inside each key if not. If no $stamp is specified, the current unix time
+ # is used.
+ #
+ # Returns the whole cache
+ my($cache, $data, $root, $replace, $keep_lock, $stamp) = @_;
+ $root ||= "";
+ $root = "/$root";
+ $root =~ s{/+$}{};
+ $replace ||= 0;
+ $keep_lock ||= 0;
+ $stamp = time unless(defined $stamp);
+ debug("update_cache($cache, $data, $root, $replace, $keep_lock, $stamp) ",
+ "invoked");
+
+ my $file = "$CFG{common}{cache_dir}/$cache";
+ if(! defined($fd{$cache})) {
+ debug("Opening $file in RW mode");
+ if(-e $file) {
+ open($fd{$cache}, "+<", $file) or die "Error opening cache: $!\n";
+ } else {
+ mkpath $CFG{common}{cache_dir};
+ open($fd{$cache}, "+>", $file) or die "Error opening cache: $!\n";
+ }
+ flock($fd{$cache}, LOCK_EX) or die "Error locking cache: $!\n";
+ }
+ my $fd = $fd{$cache};
+ seek($fd, 0, SEEK_SET) or die "Can't seek: $!\n";
+ my $cdata = {};
+ if(-s $fd) {
+ $cdata = fd_retrieve($fd) or die "Can't read cache: $!\n";
+ }
+ if($replace) {
+ # $root is re-used below as a reference to the replaced sub-tree.
+ if($root =~ m{^/*$}) {
+ $root = $cdata = $data;
+ } else {
+ $root =~ s{/+([^/]+)$}{};
+ my $leaf = $1;
+ $root = dive_hash($cdata, $root);
+ $root = ($root->{$leaf} = $data);
+ }
+ clean_hash($root);
+ $root->{"/stamp"} = $stamp;
+ # $VERSION is exported by PET::Common (data format version).
+ $root->{"/version"} = $VERSION;
+ } else {
+ $root = dive_hash($cdata, $root);
+ foreach(keys(%$data)) {
+ $root->{$_} = $data->{$_};
+ $root->{$_}{"/stamp"} = $stamp;
+ }
+ }
+ # Rewrite the whole file in place.
+ seek($fd, 0, SEEK_SET) or die "Can't seek: $!\n";
+ truncate($fd, 0) or die "Can't truncate: $!\n";
+ store_fd($cdata, $fd) or die "Can't save cache: $!\n";
+ unless($keep_lock) {
+ unlock_cache($cache);
+ $memcache{$cache} = {
+ data => $cdata,
+ mtime => -M $file
+ };
+ }
+ return $cdata;
+}
+# Deep-greps a hash looking for "magic" keys and removes them
+# (magic keys start with "/", e.g. "/stamp" and "/version").
+sub clean_hash($) {
+ my($hash) = @_;
+ debug("clean_hash($hash) invoked");
+ die "Invalid hashref" unless($hash and ref $hash and ref $hash eq "HASH");
+ clean_hash_recurse($hash);
+}
+# Recursive worker for clean_hash(); no validation, descends into any
+# plain-hashref values.
+sub clean_hash_recurse($) {
+ my($hash) = @_;
+ foreach(keys %$hash) {
+ delete $hash->{$_} if(m#^/#);
+ }
+ foreach(values %$hash) {
+ clean_hash_recurse($_) if($_ and ref $_ and ref $_ eq "HASH");
+ }
+}
+# Return a reference into $hash, as specified with $path
+# Creates or replaces any component that is not a hashref
+# (i.e. the walk autovivifies intermediate hashes).
+sub dive_hash($;$) {
+ my($hash, $path) = @_;
+ $path ||= "";
+ debug("dive_hash($hash, $path) invoked");
+ die "Invalid hashref" unless($hash and ref $hash and ref $hash eq "HASH");
+ my @path = split(m#/+#, $path);
+ my $ref = $hash;
+ foreach(@path) {
+ next unless($_);
+ my $r = $ref->{$_};
+ unless($r and ref $r and ref $r eq "HASH") {
+ $r = $ref->{$_} = {};
+ }
+ $ref = $r;
+ }
+ return $ref;
+}
+# Search a stamp in $hash, starting at $path and going upwards until the
+# root. Returns 0 if not found.
+# Remember to call it with the root of the cache, to have proper stamp and
+# version handling.
+sub find_stamp($;$) {
+ my($hash, $path) = @_;
+ $path ||= "";
+ debug("find_stamp($hash, $path) invoked");
+ die "Invalid hashref" unless($hash and ref $hash and ref $hash eq "HASH");
+ # $VERSION comes from PET::Common; a missing or older "/version" means
+ # the cache was written by older code and must be treated as stale.
+ if(not $path and (not $hash->{"/version"} or
+ $hash->{"/version"} < $VERSION)) {
+ info("find_stamp: returning 0 as cache has old version");
+ return 0;
+ }
+ return find_stamp_recurse($hash, $path);
+}
+# Recursive worker: consumes one leading path component per call; the
+# deepest "/stamp" found wins, falling back to stamps closer to the root.
+sub find_stamp_recurse($$) {
+ my($hash, $path) = @_;
+ my $ctsmp = 0;
+ if($path =~ s{^/*([^/]+)}{}) {
+ my $root = $1;
+ $ctsmp = find_stamp_recurse($hash->{$root}, $path) if($hash->{$root});
+ }
+ if(not $ctsmp and exists($hash->{"/stamp"})) {
+ $ctsmp = $hash->{"/stamp"};
+ }
+ return $ctsmp || 0;
+}
+1;
Added: scripts/pet/PET/Classification.pm
===================================================================
--- scripts/pet/PET/Classification.pm (rev 0)
+++ scripts/pet/PET/Classification.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,228 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Classification.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Module for classifying packages into problem clases. The idea is to make the
+# reporting scripts absolutely minimal, and to have a common code in different
+# report implementations.
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package PET::Classification;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(classify);
+
+use PET::Cache;
+#use PET::Common;
+use PET::Config '%CFG';
+use PET::DebVersions;
+
+# Takes a list of packages to process.
+# Returns an unique hash ready to use in reporting, keyed by package name.
+# package_name => {
+# status => { # Hash to ease lookup, empty if OK (@notes)
+# needs_upload => 1,
+# needs_upgrade => 1,
+# invalid_svn_version => 1,
+# ...
+# },
+# notes => [ ... ],
+# hilight => { # Problems indexed by highlighted item
+# archive => { needs_upload => 1, ... },
+# bts => { has_bugs => 1 }, ...
+# },
+# svn_path => "...",
+# upstream_url => "...", # Already extracted data for ease of use
+#
+# bts => {},
+# archive => {},
+# svn => {},
+# watch => {} # Copies from the caches
+# }
+
+# Maps each status flag set by classify() to the report column that should
+# be highlighted for it; %error_hilight for errors, %warn_hilight for
+# warnings.  classify() dies on any status not listed in either map.
+my %error_hilight = (
+ archive_waiting => "archive",
+ tagged_wait => "archive",
+ needs_upload => "archive",
+ never_uploaded => "archive",
+ archive_foreign => "archive",
+ has_rc_bugs => "bts",
+ has_bugs => "bts",
+ not_finished => "svn",
+ repo_ancient => "svn",
+ svn_foreign => "svn",
+ invalid_tag => "svn",
+ missing_tag => "svn",
+ name_mismatch => "svn",
+ needs_upgrade => "upstream",
+ upstream_ancient => "upstream",
+ watch_error => "upstream",
+# native => "",
+);
+my %warn_hilight = (
+ archive_nonmaint => "archive",
+ svn_nonmaint => "svn",
+ upgrade_in_progress => "svn",
+);
+
+# Classifies the given packages into problem classes by cross-checking the
+# consolidated caches (svn, archive, watch, bts).  Returns a hashref keyed
+# by package name; see the data layout documented above.
+sub classify(@) {
+ my @pkglist = @_;
+ my $data = read_cache(consolidated => "");
+ my %res = ();
+
+ foreach my $pkg (@pkglist) {
+ next if($pkg =~ /^\//);
+ my(%status, @notes);
+ # SVN versus archive
+ my $archive_ver = $data->{archive}{$pkg}{most_recent};
+ my $svn_ver = $data->{svn}{$pkg}{version};
+ my $svn_unrel_ver = $data->{svn}{$pkg}{un_version};
+ # Most recent tag; a tag equal to the svn version means "released".
+ my $tag_ver = $data->{svn}{$pkg}{tags}[-1];
+ my $tag_ok = ($tag_ver and $svn_ver
+ and not deb_compare($tag_ver, $svn_ver));
+ if($tag_ver and $svn_ver and deb_compare($tag_ver, $svn_ver) > 0) {
+ $status{invalid_tag} = 1;
+ }
+ if($tag_ver and $archive_ver
+ and deb_compare($tag_ver, $archive_ver) < 0) {
+ $status{missing_tag} = 1;
+ }
+ if(not $svn_ver or not $archive_ver) {
+ if(not $svn_ver) {
+ $status{not_finished} = 1;
+ }
+ if(not $archive_ver) {
+ if($tag_ok) {
+ $status{tagged_wait} = 1;
+ } else {
+ $status{never_uploaded} = 1;
+ # catch NEW packages that need to be uploaded
+ $status{needs_upload} = 1 if $svn_ver;
+ }
+ }
+ } elsif(deb_compare($archive_ver, $svn_ver) > 0) {
+ $status{repo_ancient} = 1;
+ push @notes, "$archive_ver > $svn_ver";
+ } elsif(deb_compare($archive_ver, $svn_ver) != 0
+ and not $svn_unrel_ver) {
+ if($tag_ok) {
+ $status{tagged_wait} = 1;
+ } else {
+ $status{needs_upload} = 1;
+ }
+ }
+ if($pkg ne $data->{svn}{$pkg}{dir}) {
+ $status{name_mismatch} = 1;
+ }
+ # SVN versus upstream
+ my $repo_mangled_ver = $data->{svn}{$pkg}{mangled_ver};
+ my $repo_unrel_mangled_ver = $data->{svn}{$pkg}{mangled_un_ver};
+ my $upstream_mangled_ver = $data->{watch}{$pkg}{upstream_mangled};
+ # watch_error from svn is not needed, as Watch.pm copies it
+ my $watch_error = $data->{watch}{$pkg}{error};
+ if($watch_error and $watch_error eq "Native") {
+ #$status{native} = 1;
+ } elsif($watch_error) {
+ $status{watch_error} = 1;
+ push @notes, "Watch problem: $watch_error";
+ } elsif((not $repo_mangled_ver and not $repo_unrel_mangled_ver)
+ or not $upstream_mangled_ver) {
+ $status{watch_error} = 1; # Should not happen
+ push @notes, "Unexpected watchfile problem";
+ } elsif($repo_mangled_ver) { # Will not check if UNRELEASED (?)
+ if(deb_compare($repo_mangled_ver, $upstream_mangled_ver) > 0) {
+ $status{upstream_ancient} = 1;
+ push @notes, "$repo_mangled_ver > $upstream_mangled_ver";
+ }
+ if(deb_compare($repo_mangled_ver, $upstream_mangled_ver) < 0) {
+ $status{needs_upgrade} = 1;
+ $status{upgrade_in_progress} = 1 if(
+ deb_compare($repo_unrel_mangled_ver,
+ $upstream_mangled_ver) == 0);
+ }
+ }
+ # Archive
+ my $archive_latest = $data->{archive}{$pkg}{most_recent_src} || "";
+ if($archive_latest =~ /new|incoming/) {
+ $status{archive_waiting} = 1;
+ }
+ if($data->{bts}{$pkg} and %{$data->{bts}{$pkg}}) {
+ $status{has_bugs} = 1;
+ foreach(keys %{$data->{bts}{$pkg}}) {
+ # enumerating non-RC severities allows automatic support for
+ # new RC severities
+ $status{has_rc_bugs} = 1
+ unless $data->{bts}{$pkg}{$_}{severity} =~ m/
+ minor|wishlist|normal|important
+ /x;
+
+ next unless($data->{svn}{$pkg}{closes}{$_});
+ next if($data->{svn}{$pkg}{closes}{$_} eq "released" and
+ not $status{needs_upload});
+ # Values is released|unreleased.
+ my $tag = "svn-".$data->{svn}{$pkg}{closes}{$_}.":pending";
+ push(@{$data->{bts}{$pkg}{$_}{keywordsA}}, $tag);
+ $data->{bts}{$pkg}{$_}{keywords} .= " $tag";
+ }
+ }
+ # Group membership checks: the group address must appear in
+ # Maintainer (else nonmaint if in Uploaders, foreign otherwise).
+ if($CFG{common}{group_email}
+ and $data->{archive}{$pkg}{control}{maintainer}
+ and $data->{archive}{$pkg}{control}{uploaders}
+ and not grep( { /<\Q$CFG{common}{group_email}\E>/ }
+ @{$data->{archive}{$pkg}{control}{maintainer}}
+ )) {
+ if(grep({ /<\Q$CFG{common}{group_email}\E>/ }
+ @{$data->{archive}{$pkg}{control}{uploaders}}
+ )) {
+ $status{archive_nonmaint} = 1;
+ } else {
+ $status{archive_foreign} = 1;
+ }
+ }
+ if($CFG{common}{group_email}
+ and $data->{svn}{$pkg}{maintainer}
+ and $data->{svn}{$pkg}{uploaders}
+ and not grep( { /<\Q$CFG{common}{group_email}\E>/ }
+ @{$data->{svn}{$pkg}{maintainer}}
+ )) {
+ if(grep({ /<\Q$CFG{common}{group_email}\E>/ }
+ @{$data->{svn}{$pkg}{uploaders}}
+ )) {
+ $status{svn_nonmaint} = 1;
+ } else {
+ $status{svn_foreign} = 1;
+ }
+ }
+ # Translate status flags into per-column highlight/warning maps.
+ my(%hilight, %warning);
+ foreach(keys %status) {
+ die "Internal error: $_ is not a valid status" unless(
+ $error_hilight{$_} or $warn_hilight{$_});
+ $hilight{$error_hilight{$_}}{$_} = 1 if($error_hilight{$_});
+ $warning{$warn_hilight{$_}}{$_} = 1 if($warn_hilight{$_});
+ }
+ $res{$pkg} = {
+ name => $pkg,
+ #
+ watch => $data->{watch}{$pkg},
+ archive => $data->{archive}{$pkg},
+ svn => $data->{svn}{$pkg},
+ bts => $data->{bts}{$pkg},
+ #
+ svn_path => $data->{svn}{$pkg}{dir},
+ upstream_url => $data->{watch}{$pkg}{upstream_url},
+ #
+ status => \%status,
+ notes => \@notes,
+ hilight => \%hilight,
+ warning => \%warning
+ };
+ }
+ return \%res;
+}
+
+1;
Added: scripts/pet/PET/Common.pm
===================================================================
--- scripts/pet/PET/Common.pm (rev 0)
+++ scripts/pet/PET/Common.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,58 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Common.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Common helper routines
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package PET::Common;
+use strict;
+use warnings;
+
+# Exports the leveled logging helpers and the cache data-format version.
+our @ISA = "Exporter";
+# NOTE(review): the exported warn() masks Perl's built-in warn in every
+# module that imports the default list.
+our @EXPORT = qw(debug info warn error $VERSION);
+#our $VERSION = join(".", q$Revision: 606 $ =~ /(\d+)/g);
+
+# Cannot use this on alioth
+#use version;
+#our $VERSION = qv("1.000");
+
+# Bump this version in case of data file change
+our $VERSION = 1.004;
+
+use PET::Config '%CFG';
+use POSIX;
+
+# Cached basename of $0, computed lazily by print_msg()
+my $basename;
+
+# Emit a log message to STDERR if $level does not exceed the configured
+# verbosity ($CFG{common}{verbose}): 0=error, 1=warn, 2=info, 3=debug.
+# Multi-line messages are split and each line is emitted separately,
+# optionally with a syslog-style "date progname[pid]:" prefix.
+sub print_msg {
+    my($level, @msg) = @_;
+    return if($level > $CFG{common}{verbose});
+    unless($basename) {
+        # Compute the program name once for the syslog-style prefix
+        $basename = $0;
+        $basename =~ s{.*/+}{};
+    }
+    @msg = split(/\n+/, join("", @msg));
+    foreach(@msg) {
+        if($CFG{common}{formatted_log}) {
+            printf(STDERR "%s %s[%d]: %s\n",
+                strftime("%b %e %H:%M:%S", localtime), $basename, $$, $_);
+        } else {
+            # Use an explicit "%s" format: the message itself may contain
+            # "%" sequences and must never be interpreted as a printf
+            # format string (the original printf(STDERR $_) did that).
+            printf(STDERR "%s", $_);
+        }
+    }
+}
+# Convenience wrappers around print_msg() for each verbosity level.
+# NOTE(review): this warn() overrides the name of Perl's built-in warn
+# for code that imports it; messages only reach STDERR when the
+# configured verbosity allows the corresponding level.
+sub error {
+    print_msg(0, @_);
+}
+sub warn {
+    print_msg(1, @_);
+}
+sub info {
+    print_msg(2, @_);
+}
+sub debug {
+    print_msg(3, @_);
+}
+1;
Added: scripts/pet/PET/Config.pm
===================================================================
--- scripts/pet/PET/Config.pm (rev 0)
+++ scripts/pet/PET/Config.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,173 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Config.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Module that holds configuration variables. Also has subroutines for parsing
+# command line options and the configuration file.
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+package PET::Config;
+
+use strict;
+use warnings;
+
+use FindBin;
+use Getopt::Long;
+
+our @EXPORT = qw(%CFG read_config getopt_common);
+our @ISA = "Exporter";
+
+# Default values
+my %defaults = (
+ pet_cgi => {
+ templates_path => "templates",
+ default_template => "by_category",
+ group_name => "Unnamed Packaging Group",
+ group_url => "http://www.debian.org/",
+ scm_web_dir => undef,
+ scm_web_file => undef,
+ # deprecated
+ viewsvn_url => undef,
+ wsvn_url => undef,
+ default_show_all => undef,
+ default_start_collapsed => undef,
+ default_hide_binaries => undef,
+ default_refresh => 1800, # 30 minutes
+ default_format => "categories", # or list
+ default_ignore_keywords => undef # comma-separated list
+ },
+ svn => {
+ repository => undef,
+ packages_path => "trunk",
+ post_path => "",
+ track_tags => 0,
+ tags_path => "tags",
+ tags_post_path => ""
+ },
+ archive => {
+ mirror => "ftp://ftp.debian.org/debian",
+ suites => "unstable, testing, stable, oldstable, experimental",
+ sections => "main, contrib, non-free",
+ suites_ttl => "360, 360, 10080, 10080, 360",
+ new_url => 'http://ftp-master.debian.org/new.html',
+ new_ttl => 60,
+ incoming_url => 'http://incoming.debian.org',
+ incoming_ttl => 60,
+ },
+ watch => {
+ ttl => 360,
+ use_cpan => 1,
+ cpan_mirror => "ftp://cpan.org/ls-lR.gz",
+ cpan_ttl => 360 # 6 hours
+ },
+ bts => {
+ ttl => 60, # 1 hour
+ soap_proxy => 'http://bugs.debian.org/cgi-bin/soap.cgi',
+ soap_uri => 'Debbugs/SOAP',
+ usertag_users => 'debian-qa at lists.debian.org',
+ ignore_keywords => "",
+ ignore_severities => ""
+ },
+ common => {
+ cache_dir => "~/.pet/cache",
+ # verbosity level: error => 0, warn => 1, info => 2, debug => 3
+ # Should be 1 by default, 0 for quiet mode
+ verbose => 1,
+ # Prepend syslog-style format?
+ formatted_log => 1,
+ group_email => undef,
+ }
+);
+our %CFG = %defaults; # Global configuration
+my %valid_cfg;
+foreach my $section (keys %defaults) {
+ $valid_cfg{$section} = { map({ $_ => 1 } keys(%{$defaults{$section}})) };
+}
+
+# Locate and parse the configuration file, populating the global %CFG.
+# Search order when no file is given: $PET_CONF, ~/.pet/pet.conf,
+# /etc/pet.conf, ./pet.conf, then pet.conf next to the script.
+# Dies on unknown sections or keys so config typos are caught early.
+sub read_config(;$) {
+    my $file = shift;
+    unless($file) {
+        if($ENV{PET_CONF}) {
+            $file = $ENV{PET_CONF};
+        } elsif($ENV{HOME} and -e "$ENV{HOME}/.pet/pet.conf") {
+            $file = "$ENV{HOME}/.pet/pet.conf";
+        } elsif(-e "/etc/pet.conf") {
+            $file = "/etc/pet.conf";
+        } elsif(-e "pet.conf") {
+            $file = "pet.conf";
+        } elsif(-e "$FindBin::Bin/pet.conf") {
+            $file = "$FindBin::Bin/pet.conf";
+        } else {
+            die "Can't find any configuration file!\n";
+        }
+    }
+    die "Can't read configuration file: $file\n" unless(-r $file);
+
+    my $section = "common";
+    open(my $cfg_fh, "<", $file) or die "Can't open $file: $!\n";
+    while(<$cfg_fh>) {
+        chomp;
+        # Strip comments; the look-behind keeps "#"/";" that are glued to
+        # a value (e.g. URLs with fragments) from being treated as comments
+        s/(?<!\S)[;#].*//;
+        s/\s+$//;
+        next unless($_);
+        if(/^\s*\[\s*(\w+)\s*\]\s*$/) {
+            $section = lc($1);
+            die "Invalid section in configuration file: $section\n" unless(
+                exists($valid_cfg{$section}));
+            next;
+        }
+        unless(/^\s*([^=]+?)\s*=\s*(.*)/) {
+            die "Unrecognised line in configuration file: $_\n";
+        }
+        my($key, $val) = ($1, $2);
+        unless(exists($valid_cfg{$section}{$key})) {
+            die("Unrecognised configuration parameter $key in section " .
+                "$section\n");
+        }
+        # Expand a leading ~/ to the user's home directory
+        if($val =~ s/^~\///) { # UGLY!
+            die "Can't use ~/ paths if \$HOME is not set!\n" unless($ENV{HOME});
+            $val = $ENV{HOME} . "/$val";
+        }
+        $CFG{$section}{$key} = $val;
+    }
+    close $cfg_fh;
+
+    ## provide backward-compatibility with instances that still use (w|view)svn
+    # file
+    $CFG{pet_cgi}{scm_web_file} ||=
+        $CFG{pet_cgi}{viewsvn_url}.'?view=markup'
+        if $CFG{pet_cgi}{viewsvn_url};
+    $CFG{pet_cgi}{scm_web_file} ||=
+        $CFG{pet_cgi}{wsvn_url}.'/?op=file&rev=0&sc='
+        if $CFG{pet_cgi}{wsvn_url};
+    # dir
+    $CFG{pet_cgi}{scm_web_dir} ||=
+        $CFG{pet_cgi}{viewsvn_url}.'/?'
+        if $CFG{pet_cgi}{viewsvn_url};
+    $CFG{pet_cgi}{scm_web_dir} ||=
+        $CFG{pet_cgi}{wsvn_url}.'/?rev=0&sc='
+        if $CFG{pet_cgi}{wsvn_url};
+    # The replacement is single-quoted, so ${pkg}/${file} stays a literal
+    # placeholder for the CGI to fill in later. Guard against undef: none
+    # of scm_web_* or the legacy URLs may be configured, and s/// on undef
+    # only produced "uninitialized value" warnings here.
+    $CFG{pet_cgi}{scm_web_file} =~ s'%s'${pkg}/${file}'
+        if defined $CFG{pet_cgi}{scm_web_file};
+    $CFG{pet_cgi}{scm_web_dir} =~ s'%s'${pkg}/${dir}'
+        if defined $CFG{pet_cgi}{scm_web_dir};
+}
+# Parses command line options, loads configuration file if specified, removes
+# arguments from @ARGV and returns a hash with the parsed options.
+# If $passthru, ignores unknown parameters and keeps them in @ARGV.
+# If $readconf, will call read_config even if the user didn't say --conf
+# Recognised options: -c/--conf FILE, -f/--force, -v/--verbose, -q/--quiet.
+sub getopt_common(;$$) {
+    my($passthru, $readconf) = @_;
+    my($conffile, $force, $v, $q) = (undef, 0, 0, 0);
+    my $p = new Getopt::Long::Parser;
+    $p->configure(qw(no_ignore_case bundling),
+        $passthru ? ("pass_through") : ());
+    $p->getoptions(
+        'conf|c=s' => \$conffile, 'force|f!' => \$force,
+        'verbose|v:+' => \$v, 'quiet|q:+' => \$q
+    ) or die("Error parsing command-line arguments\n");
+    read_config($conffile) if($conffile or $readconf);
+    # Each -v raises and each -q lowers the configured verbosity
+    $CFG{common}{verbose} += $v - $q;
+    return {
+        force => $force # only one argument for now
+    };
+}
+1;
Added: scripts/pet/PET/DebVersions.pm
===================================================================
--- scripts/pet/PET/DebVersions.pm (rev 0)
+++ scripts/pet/PET/DebVersions.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,94 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: DebVersions.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Routines for comparing package versions, based on policy + dpkg code
+# I'm not using AptPkg::Version since it depends on having a working apt and
+# dpkg, it's overly complicated and underdocumented.
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package PET::DebVersions;
+use strict;
+use warnings;
+use Carp;
+
+our @ISA = "Exporter";
+our @EXPORT = qw( deb_compare deb_compare_nofail deb_valid );
+
+# Returns true if $v looks like a valid Debian version string:
+# [epoch:]upstream-version[-debian-revision].
+# NOTE(review): in list context a successful match returns its captures,
+# so callers should only use this in boolean/scalar context.
+sub deb_valid($) {
+    my $v = shift;
+    return (defined $v and
+        $v =~ /^(?:(\d+):)?([A-Za-z0-9+.:~_-]*?)(?:-([+.~_A-Za-z0-9]+))?$/);
+}
+# Splits a Debian version into (epoch, upstream, revision); the epoch
+# defaults to 0 and the revision to "". Returns the empty list (after a
+# warning) for undefined or malformed input.
+sub deb_parse($) {
+    my $v = shift;
+    unless(defined $v) {
+        carp "Empty debian package version passed";
+        return ();
+    }
+    unless($v =~ /^(?:(\d+):)?([A-Za-z0-9+.:~_-]*?)(?:-([+.~_A-Za-z0-9]+))?$/) {
+        warn "Invalid debian package version: $v\n";
+        return ();
+    };
+    return($1 || 0, $2, $3 || "");
+}
+# Collation weight of a single character, mirroring dpkg's order():
+# end-of-string (undef) and digits weigh 0, "~" sorts before everything
+# else (-1), letters sort by ASCII value, and remaining punctuation
+# sorts after all letters (ord + 256).
+sub dpkg_order($) {
+    my $v = shift;
+    return 0 if (! defined($v) or $v =~ /[0-9]/);
+    return -1 if ($v eq '~');
+    return ord($v) if ($v =~ /[a-zA-Z]/);
+    return ord($v) + 256;
+}
+# Compare two version *parts* (upstream or revision) using dpkg's
+# verrevcmp algorithm: alternately compare the leading run of non-digit
+# characters one by one (via dpkg_order) and the leading run of digits
+# numerically, until a difference is found or both strings are consumed.
+# NOTE(review): $a/$b here are lexicals shadowing sort()'s globals.
+sub deb_verrevcmp($$) {
+    my($a, $b) = @_;
+    my($x, $y);
+    while(length($a) or length($b)) {
+        # Non-digit prefix, character by character
+        while(1) {
+            $x = length($a) ? substr($a, 0, 1) : undef;
+            $y = length($b) ? substr($b, 0, 1) : undef;
+            last unless((defined $x and $x =~ /\D/) or
+                (defined $y and $y =~ /\D/));
+            my $r = dpkg_order($x) <=> dpkg_order($y);
+            return $r if($r);
+            substr($a, 0, 1, "") if(defined $x);
+            substr($b, 0, 1, "") if(defined $y);
+        }
+        # Digit run, compared numerically; a missing run counts as 0
+        $a =~ s/^(\d*)//;
+        $x = $1 || 0;
+        $b =~ s/^(\d*)//;
+        $y = $1 || 0;
+        my $r = $x <=> $y;
+        return $r if($r);
+    }
+    return 0;
+}
+# Compare two full Debian versions (like dpkg --compare-versions):
+# epoch numerically first, then the upstream part, then the revision,
+# both via deb_verrevcmp(). Returns <0, 0 or >0, or undef when either
+# version is invalid.
+sub deb_compare($$) {
+    return undef unless(deb_valid($_[0]) and deb_valid($_[1]));
+    my @va = deb_parse($_[0]);
+    my @vb = deb_parse($_[1]);
+
+    # Epoch
+    return $va[0] <=> $vb[0] unless($va[0] == $vb[0]);
+
+    my $upstreamcmp = deb_verrevcmp($va[1], $vb[1]);
+    return $upstreamcmp unless(defined $upstreamcmp and $upstreamcmp == 0);
+
+    return deb_verrevcmp($va[2], $vb[2]);
+}
+# Comparison function safe to use directly in sort(): invalid versions
+# are never fatal, they simply sort after valid ones (invalid $_[0]
+# returns 1, invalid $_[1] returns -1).
+sub deb_compare_nofail($$) {
+    return 1 unless(deb_valid($_[0]));
+    return -1 unless(deb_valid($_[1]));
+    # Both versions are valid: delegate to deb_compare() instead of
+    # duplicating its epoch/upstream/revision comparison logic.
+    return deb_compare($_[0], $_[1]);
+}
+
+1;
Added: scripts/pet/PET/Svn.pm
===================================================================
--- scripts/pet/PET/Svn.pm (rev 0)
+++ scripts/pet/PET/Svn.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,598 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Svn.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Module for retrieving data from the SVN repository. It understands SVN
+# revisions and uses them instead of timestamps for checking cache validity. It
+# parses changelog and watch files.
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package PET::Svn;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = (qw(
+ svn_download svn_get svn_get_consolidated
+ svndir2pkgname pkgname2svndir get_pkglist get_pkglist_hashref
+ ));
+
+use IO::Scalar;
+use Digest::MD5 "md5_hex";
+use Parse::DebianChangelog;
+use PET::Cache;
+use PET::Common;
+use PET::Config '%CFG';
+use PET::DebVersions;
+use Parse::DebControl;
+use SVN::Client;
+use List::Util 'min';
+
+# shared
+# Single SVN::Client instance created at module load and reused by every
+# sub in this module.
+our $svn = SVN::Client->new();
+
+# Returns the list of changed directories
+# Refreshes the "svn" cache and rebuilds the consolidated "pkglist" and
+# "svn" caches. $force bypasses the cache stamps, $revision (optional)
+# saves one round-trip to the server, @dirlist restricts the scan to the
+# given package directories (empty list = full scan).
+sub svn_download {
+    my($force, $revision, @dirlist) = @_;
+    $force ||= 0;
+    $revision ||= 0;
+    debug("svn_download($force, $revision, (@dirlist))");
+
+    die "Missing SVN repository" unless($CFG{svn}{repository});
+    my $svnpath = $CFG{svn}{repository};
+    $svnpath =~ m{^([^/:]+)://([^/]*)/(.*)$}
+        or die "Invalid SVN repository: $svnpath";
+    my($repoproto, $repohost, $repopath) = ($1, $2, $3);
+    # Normalise the repository path: single slashes, none at either end
+    $repopath =~ s#/+#/#g;
+    $repopath =~ s{/$}{};
+    $repopath =~ s{^/}{};
+    $svnpath = "$repoproto://$repohost/$repopath";
+
+    # Starts with a slash; empty when packages_path is unset.
+    # NOTE: "my $x = EXPR if COND" has undefined behaviour in Perl, so the
+    # declaration and the conditional assignment are kept separate (this
+    # also avoids undef warnings in the substitutions below).
+    my $svnpkgpath = "";
+    $svnpkgpath = "/".$CFG{svn}{packages_path} if($CFG{svn}{packages_path});
+    $svnpkgpath =~ s#/+#/#g;
+    $svnpkgpath =~ s{/$}{};
+
+    my $svnpkgpostpath = $CFG{svn}{post_path} || "";
+    # Always has a slash if not empty
+    $svnpkgpostpath =~ s{^/*(.*?)/*$}{/$1} if($svnpkgpostpath);
+    $svnpkgpostpath =~ s#/+#/#g;
+
+    # With no explicit @dirlist this is a complete scan of the repository
+    my $complete = ! @dirlist;
+    unless(@dirlist) {
+        info("Retrieving list of directories in SVN");
+        my %dirlist = %{$svn->ls("$svnpath$svnpkgpath", 'HEAD', 0)};
+        @dirlist = grep({ $dirlist{$_}->kind() == $SVN::Node::dir }
+            keys(%dirlist));
+        info(scalar @dirlist, " directories to process");
+    }
+    unless($revision) {
+        info("Retrieving last revision number from SVN");
+        $svn->info("$svnpath$svnpkgpath", undef, "HEAD", sub {
+                $revision = $_[1]->rev();
+            }, 0);
+    }
+    unless($force) {
+        my $cdata = read_cache("svn", "", 0);
+        if(find_stamp($cdata, "")) {
+            my @new = grep({! $cdata->{$_}} @dirlist);
+            if(find_stamp($cdata, "") == $revision and not @new
+                    # don't return if consolidated caches are missing or old
+                    and find_stamp(read_cache("consolidated", "pkglist", 0))
+                    and find_stamp(read_cache("consolidated", "svn", 0))
+                    ) {
+                info("SVN cache is up-to-date");
+                return ();
+            }
+        } else { # New or old file format
+            $force = 1;
+        }
+    }
+
+    my($pkgdata, @changed) = svn_scanpackages($force, $revision, $svnpath,
+        $svnpkgpath, $svnpkgpostpath, @dirlist);
+    if($CFG{svn}{track_tags}) {
+        # Starts with a slash; empty when tags_path is unset. Same split
+        # declaration/assignment as $svnpkgpath above.
+        my $svntagpath = "";
+        $svntagpath = "/" . $CFG{svn}{tags_path} if($CFG{svn}{tags_path});
+        $svntagpath =~ s#/+#/#g;
+        $svntagpath =~ s{/$}{};
+
+        my $svntagpostpath = $CFG{svn}{tags_post_path} || "";
+        # Always has a slash if not empty
+        $svntagpostpath =~ s{^/*(.*?)/*$}{/$1} if($svntagpostpath);
+        $svntagpostpath =~ s#/+#/#g;
+
+        my $tagdata = svn_scantags($force, $revision, $svnpath, $svntagpath,
+            $svntagpostpath, @dirlist);
+        foreach(keys %$pkgdata) {
+            $pkgdata->{$_}{tags} = $tagdata->{$_} if($tagdata->{$_});
+        }
+    }
+    # Retain lock
+    my $cdata = update_cache("svn", $pkgdata, "", $complete, 1, $revision);
+
+    # Rebuild the consolidated "pkglist" cache: source package name =>
+    # { svn directory, binary packages }.
+    my @pkglist = grep({ ref $cdata->{$_} and $cdata->{$_}{pkgname} }
+        keys(%$cdata));
+    my %pkglist;
+    foreach(@pkglist) {
+        $pkglist{$cdata->{$_}{pkgname}} = {
+            svndir => $_,
+            binaries => $cdata->{$_}{binaries}
+        };
+    }
+    update_cache("consolidated", \%pkglist, "pkglist", 1, 1);
+    # Rebuild the consolidated "svn" cache, keyed by package name and
+    # stripped of the bulky fields only needed internally.
+    my %svn;
+    foreach(keys(%$cdata)) {
+        next unless ref($cdata->{$_});
+        my $pkgname = $cdata->{$_}{pkgname} or next;
+        # Shallow copy, it's enough here, but can't be used for anything else
+        $svn{$pkgname} = { %{$cdata->{$_}} };
+        $svn{$pkgname}{dir} = $_;
+        delete $svn{$pkgname}{$_} foreach(
+            qw(watch pkgname text un_text long_descr bindata)
+        );
+    }
+    update_cache("consolidated", \%svn, "svn", 1, 0);
+    unlock_cache("svn");
+    return @changed;
+}
+# Collect the (prefix-stripped, version-sorted) list of tag directories
+# for each package directory. Directories whose cache stamp is at least
+# as new as the last relevant SVN log entry keep their cached tag list;
+# the others are listed from the repository again.
+sub svn_scantags {
+    my($force, $revision, $svnpath, $prepath, $postpath, @dirlist) = @_;
+
+    info("Scanning tags from SVN");
+    my $cdata;
+    my %dirs = map({ ( $_ => 1 ) } @dirlist);
+    my %changed;
+    if($force) {
+        %changed = %dirs;
+    } else {
+        $cdata = read_cache("svn", "", 0);
+        my @candidates;
+        # Find oldest non-zero stamp, and force changed for new dirs
+        foreach(@dirlist) {
+            my $stamp = find_stamp($cdata, $_);
+            if($stamp and $cdata->{$_}{tags}) {
+                push @candidates, $stamp;
+            } else {
+                $changed{$_} = 1;
+            }
+        }
+        my $old_rev = min(@candidates);
+        if($old_rev) {
+            info("Retrieving SVN log since $old_rev");
+            my $ret = safe_svn_op($svn, log => [ "$svnpath$prepath" ], $old_rev,
+                "HEAD", 1, 1, sub {
+                    foreach (keys %{$_[0]}) {
+                        debug("Changed path: $_");
+                        $changed{$1} = 1 if(m{^\Q$prepath\E/(.*?)/});
+                    }
+                });
+            unless($ret) {
+                warn("svn log had problems!");
+                %changed = %dirs; # fallback
+            }
+        } else {
+            %changed = %dirs;
+        }
+    }
+    my %tags;
+    foreach my $dir (@dirlist) {
+        unless($changed{$dir}) {
+            # Unchanged: reuse the cached tag list
+            $tags{$dir} = $cdata->{$dir}{tags};
+            next;
+        }
+        info("Retrieving tags for $dir");
+        my $pkghome = "$svnpath$prepath/$dir$postpath";
+        my $tagdirs = safe_svn_op($svn, ls => $pkghome, 'HEAD', 0);
+        # NOTE(review): safe_svn_op can return the non-ref "0E0" on an
+        # empty-but-successful call; %{$tagdirs || {}} would then die
+        # under strict refs — TODO confirm svn ls can't return that here.
+        my @tagdirs = grep({ $tagdirs->{$_}->kind() == $SVN::Node::dir }
+            keys(%{$tagdirs || {}}));
+        # Strip common tag-name prefixes and map "_" to "." so the names
+        # become comparable versions
+        map({ s/^(?:debian_)?(?:(?:release|version)_)?//i; s/_/./g } @tagdirs);
+        @tagdirs = sort( { deb_compare_nofail($a, $b) } @tagdirs);
+        debug("Tags for $dir: @tagdirs");
+        $tags{$dir} = \@tagdirs;
+    }
+    return \%tags;
+}
+# Scan the given package directories in SVN and parse their
+# debian/control, changelog, watch file and patches. Uses the SVN log
+# since the oldest cache stamp to rescan only the directories that
+# actually changed (unless $force). Returns (\%svn, @changed): the
+# per-directory data hash and the list of rescanned directories.
+sub svn_scanpackages {
+    my($force, $revision, $svnpath, $prepath, $postpath, @dirlist) = @_;
+
+    info("Scanning packages from SVN");
+    my(%svn, %changed);
+    my %dirs = map({ ( $_ => 1 ) } @dirlist);
+    if($force) {
+        %changed = %dirs;
+    } else {
+        my $cdata = read_cache("svn", "", 0);
+        my @candidates;
+        # Find oldest non-zero stamp, and force changed for new dirs
+        foreach(@dirlist) {
+            my $stamp = find_stamp($cdata, $_);
+            if($stamp and $cdata->{$_}) {
+                push @candidates, $stamp;
+            } else {
+                $changed{$_} = 1;
+            }
+        }
+        my $old_rev = min(@candidates);
+        if($old_rev) {
+            # Now search in the SVN log to see if there's any interesting change
+            # Remove from list already updated parts of the cache
+            info("Retrieving SVN log since $old_rev");
+            my $ret = safe_svn_op($svn, log => [ "$svnpath$prepath" ], $old_rev,
+                "HEAD", 1, 1, sub {
+                    foreach (keys %{$_[0]}) {
+                        debug("Changed path: $_");
+                        $changed{$1} = 1 if(m{^\Q$prepath\E/(.*?)\Q$postpath\E/debian/(?:(?:changelog|control|watch)$|patches/[^/]+$)});
+                    }
+                });
+            unless($ret) {
+                warn("svn log had problems!");
+                %changed = %dirs; # fallback
+            }
+        } else {
+            %changed = %dirs;
+        }
+        # Copy the not-changed dirs that we want to have the stamp bumped
+        foreach(grep({ ! $changed{$_} } @dirlist)) {
+            $svn{$_} = $cdata->{$_};
+        }
+    }
+    my @changed = keys %changed;
+    foreach my $dir (@changed) {
+        $dir =~ s{^/*(.*?)/*$}{$1};
+        my $debdir = "$svnpath$prepath/$dir$postpath/debian";
+        $svn{$dir} = {};
+
+        info("Retrieving control information for $dir");
+        my $control = get_svn_file($svn, "$debdir/control");
+
+        unless($control) {
+            $svn{$dir}{error} = "MissingControl";
+            # Check if it's an invalid dir
+            safe_svn_op($svn, "ls", $debdir, 'HEAD', 0) and next;
+            info("Removing invalid $dir directory");
+            $svn{$dir} = {};
+            next;
+        }
+
+        info("Retrieving changelog for $dir");
+        my $changelog = get_svn_file($svn, "$debdir/changelog");
+
+        unless($changelog) {
+            $svn{$dir}{error} = "MissingChangelog";
+            next;
+        }
+
+        # Parse::DebControl hands back a strange structure... A hash-like
+        # thing, where [0] includes the debian/control fields for the
+        # source package and [1] for the first binary package (and, were
+        # they to exist, [2] and on for the other binary packages - which
+        # we will wisely ignore)
+        my ($ctrl_data, $short, $long);
+        $control =~ s/^#.*\n//gm; # stripComments looks like nonsense to me
+        $ctrl_data = Parse::DebControl->new->parse_mem($control, {
+                discardCase => 1 # unreliable if don't
+            });
+        ($short, $long) = split_description($ctrl_data->[1]{description});
+
+        $svn{$dir}{pkgname} = $ctrl_data->[0]{source};
+        my @section = split(/\s*\/\s*/, $ctrl_data->[0]{section});
+        unshift @section, "main" unless(@section > 1);
+        $svn{$dir}{section} = $section[0];
+        $svn{$dir}{subsection} = $section[1];
+        $svn{$dir}{maintainer} = [ split(/\s*,\s*/,
+            $ctrl_data->[0]{maintainer} || "") ];
+        $svn{$dir}{uploaders} = [ split(/\s*,\s*/,
+            $ctrl_data->[0]{uploaders} || "") ];
+        # With discardCase the parsed keys are the lower-cased control
+        # field names. The original code read the nonexistent "b_d" and
+        # "b_d_i" keys, so build-dependencies always came out empty.
+        $svn{$dir}{b_d} = [ split(/\s*,\s*/,
+            $ctrl_data->[0]{'build-depends'} || "") ];
+        $svn{$dir}{b_d_i} = [ split(/\s*,\s*/,
+            $ctrl_data->[0]{'build-depends-indep'} || "") ];
+        $svn{$dir}{std_version} = $ctrl_data->[0]{'standards-version'};
+        $svn{$dir}{short_descr} = $short;
+        $svn{$dir}{long_descr} = $long;
+        # Collect every binary package (and what they Provide)
+        my %bins;
+        foreach(1..$#$ctrl_data) {
+            my $bin = $ctrl_data->[$_];
+            my ($shd, $lnd) = split_description($bin->{description});
+            $svn{$dir}{bindata}[$_-1] = {
+                %$bin,
+                short_descr => $shd,
+                long_descr => $lnd,
+            };
+            delete $svn{$dir}{bindata}[$_-1]{description};
+            $bins{$bin->{package}} = 1;
+            if($bin->{provides}) {
+                foreach(split(/\s*,\s*/, $bin->{provides})) {
+                    $bins{$_} = 1;
+                }
+            }
+        }
+        $svn{$dir}{binaries} = [ sort keys %bins ];
+        my $parser = Parse::DebianChangelog->init({
+                instring => $changelog });
+        # Must be "||", not low-precedence "or": with "or" the assignment
+        # bound tighter and get_parse_errors() was silently discarded.
+        my $error = $parser->get_error() || $parser->get_parse_errors();
+        if($error) {
+            error($error);
+            $svn{$dir}{error} = "InvalidChangelog";
+            next;
+        }
+
+        # Pick the newest unstable/experimental entry, plus the first
+        # UNRELEASED entry seen before it (work in progress).
+        my($lastchl, $unfinishedchl);
+        foreach($parser->data()) {
+            if($_->Distribution =~ /^(?:unstable|experimental)$/) {
+                $lastchl = $_;
+                last;
+            }
+            if(! $unfinishedchl and $_->Distribution eq "UNRELEASED") {
+                $unfinishedchl = $_;
+            }
+        }
+        unless($lastchl or $unfinishedchl) {
+            $svn{$dir}{error} = "InvalidChangelog";
+            next;
+        }
+        if($lastchl) {
+            $svn{$dir}{version} = $lastchl->Version;
+            $svn{$dir}{date} = $lastchl->Date;
+            $svn{$dir}{changer} = $lastchl->Maintainer;
+            $svn{$dir}{text} = join(
+                "\n",
+                map( $lastchl->$_, qw(Header Changes Trailer) ),
+            );
+            $svn{$dir}{closes}{$_} = "released" foreach(@{$lastchl->Closes});
+        }
+        if($unfinishedchl) {
+            $svn{$dir}{un_version} = $unfinishedchl->Version;
+            $svn{$dir}{un_date} = $unfinishedchl->Date;
+            $svn{$dir}{un_changer} = $unfinishedchl->Maintainer;
+            $svn{$dir}{un_text} = join(
+                "\n",
+                map( $unfinishedchl->$_, qw(Header Changes Trailer) ),
+            );
+            $svn{$dir}{closes}{$_} = "unreleased"
+                foreach(@{$unfinishedchl->Closes});
+        }
+        if($svn{$dir}{pkgname} ne $parser->dpkg()->{Source}) {
+            $svn{$dir}{error} = "SourceNameMismatch";
+            next;
+        }
+
+        info("Retrieving watchfile for $dir");
+        my $watchdata = get_svn_file($svn, "$debdir/watch");
+        unless($watchdata) {
+            # Native packages (no "-" in the version) need no watch file
+            if($svn{$dir}{version} and $svn{$dir}{version} !~ /-/) {
+                $svn{$dir}{watch_error} = "Native";
+            } else {
+                $svn{$dir}{watch_error} = "Missing";
+            }
+            next;
+        }
+        my $watch = parse_watch($svn{$dir}{version}, $watchdata);
+        # Returns undef on error
+        unless($watch and @$watch) {
+            $svn{$dir}{watch_error} = "Invalid";
+            next;
+        }
+        # Keep the highest mangled upstream version over all watch lines
+        my @versions = sort({ deb_compare_nofail($a, $b) }
+            grep(defined, map({ $_->{mangled_ver} } @$watch)));
+
+        $svn{$dir}{mangled_ver} = $versions[-1];
+        $svn{$dir}{watch} = $watch;
+
+        # Again for unreleased
+        $watch = parse_watch($svn{$dir}{un_version}, $watchdata) if(
+            $svn{$dir}{un_version});
+        # Returns undef on error
+        if($watch and @$watch) {
+            @versions = sort({ deb_compare_nofail($a, $b) }
+                grep(defined, map({ $_->{mangled_ver} } @$watch)));
+            $svn{$dir}{mangled_un_ver} = $versions[-1];
+        }
+
+        info("Retrieving patches for $dir");
+        my $patches = safe_svn_op($svn, ls => "$debdir/patches", 'HEAD', 0);
+        if(ref $patches) {
+            my @patches = grep({ $patches->{$_}->kind() == $SVN::Node::file }
+                keys(%$patches));
+            info(scalar @patches, " files in patches/ dir");
+            # Detect the patch system from the series file name
+            my $ptype;
+            my $series = "";
+            foreach(@patches) {
+                if($_ eq "series") {
+                    $ptype = "quilt";
+                    debug("Reading quilt series file");
+                    $series = get_svn_file($svn, "$debdir/patches/series");
+                    $series =~ s/(^|\s+)#.*$//mg;
+                    last;
+                } elsif(/^00list/) {
+                    $ptype = "dpatch";
+                    debug("Reading dpatch series file");
+                    my $s = get_svn_file($svn, "$debdir/patches/$_");
+                    # NOTE that this doesn't support CPP-processed lists
+                    $s =~ s/^#.*$//mg;
+                    $series .= $s;
+                }
+            }
+            if($ptype) {
+                $svn{$dir}{patchsys} = $ptype;
+                $svn{$dir}{patches} = [];
+            } else {
+                warn("Cannot detect patch system for $dir -- ",
+                    "asuming simple-patchsys");
+                $svn{$dir}{patchsys} = "simple-patchsys";
+                $svn{$dir}{patches} = [ map({ "debian/patches/$_" } @patches) ];
+                $series = ""; # make next foreach pass-by
+            }
+            foreach(split(/\n+/, $series)) {
+                next unless $_;
+                my $p;
+                if($ptype eq "quilt") {
+                    $p = "patches/$_";
+                } elsif($ptype eq "dpatch") {
+                    $p = "patches/$_";
+                    unless(safe_svn_op($svn, "ls", "$debdir/$p", 'HEAD', 0)) {
+                        $p .= ".dpatch";
+                    }
+                } else {
+                    next;
+                }
+                # FIXME: do something useful with this
+                if(safe_svn_op($svn, "ls", "$debdir/$p", 'HEAD', 0)) {
+                    push @{$svn{$dir}{patches}}, "debian/$p";
+                } else {
+                    $svn{$dir}{patcherr} = "missing_file";
+                    warn("Patchfile $p cannot be found in $dir");
+                }
+            }
+            info(scalar @{$svn{$dir}{patches} || []}, " patches found");
+        }
+    }
+    return(\%svn, @changed);
+}
+# Returns the hash of svn info. Doesn't download anything.
+# The optional argument restricts the result to one svn directory's data.
+sub svn_get {
+    return read_cache("svn", shift, 0);
+}
+# Returns the consolidated hash of svn info. Doesn't download anything.
+# The optional $path selects a sub-tree of the consolidated svn cache.
+sub svn_get_consolidated {
+    my $path = shift || "";
+    return read_cache("consolidated", "svn/$path", 0);
+}
+# Searches the source package name given a svn directory name
+# Returns undef if not found
+sub svndir2pkgname($) {
+    my $dir = shift;
+    my $data = read_cache("svn", $dir, 0);
+    # The per-directory cache entry carries the source package name
+    return $data->{pkgname};
+}
+# Searches the svn directory name given a source package name
+# Returns undef if not found (if several directories claim the same
+# source package, an arbitrary one of them is returned)
+sub pkgname2svndir($) {
+    my($wanted) = @_;
+    my $cache = read_cache("svn", "", 0);
+    foreach my $dir (keys %$cache) {
+        my $entry = $cache->{$dir};
+        next unless(ref $entry and $entry->{pkgname});
+        return $dir if($entry->{pkgname} eq $wanted);
+    }
+    return undef;
+}
+# Returns the list of source packages detected in the svn repository
+sub get_pkglist {
+    return keys %{ get_pkglist_hashref() };
+}
+# Returns the consolidated pkglist cache (package name => svndir/binaries),
+# with the internal stamp entries (keys starting with "/") removed.
+sub get_pkglist_hashref {
+    my $pkgs = read_cache("consolidated", "pkglist", 0);
+    # Drop the cache-stamp entries in a single hash-slice delete
+    delete @{$pkgs}{ grep({ m{^/} } keys %$pkgs) };
+    return $pkgs;
+}
+# Parses watchfile, returns an arrayref containing one element for each source,
+# consisting of the URL spec, an MD5 sum of the line (to detect changes from
+# the watch module), the mangled debian version, and a hash of options.
+# Returns undef when a dversionmangle rule fails to compile.
+sub parse_watch($$) {
+    my($version, $watch) = @_;
+    $version ||= '';
+    $watch ||= '';
+    debug("parse_watch('$version', '...')");
+
+    # Strip epoch and debian release
+    $version =~ s/^(?:\d+:)?(.+?)(?:-[^-]+)?$/$1/;
+
+    # Remove comments, line continuations and surrounding whitespace
+    $watch =~ s/^#.*$//gm;
+    $watch =~ s/\\\n//gs;
+    $watch =~ s/^\s+//gm;
+    $watch =~ s/\s+$//gm;
+    my @watch_lines = split(/\n/, $watch);
+    @watch_lines = grep((!/^version\s*=/ and !/^\s*$/),
+        @watch_lines);
+
+    my @wspecs;
+    foreach(@watch_lines) {
+        debug("Watch line: $_");
+
+        # opts either contain no spaces, or is enclosed in double-quotes
+        my $opts;
+        $opts = $1 if(s!^\s*opt(?:ion)?s="([^"]*)"\s+!!
+                or s!^\s*opt(?:ion)?s=(\S*)\s+!!);
+        debug("Watch line options: $opts") if($opts);
+
+        # several options are separated by comma and commas are not allowed
+        # within
+        my(@opts, %opts);
+        @opts = split(/\s*,\s*/, $opts) if($opts);
+        foreach(@opts) {
+            next if /^(?:active|passive|pasv)$/;
+            # Skip malformed (key-less) options instead of reusing stale
+            # $1/$2 from a previous successful match, as the unchecked
+            # match in the original code did.
+            next unless(/([^=]+)=(.*)/);
+            my($k, $v) = ($1, $2);
+            debug("Watch option $k: $v");
+            if($k eq 'versionmangle') {
+                # versionmangle is shorthand for both mangle directions
+                push @{$opts{uversionmangle}}, $v;
+                push @{$opts{dversionmangle}}, $v;
+            } else {
+                push @{$opts{$k}}, $v;
+            }
+        }
+        my $mangled = $version;
+        if($version and $opts{dversionmangle}) {
+            foreach(split(/;/, join(";", @{$opts{dversionmangle}}))) {
+                debug("Executing \$mangled =~ $_");
+                # NOTE(review): string eval of an s/// rule taken from the
+                # package's own debian/watch file (same trust model as
+                # uscan); a broken rule aborts only this watchfile.
+                eval "\$mangled =~ $_";
+                if($@) {
+                    error("Invalid watchfile: $@");
+                    return undef;
+                }
+            }
+        }
+        debug("Mangled version: $mangled");
+        push @wspecs, {
+            line => $_,
+            mangled_ver => $mangled,
+            md5 => md5_hex(($opts || "").$_),
+            opts => \%opts
+        };
+    }
+    return \@wspecs;
+}
+# Fetch a single file from the repository via "svn cat" and return its
+# contents as a string; returns undef when the path does not exist.
+sub get_svn_file($$) {
+    my($svn, $target) = @_;
+    my $data;
+    # svn cat writes to a filehandle; capture it in an in-memory scalar.
+    # (The unused $svn_error local from the original was dropped.)
+    my $fh = IO::Scalar->new(\$data);
+    safe_svn_op($svn, "cat", $fh, $target , 'HEAD');
+    return $data;
+}
+# Call an SVN::Client method with error trapping: "file not found"
+# errors are mapped to a false return (0), any other SVN error croaks.
+# Returns the method's first return value, or the true-but-zero string
+# "0E0" when the call succeeded but returned nothing.
+sub safe_svn_op($$@) {
+    my($svn, $op, @opts) = @_;
+    # Disable the global handler so errors are returned, not thrown
+    local $SVN::Error::handler = undef;
+    # A method name held in a scalar is a valid dynamic method call, so
+    # the stringy eval of the original is not needed; the block eval
+    # still catches any die from the bindings.
+    my ($svn_out) = eval { $svn->$op(@opts) };
+    if(SVN::Error::is_error($svn_out)) {
+        if($svn_out->apr_err() == $SVN::Error::FS_NOT_FOUND) {
+            $svn_out->clear();
+            return 0;
+        } else {
+            SVN::Error::croak_on_error($svn_out);
+        }
+    }
+    return $svn_out || "0E0";
+}
+
+sub split_description($) {
+    # The 'description' field in debian/control is, IMHO, wrongly handled - Its
+    # first line is the short description, and the rest (second to last lines)
+    # is the long description. So... Here we just split it, for proper
+    # handling.
+    #
+    # Gets the full description as its only parameter, returns the short and
+    # the long descriptions.
+    #
+    # FIXME(review): if the description contains no newline, index() returns
+    # -1, so $short drops the last character and $long becomes the whole
+    # string — TODO confirm single-line descriptions cannot reach this point.
+    my ($str, $offset, $short, $long);
+    $str = shift;
+    $offset = index($str, "\n");
+    $short = substr($str, 0, $offset);
+    $long = substr($str, $offset+1);
+    return ($short, $long);
+}
+
+1;
Added: scripts/pet/PET/Watch.pm
===================================================================
--- scripts/pet/PET/Watch.pm (rev 0)
+++ scripts/pet/PET/Watch.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,494 @@
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: Watch.pm 610 2008-08-30 06:50:16Z tincho $
+#
+# Module for scanning watch files and checking upstream versions.
+#
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+
+package PET::Watch;
+use strict;
+use warnings;
+
+our @ISA = "Exporter";
+our @EXPORT = qw(watch_download watch_get watch_get_consolidated);
+
+use Compress::Zlib ();
+use CPAN::DistnameInfo;
+use PET::Cache;
+use PET::Common;
+use PET::Config '%CFG';
+use PET::Svn;
+use PET::DebVersions;;
+use Fcntl qw(:seek);
+use LWP::UserAgent;
+
# Recognises CPAN/backpan mirror URLs: capture 1 is the mirror root URL,
# capture 2 the area (dist, modules/by-module, authors/id).
my $cpanregex = qr#^((?:http|ftp)://\S*(?:cpan|backpan)\S*)/(dist|modules/by-module|(?:by-)?authors/id)\b#i;

# Shared user agent for every download performed by this module.
# Direct method call instead of the indirect "new LWP::UserAgent" syntax.
my $ua = LWP::UserAgent->new;
$ua->timeout(30);
$ua->env_proxy;
+
# watch_download($force, @pkglist)
#
# Refresh the per-watch-line "watch" cache and then the "watch" section
# of the consolidated cache.  With an empty @pkglist every package from
# get_pkglist() is scanned; a true $force bypasses the per-line TTL.
# Phase 1 runs uscan() on every stale watch line (keyed by the line's
# md5); phase 2 folds the per-line results into one best entry per
# package.  Returns nothing useful; all results go into the caches.
sub watch_download {
    my($force, @pkglist) = @_;
    $force ||= 0;
    debug("watch_download($force, (@pkglist))");

    # Optionally refresh the local CPAN caches used to short-cut lookups.
    if($CFG{watch}{use_cpan}) {
        cpan_dist_download($force);
        cpan_index_download($force);
    }
    # A cache without a root stamp predates the current format: redo all.
    if(find_stamp(watch_get(), "") == 0) {
        warn("Forcing complete update -- watch cache has old version");
        @pkglist = ();
        $force = 1;
    }
    my $complete;
    if(not @pkglist) {
        $complete = 1;
        @pkglist = get_pkglist();
    }
    my $cdata;
    $cdata = watch_get() unless($force);
    my(%watch, $some_uptodate, @updated);
    # Phase 1: refresh the per-line lookups.
    foreach my $pkg (@pkglist) {
        debug("Retrieving watchfile from svn for $pkg");
        my $svndata = svn_get(pkgname2svndir($pkg));
        # No usable parsed watch lines: skip phase 1 for this package but
        # remember it, so phase 2 records the error state.
        unless(not $svndata->{watch_error}
                and $svndata->{watch} and ref $svndata->{watch}
                and ref $svndata->{watch} eq "ARRAY"
                and @{$svndata->{watch}}) {
            push @updated, $pkg; # Those have no stamp, so force rescan
            next; # Errors will be set in consolidated
        }

        my(@wresult);
        foreach my $wline (@{$svndata->{watch}}) {
            my $md5 = $wline->{md5};
            next unless($md5);
            # Reuse a cached lookup younger than the configured TTL.
            if(not $force and $cdata->{$md5} and
                    $CFG{watch}{ttl} * 60 > time - find_stamp($cdata, $md5)) {
                $some_uptodate = 1;
                next;
            }
            my ($watcherr, %uscand) = uscan($wline->{line}, %{$wline->{opts}});
            unless($watcherr) {
                info("Found: version $uscand{upstream_version} ",
                    "from $uscand{upstream_url} ",
                    "(mangled: $uscand{upstream_mangled})");
            }
            $watch{$md5} = { error => $watcherr, %uscand };
            push @updated, $pkg;

            # Validate both versions before trusting the result.
            if(not $watch{$md5}{upstream_mangled}) {
                $watch{$md5}{error} ||= "Error";
            } elsif(not deb_valid($watch{$md5}{upstream_mangled})) {
                $watch{$md5}{error} ||= "InvalidUpstreamVersion";
            } elsif($wline->{mangled_ver}
                    and not deb_valid($wline->{mangled_ver})) {
                $watch{$md5}{error} ||= "InvalidDebianVersion";
            }
            warn("Error while processing $pkg watch file: ",
                $watch{$md5}{error}) if($watch{$md5}{error});
        }
    }
    info("watch: ", scalar @pkglist, " packages scanned");
    # Nothing changed and the consolidated watch data is at least as
    # fresh as the package list: skip the rebuild below.
    if(not @updated and (
            find_stamp(read_cache("consolidated", "pkglist", 0)) <=
            find_stamp(read_cache("consolidated", "watch", 0)))) {
        info("Watch cache is up-to-date");
        return;
    }
    $cdata = update_cache("watch", \%watch, "",
        ($complete and not $some_uptodate), 1);
    # Start consolidated build
    my %watch2;
    unless($complete) {
        # Only re-process updated entries
        @pkglist = @updated;
    }
    # Phase 2: pick one result (or error) per package.
    foreach my $pkg (@pkglist) {
        my $svndata = svn_get(pkgname2svndir($pkg));
        $watch2{$pkg} = {};

        # Record why no upstream information is available, if so.
        if($svndata->{watch_error}) {
            $watch2{$pkg}{error} = $svndata->{watch_error};
        } elsif(not $svndata->{watch} or not ref $svndata->{watch}
                or not ref $svndata->{watch} eq "ARRAY") {
            $watch2{$pkg}{error} = "Missing";
        } elsif(not @{$svndata->{watch}}) {
            $watch2{$pkg}{error} = "Empty";
        }
        next if($watch2{$pkg}{error});

        my(@wresult, $error);
        foreach my $wline (@{$svndata->{watch}}) {
            my $md5 = $wline->{md5};
            next unless($md5);
            if($cdata->{$md5}{error}) {
                $error = $cdata->{$md5}{error};
                next;
            }
            next unless($cdata->{$md5}{upstream_mangled});
            # diff = deb_compare(debian, upstream); negative presumably
            # means the packaged version is older -- see PET::DebVersions.
            if($wline->{mangled_ver}) {
                push @wresult, {
                    diff => deb_compare($wline->{mangled_ver},
                        $cdata->{$md5}{upstream_mangled}),
                    %{$cdata->{$md5}}
                };
            } else { # There's no debian version
                push @wresult, {
                    diff => -1,
                    %{$cdata->{$md5}}
                };
            }
        }
        unless(@wresult) {
            $watch2{$pkg} = { error => $error || "MissingData?" };
            next;
        }
        # Sort by upstream version; prefer the newest out-of-date entry,
        # then any up-to-date entry, then whatever is left.
        @wresult = sort({
            deb_compare_nofail($a->{upstream_mangled},
                $b->{upstream_mangled}) } @wresult);
        my @result;
        if(@result = grep({ $_->{diff} < 0 } @wresult)) {
            $watch2{$pkg} = $result[-1];
        } elsif(@result = grep( { not $_->{diff} } @wresult)) {
            $watch2{$pkg} = $result[0];
        } else {
            $watch2{$pkg} = $wresult[0];
        }
        delete($watch2{$pkg}{diff}) unless($watch2{$pkg}{diff});
        delete($watch2{$pkg}{error}) unless($watch2{$pkg}{error});
    }

    update_cache("consolidated", \%watch2, "watch", $complete, 0);
    unlock_cache("watch");
}
+# Returns the hash of bugs. Doesn't download anything.
# Return the cached watch data below the given sub-path (undef for the
# whole cache).  Purely a cache read; never downloads anything.
sub watch_get {
    my $path = shift;
    return read_cache("watch", $path, 0);
}
+# Returns the consolidated hash of bugs. Doesn't download anything.
# Return the consolidated per-package watch results below $path (the
# whole "watch" section when no path is given).  Purely a cache read;
# never downloads anything.
sub watch_get_consolidated {
    my $subpath = shift || "";
    return read_cache("consolidated", "watch/" . $subpath, 0);
}
# uscan($wline, %opts) - process one watch-file line.
# Scans the upstream location (via the CPAN caches when possible, else
# recurse_dirs()) for available versions, applies any uversionmangle
# rules, and returns either
#   ("ErrorString")                       on failure, or
#   (undef, %info)                        on success, where %info carries
# upstream_version/upstream_url of the highest version found, plus
# upstream_dir and upstream_mangled.
#
# Fix: the original prototype ($) was wrong -- this sub is always called
# with a line plus an options hash (and the prototype was unenforced
# anyway, since the only call site precedes this definition) -- so it
# has been dropped.
sub uscan {
    my($wline, %opts) = @_;
    info("Processing watch line $wline");

    # sf.net is special-cased through the qa.debian.org redirector.
    $wline =~ s{^http://sf\.net/(\S+)}{http://qa.debian.org/watch/sf.php/$1};
    # Fix URIs with no path
    $wline =~ s{^(\w+://[^\s/]+)(\s|$)}{$1/$2};
    unless($wline =~ m{^(?:(?:https?|ftp)://\S+?)/}) {
        warn("Invalid watch line: $wline");
        return("Invalid");
    }
    my @items = split(/\s+/, $wline);

    my($dir, $filter);
    # Either we have single URL/pattern
    # or URL/pattern + extra
    if($items[0] =~ /\(/) {
        # Since '+' is greedy, the second capture has no slashes
        ($dir, $filter) = $items[0] =~ m{^(.+/)(.+)$};
    } elsif(@items >= 2 and $items[1] =~ /\(/) {
        # or, we have a homepage plus pattern
        # (plus optional other non-interesting stuff)
        ($dir, $filter) = @items[0,1];
    }
    unless($dir and $filter) {
        return("Invalid");
    }
    debug("uscan $dir $filter");
    my @vers;
    if($CFG{watch}{use_cpan} and $dir =~ $cpanregex) {
        @vers = cpan_lookup($dir, $filter);
        my $status = shift @vers;
        if($status) {
            warn("CPAN lookup failed for $dir + $filter: $status");
            return $status;
        } elsif(not @vers) {
            warn("CPAN lookup failed for $dir + $filter");
        }
    }
    # Fall back to scanning the upstream site directly.
    unless(@vers) {
        @vers = recurse_dirs($filter, $dir);
        my $status = shift @vers;
        return $status || "NotFound" unless(@vers);
    }

    my @mangled;
    foreach my $uver (@vers) {
        push @mangled, $uver->{upstream_version};
        next unless($opts{uversionmangle});
        debug("Mangle option: ", join(", ", @{$opts{uversionmangle}}));
        foreach(split(/;/, join(";", @{$opts{uversionmangle}}))) {
            # NOTE: mangle rules are s///-style code taken from the
            # repository's watch file and executed via string eval.
            debug("Executing '\$mangled[-1] =~ $_'");
            eval "\$mangled[-1] =~ $_";
            if($@) {
                error("Invalid watchfile: $@");
                return("Invalid");
            }
        }
        debug("Mangled version: $mangled[-1]");
    }
    # Report the entry with the highest mangled version.
    my @order = sort({ deb_compare_nofail($mangled[$a], $mangled[$b]) }
        (0..$#vers));
    return(undef,
        %{$vers[$order[-1]]},
        upstream_dir => $dir,
        upstream_mangled => $mangled[$order[-1]]);
}
# recurse_dirs($filter, $base) - scan an upstream directory for versions.
# When the *directory* part of $base itself contains capture groups
# (version patterns in path components), recurses first to resolve each
# such component to its highest version.  Then downloads the final page
# and collects links matching $filter.  Returns ("ErrorString") on
# failure, or (undef, @versions) where each element is
# { upstream_version => ..., upstream_url => ... }.
sub recurse_dirs($$);   # forward declaration: the sub calls itself
sub recurse_dirs($$) {
    my($filter, $base) = @_;
    debug("recurse_dirs($filter, $base)");

    if($base =~ /\(/) {
        # Split $base into: trailing components without parens (kept as
        # $staticpart), the right-most component containing a paren
        # (becomes the new filter), and the remaining prefix to recurse on.
        my($newfilt, $staticpart) = ("", "");
        while($base =~ s#/[^/(]*$##) {
            $staticpart = "$&$staticpart";
        }
        $base =~ s#([^/]*\([^/]*)$## or die "Can't happen!!";
        $newfilt = "$1/?";
        debug("After stripping (): $newfilt, $base, remains: $staticpart");
        my ($status, @data) = recurse_dirs($newfilt, $base);
        return $status unless(@data);
        # Continue from the highest-versioned directory found.
        @data = sort({ deb_compare_nofail($a->{upstream_version},
            $b->{upstream_version}) } @data);
        $base = $data[-1]{upstream_url} . $staticpart;
        debug("Return from recursion: $base");
    }
    unless($base =~ m{(^\w+://[^/]+)(/.*?)/*$}) {
        error("Invalid base: $base");
        return("Invalid");
    }
    my $site = $1;   # scheme://host
    my $path = $2;   # path with trailing slashes stripped
    my $pattern;
    # Absolute filters are anchored at the site root, relative ones at
    # the current directory; full-URL filters are used as-is.
    if($filter =~ m{^/}) {
        $pattern = qr{(?:^\Q$site\E)?$filter};
    } elsif($filter !~ m{^\w+://}) {
        $pattern = qr{(?:(?:^\Q$site\E)?\Q$path\E/)?$filter};
    } else {
        $pattern = $filter;
    }

    debug("Downloading $base");
    my $res = $ua->get($base);
    unless($res->is_success) {
        error("Unable to get $base: " . $res->message());
        return ("NotFound") if($res->code == 404);
        return ("DownloadError");
    }
    my $page = $res->decoded_content();
    $page =~ s/<!--.*?-->//gs;  # drop HTML comments
    $page =~ s/\n+/ /gs;        # flatten, so matches can't span newlines

    # FTP listings are whitespace-separated names; HTML pages are
    # scanned for href attributes (double-, single- and un-quoted).
    my @candidates;
    if($base =~ /^ftp/) {
        @candidates = split(/\s+/, $page);
    } else {
        @candidates = grep(defined, ($page =~
            m{<a\s[^>]*href\s*=\s*(?:"([^"]+)"|'([^']+)'|([^"][^\s>]+))}gi));
    }
    my @vers;
    foreach my $url (grep(m{^$pattern$}, @candidates)) {
        # Join all captures: multi-group patterns become dotted versions.
        my $ver = join(".", ($url =~ m{^$pattern$}));
        if($ver =~ s#/+$##) { # Can't find a better way
            $url =~ s#\Q$&\E$##; # Remove the same
        }
        # Normalise relative and host-absolute links into full URLs.
        if($url =~ m{^/}) {
            $url = $site . $url;
        } elsif($url !~ m{^\w+://}) {
            $url = $site . $path . "/" . $url;
        }
        push @vers, {
            upstream_version => $ver,
            upstream_url => $url };
    }
    debug("Versions found: ", join(", ", map({ $_->{upstream_version} }
        @vers)));
    return(undef, @vers);
}
+
# cpan_lookup($dir, $filter) - resolve a watch line against the local
# CPAN caches instead of hitting the mirror over the network.
# Returns () when the URL is not covered by the caches (the caller then
# falls back to a normal uscan-style scan), ("ErrorString") on a hard
# error, or (undef, @versions) with upstream_version/upstream_url hashes.
sub cpan_lookup($$) {
    my($dir, $filter) = @_;

    $dir =~ $cpanregex or return ();
    my $base = $1;   # mirror root URL
    my $type = $2;   # dist | modules/by-module | (by-)authors/id
    $dir =~ s{/+$}{};
    my $origdir = $dir;

    # Collapse the area into one of: dist, modules, authors.
    $type =~ s/.*(dist|modules|authors).*/$1/ or return ();
    my $cpan;
    if($type eq "dist") {
        # dist layout: only the basename of the pattern is matched.
        $filter =~ s/.*\///;
        $cpan = cpan_dist_download();
    } else {
        $cpan = cpan_index_download()->{$type};
    }
    # Reduce $dir to the path below the area, the cache key.
    $dir =~ s/$cpanregex//i;
    $dir =~ s{^/+}{};
    debug("Looking for $dir + $filter into CPAN $type cache");
    #return ("NotFound") unless(exists($cpan->{$dir}));
    # Allow this to gracefully degrade to a normal uscan check
    return () unless(exists($cpan->{$dir}));

    my @res;
    foreach(keys %{$cpan->{$dir}}) {
        next unless ($_ =~ $filter);
        # NOTE(review): assumes $filter always carries a capture group
        # for the version (true for valid uscan patterns) -- $1 would
        # otherwise be stale here.
        my $filt_ver = $1;
        if($type eq "dist") {
            my $cpan_ver = $cpan->{$dir}{$_}{version};
            if($filt_ver ne $cpan_ver) {
                # Try to remove initial "v"s, if any
                $cpan_ver =~ s/^v//;
            }
            if($filt_ver ne $cpan_ver) {
                warn("Version mismatch: uscan says $filt_ver, ",
                    "cpan says $cpan_ver");
                return ("VersionMismatch");
            }
        }
        push @res, {
            upstream_version => $filt_ver,
            upstream_url => (
                $type eq "dist" ?
                "$base/CPAN/authors/id/" . $cpan->{$dir}{$_}{path} :
                "$origdir/$_"
            )
        };
    }
    # Allow this to gracefully degrade to a normal uscan check
    #return ("NotFound") unless(@res);
    return (undef, @res);
}
# cpan_dist_download([$force]) - build or refresh the CPAN dists cache.
# Fetches modules/02packages.details.txt.gz from the configured mirror
# and maps dist-name -> tarball-filename -> { path, version }.  Unless
# $force is true, a cache younger than watch/cpan_ttl minutes is
# returned as-is.  Returns the cache hashref, or 0 on download failure.
sub cpan_dist_download(;$) {
    my $force = shift;
    unless($force) {
        my $cpan = read_cache("cpan_dists", "", 0);
        if($CFG{watch}{cpan_ttl} * 60 > time - find_stamp($cpan, "")) {
            return $cpan;
        }
    }

    my $url = $CFG{watch}{cpan_mirror} . "/modules/02packages.details.txt.gz";
    info("Rebuilding CPAN dists cache from $url");
    # Anonymous temp file with a lexical handle (was the bareword TMP,
    # a package global shared by both download subs).
    open(my $tmp, "+>", undef) or die $!;
    my $res = $ua->get($url, ":content_cb" => sub {
        print {$tmp} $_[0] or die $!;
    });
    unless($res->is_success()) {
        warn "Can't download $url: " . $res->message();
        return 0;
    }
    seek($tmp, 0, SEEK_SET) or die "Can't seek: $!\n";
    my $gz = Compress::Zlib::gzopen($tmp, "rb")
        or die "Can't open compressed file: $!\n";

    # Decompress into a second anonymous temp file.
    my $data;
    open($data, "+>", undef) or die $!;
    my $buffer;
    my $bytes;
    while(($bytes = $gz->gzread($buffer)) > 0) {
        print $data $buffer or die $!;
    }
    die $gz->gzerror if($bytes < 0);
    close $tmp;

    seek($data, 0, SEEK_SET) or die "Can't seek: $!\n";

    # Skip the email-style header; it ends at the first blank line.
    while(<$data>) {
        chomp;
        last if(/^$/);
    }
    my $cpan = {};
    while(<$data>) {
        chomp;
        # Index lines: "Module::Name version A/AU/AUTHOR/Dist-1.23.tar.gz"
        my $tarball = (split)[2];
        my $distinfo = CPAN::DistnameInfo->new($tarball);
        my $distname = $distinfo->dist();
        unless($distname) {
            info("Invalid CPAN distribution: $tarball");
            next;
        }
        my $version = $distinfo->version();
        my $filename = $distinfo->filename();

        $cpan->{$distname}{$filename} = {
            path => $tarball,
            version => $version
        };
    }
    close $data;
    update_cache("cpan_dists", $cpan, "", 1);
    return $cpan;
}
# cpan_index_download([$force]) - build or refresh the CPAN indices cache.
# Fetches indices/ls-lR.gz from the configured mirror and records which
# files exist as {authors|modules}->{dir}->{file} = 1.  Unless $force is
# true, a cache younger than watch/cpan_ttl minutes is returned as-is.
# Returns the cache hashref, or 0 on download failure.
sub cpan_index_download(;$) {
    my $force = shift;
    unless($force) {
        my $cpan = read_cache("cpan_index", "", 0);
        if($CFG{watch}{cpan_ttl} * 60 > time - find_stamp($cpan, "")) {
            return $cpan;
        }
    }

    my $url = $CFG{watch}{cpan_mirror} . "/indices/ls-lR.gz";
    info("Rebuilding CPAN indices cache from $url");
    # Anonymous temp file with a lexical handle (was the bareword TMP,
    # a package global shared by both download subs).
    open(my $tmp, "+>", undef) or die $!;
    my $res = $ua->get($url, ":content_cb" => sub {
        print {$tmp} $_[0] or die $!;
    });
    unless($res->is_success()) {
        warn "Can't download $url: " . $res->message();
        return 0;
    }
    seek($tmp, 0, SEEK_SET) or die "Can't seek: $!\n";
    my $gz = Compress::Zlib::gzopen($tmp, "rb")
        or die "Can't open compressed file: $!\n";

    # Decompress into a second anonymous temp file.
    my $data;
    open($data, "+>", undef) or die $!;
    my $buffer;
    my $bytes;
    while(($bytes = $gz->gzread($buffer)) > 0) {
        print $data $buffer or die $!;
    }
    die $gz->gzerror if($bytes < 0);
    close $tmp;

    seek($data, 0, SEEK_SET) or die "Can't seek: $!\n";

    my $cpan = {};
    my($dir, $type);
    while(<$data>) {
        chomp;
        # "path/to/dir:" lines open a new directory section.
        if(/^(.+):$/) {
            my $subdir = $1;
            $type = undef;
            $subdir =~ m{/.*(authors/id|modules/by-module)/+(.*?)/*$} or next;
            $dir = $2;
            $1 =~ /(authors|modules)/ and $type = $1;
            next;
        }
        # Only world-readable files/symlinks inside a known section.
        next unless($type and /^[-l]r.....r/);
        s/ -> .*//;   # strip symlink targets
        my $file = (split)[8];
        $file =~ m{\.(?:bz2|gz|zip|pl|pm|tar|tgz)$}i or next;
        $cpan->{$type}{$dir}{$file} = 1;
    }
    close $data;
    update_cache("cpan_index", $cpan, "", 1);
    return $cpan;
}
+1;
Added: scripts/pet/Parse/DebControl.pm
===================================================================
--- scripts/pet/Parse/DebControl.pm (rev 0)
+++ scripts/pet/Parse/DebControl.pm 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,811 @@
+package Parse::DebControl;
+
+###########################################################
+# Parse::DebControl - Parse debian-style control
+# files (and other colon key-value fields)
+#
+# Copyright 2003 - Jay Bonci <jaybonci at cpan.org>
+# Licensed under the same terms as perl itself
+#
+###########################################################
+
+use strict;
+use IO::Scalar;
+use Compress::Zlib;
+use LWP::UserAgent;
+
+use vars qw($VERSION);
+$VERSION = '2.005';
+
# Constructor.  An optional true $debug argument enables verbose
# warnings, exactly as a later call to DEBUG() would.
sub new {
    my ($class, $debug) = @_;
    my $self = bless {}, $class;
    $self->DEBUG() if $debug;
    return $self;
}
+
sub parse_file {
    # Parse a debian-control-style file from disk.
    # Returns an arrayref of stanza hashrefs (see _parseDataHandle), or
    # undef -- with a debug warning -- on a missing or unreadable file.
    my ($this, $filename, $options) = @_;
    unless($filename)
    {
        $this->_dowarn("parse_file failed because no filename parameter was given");
        return;
    }

    my $fh;
    # Three-arg open: the original two-arg form would let a filename
    # like ">foo" or "cmd|" change the open mode.
    unless(open($fh, '<', $filename))
    {
        $this->_dowarn("parse_file failed because $filename could not be opened for reading");
        return;
    }

    return $this->_parseDataHandle($fh, $options);
};
+
sub parse_mem {
    # Parse control data held in a scalar by wrapping it in an in-memory
    # IO::Scalar handle and delegating to _parseDataHandle.
    my ($this, $data, $options) = @_;

    unless($data)
    {
        $this->_dowarn("parse_mem failed because no data was given");
        return;
    }

    my $handle = IO::Scalar->new(\$data);
    unless($handle)
    {
        $this->_dowarn("parse_mem failed because IO::Scalar creation failed.");
        return;
    }

    return $this->_parseDataHandle($handle, $options);
};
+
sub parse_web {
    # Fetch a control file over HTTP and parse it via parse_mem.
    # Returns undef (with a debug warning) on any fetch problem.
    my ($this, $url, $options) = @_;

    unless($url)
    {
        $this->_dowarn("No url given, thus no data to parse");
        return;
    }

    my $agent = LWP::UserAgent->new;
    my $req = HTTP::Request->new(GET => $url);
    unless($req)
    {
        $this->_dowarn("Failed to instantiate HTTP Request object");
        return;
    }

    my $resp = $agent->request($req);
    unless($resp->is_success)
    {
        $this->_dowarn("Failed to fetch $url from the web");
        return;
    }
    return $this->parse_mem($resp->content(), $options);
}
+
sub write_file {
    # Serialise one stanza (or an arrayref of stanzas) and write it to a
    # filename or an already-open filehandle (see _getValidHandle for
    # the clobberFile/appendFile options).  Returns the number of bytes
    # written, or undef on argument/handle problems.
    my ($this, $target, $stanzas, $options) = @_;

    unless($target)
    {
        $this->_dowarn("write_file failed because no filename or filehandle was given");
        return;
    }
    unless($stanzas)
    {
        $this->_dowarn("write_file failed because no data was given");
        return;
    }

    my $handle = $this->_getValidHandle($target, $options);
    unless($handle)
    {
        $this->_dowarn("write_file failed because we couldn't negotiate a valid handle");
        return;
    }

    my $output = $this->write_mem($stanzas, $options) || "";
    print $handle $output;
    close $handle;

    return length($output);
}
+
sub write_mem {
    # Serialise stanza data (hashref or arrayref of hashrefs) to a
    # scalar.  Options: addNewline appends a trailing newline, gzip
    # compresses the result with Compress::Zlib.
    my ($this, $stanzas, $options) = @_;

    unless($stanzas)
    {
        $this->_dowarn("write_mem failed because no data was given");
        return;
    }

    my $string = $this->_makeControl($this->_makeArrayref($stanzas));
    $string .= "\n" if $options->{addNewline};

    return $options->{gzip} ? Compress::Zlib::memGzip($string) : $string;
}
+
sub DEBUG
{
    # Toggle verbose warnings.  Any argument other than an explicit
    # zero-valued one (including no argument at all) switches debugging
    # on; a defined value whose int() is 0 is stored as given, which
    # preserves the original behaviour for values like "0.4".
    my ($self, $level) = @_;
    if (defined $level and int($level) == 0) {
        $self->{_verbose} = $level;
    } else {
        $self->{_verbose} = 1;
    }
    return;
}
+
sub _getValidHandle {
    # Normalise a filename-or-handle argument into a writable handle.
    # Globs are only checked for being open; filenames are opened for
    # append by default (or with appendFile), truncated when clobberFile
    # is set.  Returns the handle, or undef with a debug warning.
    my($this, $filenameorhandle, $options) = @_;

    if(ref $filenameorhandle eq "GLOB")
    {
        unless($filenameorhandle->opened())
        {
            $this->_dowarn("Can't get a valid filehandle to write to, because that is closed");
            return;
        }

        return $filenameorhandle;
    }else
    {
        my $openmode = ">>";
        $openmode=">" if $options->{clobberFile};
        $openmode=">>" if $options->{appendFile};

        my $handle;

        # Three-arg open keeps the mode separate from the filename, so a
        # name starting with ">" or ending in "|" can't alter the mode.
        unless(open $handle, $openmode, $filenameorhandle)
        {
            $this->_dowarn("Couldn't open file: $openmode$filenameorhandle for writing");
            return;
        }

        return $handle;
    }
}
+
sub _makeArrayref {
    # Wrap a single stanza hashref in an arrayref; pass arrayrefs
    # through untouched, so callers can hand us either form.
    my ($self, $data) = @_;
    return ref $data eq "ARRAY" ? $data : [$data];
}
+
sub _makeControl
{
    # Render an arrayref of stanza hashrefs into control-file text.
    # The first line of a value goes after "Key: "; each further line is
    # continued with a leading space, empty lines becoming " .".  Note
    # that undef (and, historically, "0") values are normalised to ""
    # in-place in the caller's data.  Key order follows hash order.
    my ($self, $stanzas) = @_;

    my $out = "";
    for my $stanza (@$stanzas)
    {
        for my $field (keys %$stanza)
        {
            $stanza->{$field} ||= "";

            my @rest = split("\n", $stanza->{$field});
            my $first = shift @rest;
            $out .= defined $first ? "$field: $first\n" : "$field:\n";

            for my $cont (@rest)
            {
                $out .= $cont eq "" ? " .\n" : " $cont\n";
            }
        }

        $out ||= "";
        $out .= "\n";   # blank line separates stanzas
    }

    chomp($out);
    return $out;
}
+
# _parseDataHandle($handle, $options) - the core parser.
# Reads the handle line by line and splits the input into stanzas
# (separated by blank lines) of "Key: value" pairs, honouring the
# options documented in the POD (tryGzip, stripComments, discardCase,
# verbMultiLine, useTieIxHash).  Returns an arrayref of stanza hashes;
# on a parse error it warns (under DEBUG) and returns what was parsed
# so far.
sub _parseDataHandle
{
    my ($this, $handle, $options) = @_;

    my $structs;

    unless($handle)
    {
        $this->_dowarn("_parseDataHandle failed because no handle was given. This is likely a bug in the module");
        return;
    }

    # Transparently inflate gzipped input when requested.
    if($options->{tryGzip})
    {
        if(my $gunzipped = $this->_tryGzipInflate($handle))
        {
            $handle = new IO::Scalar \$gunzipped
        }
    }

    my $data = $this->_getReadyHash($options);

    my $linenum = 0;
    my $lastfield = "";   # key the next continuation line belongs to

    foreach my $line (<$handle>)
    {
        #Sometimes with IO::Scalar, lines may have a newline at the end

        #$line =~ s/\r??\n??$//; #CRLF fix, but chomp seems to clean it
        chomp $line;


        # "#comment" lines vanish; "##" is an escaped literal "#".
        if($options->{stripComments}){
            next if $line =~ /^\s*\#[^\#]/;
            $line =~ s/\#$//;
            $line =~ s/(?<=[^\#])\#[^\#].*//;
            $line =~ s/\#\#/\#/;
        }

        $linenum++;
        if($line =~ /^\S/)
        {
            #we have a valid key-value pair
            if($line =~ /(.*?)\s*\:\s*(.*)$/)
            {
                my $key = $1;
                my $value = $2;

                if($options->{discardCase})
                {
                    $key = lc($key);
                }

                unless($options->{verbMultiLine})
                {
                    $value =~ s/[\s\t]+$//;
                }

                $data->{$key} = $value;


                # In verbatim mode, restore the newline that chomp
                # removed from the previous multi-line field.
                if ($options->{verbMultiLine}
                    && (($data->{$lastfield} || "") =~ /\n/o)){
                    $data->{$lastfield} .= "\n";
                }

                $lastfield = $key;
            }else{
                $this->_dowarn("Parse error on line $linenum of data; invalid key/value stanza");
                return $structs;
            }

        }elsif($line =~ /^(\s+)(\S.*)/)
        {
            #appends to previous line

            unless($lastfield)
            {
                $this->_dowarn("Parse error on line $linenum of data; indented entry without previous line");
                return $structs;
            }
            if($options->{verbMultiLine}){
                $data->{$lastfield}.="\n$1$2";
            }elsif($2 eq "." ){
                # A lone "." conventionally marks an empty line.
                $data->{$lastfield}.="\n";
            }else{
                my $val = $2;
                $val =~ s/\s+$//;
                $data->{$lastfield}.="\n$val";
            }

        }elsif($line =~ /^\s*$/){
            # Blank line: close the current stanza and start a new one.
            if ($options->{verbMultiLine}
                && ($data->{$lastfield} =~ /\n/o)) {
                $data->{$lastfield} .= "\n";
            }
            if(keys %$data > 0){
                push @$structs, $data;
            }
            $data = $this->_getReadyHash($options);
            $lastfield = "";
        }else{
            $this->_dowarn("Parse error on line $linenum of data; unidentified line structure");
            return $structs;
        }

    }

    # Flush the final stanza (input need not end with a blank line).
    if(keys %$data > 0)
    {
        push @$structs, $data;
    }

    return $structs;
}
+
sub _tryGzipInflate
{
    # Slurp the whole handle and attempt a gzip inflate; when the bytes
    # are not gzip data, memGunzip returns undef and the raw input is
    # returned unchanged.
    my ($self, $fh) = @_;

    my $raw = do { local $/; <$fh> };
    return Compress::Zlib::memGunzip($raw) || $raw;
}
+
sub _getReadyHash
{
    # Return a fresh hashref for stanza storage; when useTieIxHash is
    # set, the hash is tied to Tie::IxHash so key order is preserved.
    # Returns undef (with a debug warning) when Tie::IxHash is missing.
    my ($this, $options) = @_;
    my $data;

    if($options->{useTieIxHash})
    {
        # Runtime require instead of the original string eval("use ...")
        # -- Tie::IxHash stays an optional dependency either way.
        unless(eval { require Tie::IxHash; 1 })
        {
            $this->_dowarn("Can't use Tie::IxHash. You need to install it to have this functionality");
            return;
        }
        tie(%$data, "Tie::IxHash");
        return $data;
    }

    return {};
}
+
sub _dowarn
{
    # Emit a "DEBUG: ..." warning, but only when verbose mode has been
    # switched on via DEBUG() or new(1).  Always returns nothing.
    my ($self, $message) = @_;

    warn "DEBUG: $message" if $self->{_verbose};
    return;
}
+
+
+1;
+
+__END__
+
+=head1 NAME
+
+Parse::DebControl - Easy OO parsing of debian control-like files
+
+=head1 SYNOPSIS
+
+ use Parse::DebControl;
+
+ $parser = new Parse::DebControl;
+
+ $data = $parser->parse_mem($control_data, $options);
+ $data = $parser->parse_file('./debian/control', $options);
+ $data = $parser->parse_web($url, $options);
+
+ $writer = new Parse::DebControl;
+
+ $string = $writer->write_mem($singlestanza);
+ $string = $writer->write_mem([$stanza1, $stanza2]);
+
+ $writer->write_file($filename, $singlestanza, $options);
+ $writer->write_file($filename, [$stanza1, $stanza2], $options);
+
+ $writer->write_file($handle, $singlestanza, $options);
+ $writer->write_file($handle, [$stanza1, $stanza2], $options);
+
+ $parser->DEBUG();
+
+=head1 DESCRIPTION
+
+ Parse::DebControl is an easy OO way to parse debian control files and
+ other colon separated key-value pairs. It's specifically designed
+ to handle the format used in Debian control files, template files, and
+ the cache files used by dpkg.
+
+ For basic format information see:
+ http://www.debian.org/doc/debian-policy/ch-controlfields.html#s-controlsyntax
+
+ This module does not actually do any intelligence with the file content
+ (because there are a lot of files in this format), but merely handles
+ the format. It can handle simple control files, or files hundreds of lines
+ long efficiently and easily.
+
+=head2 Class Methods
+
+=over 4
+
+=item * C<new()>
+
+=item * C<new(I<$debug>)>
+
+Returns a new Parse::DebControl object. If a true parameter I<$debug> is
+passed in, it turns on debugging, similar to a call to C<DEBUG()> (see below);
+
+=back
+
+=over 4
+
+=item * C<parse_file($control_filename,I<$options>)>
+
+Takes a filename as a scalar and an optional hashref of options (see below).
+Will parse as much as it can, warning (if C<DEBUG>ing is turned on) on
+parsing errors.
+
+Returns an array of hashrefs, containing the data in the control file, split up
+by stanza. Stanzas are delineated by newlines, and multi-line fields are
+expressed as such post-parsing. Single periods are treated as special extra
+newline delineators, per convention. Whitespace is also stripped off of lines
+so as to make it less easy to make mistakes with hand-written conf files.
+
+The options hashref can take parameters as follows. Setting the string to true
+enables the option.
+
+ useTieIxHash - Instead of an array of regular hashrefs, uses Tie::IxHash-
+ based hashrefs
+
+ discardCase - Remove all case items from keys (not values)
+
+ stripComments - Remove all commented lines in standard #comment format.
+ Literal #'s are represented by ##. For instance
+
+ Hello there #this is a comment
+ Hello there, I like ##CCCCCC as a grey.
+
+ The first is a comment, the second is a literal "#".
+
+ verbMultiLine - Keep the description AS IS, and do not collapse leading
+ spaces or dots as newlines. This also keeps whitespace from being
+ stripped off the end of lines.
+
+ tryGzip - Attempt to expand the data chunk with gzip first. If the text is
+ already expanded (ie: plain text), parsing will continue normally.
+ This could optionally be turned on for all items in the future, but
+ it is off by default so we don't have to scrub over all the text for
+ performance reasons.
+
+=back
+
+=over 4
+
+=item * C<parse_mem($control_data, I<$options>)>
+
+Similar to C<parse_file>, except takes data as a scalar. Returns the same
+array of hashrefs as C<parse_file>. The options hashref is the same as
+C<parse_file> as well; see above.
+
+=back
+
+=over 4
+
+=item * C<parse_web($url, I<$options>)>
+
+Similar to the other parse_* functions, this pulls down a control file from
+the web and attempts to parse it. For options and return values, see C<parse_file>,
+above
+
+=back
+
+=over 4
+
+=item * C<write_file($filename, $data, I<$options>)>
+
+=item * C<write_file($handle, $data)>
+
+=item * C<write_file($filename, [$data1, $data2, $data3], I<$options>)>
+
+=item * C<write_file($handle, [$data, $data2, $data3])>
+
+This function takes a filename or a handle and writes the data out. The
+data can be given as a single hashref or as an arrayref of hashrefs. It
+will then write it out in a format that it can parse. The order is dependent
+on your hash sorting order. If you care, use Tie::IxHash. Remember for
+reading back in, the module doesn't care.
+
+The I<$options> hashref can contain one of the following two items:
+
+ addNewline - At the end of the last stanza, add an additional newline.
+ appendFile - (default) Write to the end of the file
+ clobberFile - Overwrite the file given.
+ gzip - Compress the data with gzip before writing
+
+Since you determine the mode of your filehandle, passing it along with an
+options hashref obviously won't do anything; rather, it is ignored.
+
+The I<addNewline> option solves a situation where if you are writing
+stanzas to a file in a loop (such as logging with this module), then
+the data will be streamed together, and won't parse back in correctly.
+It is possible that this is the behavior that you want (if you wanted to write
+one key at a time), so it is optional.
+
+This function returns the number of bytes written to the file, undef
+otherwise.
+
+=back
+
+=over 4
+
+=item * C<write_mem($data)>
+
+=item * C<write_mem([$data1,$data2,$data3])>;
+
+This function works similarly to the C<write_file> method, except it returns
+the control structure as a scalar, instead of writing it to a file. There
+is no I<%options> for this file (yet);
+
+=back
+
+=over 4
+
+=item * C<DEBUG()>
+
+Turns on debugging. Calling it with no parameter or a true parameter turns
+on verbose C<warn()>ings. Calling it with a false parameter turns it off.
+It is useful for nailing down any format or internal problems.
+
+=back
+
+=head1 CHANGES
+
+B<Version 2.005> - January 13th, 2004
+
+=over 4
+
+=item * More generic test suite fix for earlier versions of Test::More
+
+=item * Updated copyright statement
+
+=back
+
+B<Version 2.004> - January 12th, 2004
+
+=over 4
+
+=item * More documentation formatting and typo fixes
+
+=item * CHANGES file now generated automatically
+
+=item * Fixes for potential test suite failure in Pod::Coverage run
+
+=item * Adds the "addNewline" option to write_file to solve the streaming stanza problem.
+
+=item * Adds tests for the addNewline option
+
+=back
+
+B<Version 2.003> - January 6th, 2004
+
+=over 4
+
+=item * Added optional Test::Pod test
+
+=item * Skips potential Win32 test failure in the module where it wants to write to /tmp.
+
+=item * Added optional Pod::Coverage test
+
+=back
+
+B<Version 2.002> - October 7th, 2003
+
+=over 4
+
+=item * No code changes. Fixes to test suite
+
+=back
+
+B<Version 2.001> - September 11th, 2003
+
+=over 4
+
+=item * Cleaned up more POD errors
+
+=item * Added tests for file writing
+
+=item * Fixed bug where write_file ignored the gzip parameter
+
+=back
+
+B<Version 2.0> - September 5th, 2003
+
+=over 4
+
+=item * Version increase.
+
+=item * Added gzip support (with the tryGzip option), so that compressed control files can be parsed on the fly
+
+=item * Added gzip support for writing of control files
+
+=item * Added parse_web to snag files right off the web. Useful for things such as apt's Sources.gz and Packages.gz
+
+=back
+
+B<Version 1.10b> - September 2nd, 2003
+
+=over 4
+
+=item * Documentation fix for ## vs # in stripComments
+
+=back
+
+B<Version 1.10> - September 2nd, 2003
+
+=over 4
+
+=item * Documentation fixes, as pointed out by pudge
+
+=item * Adds a feature to stripComments where ## will get interpolated as a literal pound sign, as suggested by pudge.
+
+=back
+
+B<Version 1.9> - July 24th, 2003
+
+=over 4
+
+=item * Fix for warning for edge case (uninitialized value in chomp)
+
+=item * Tests for CRLF
+
+=back
+
+B<Version 1.8> - July 11th, 2003
+
+=over 4
+
+=item * By default, we now strip off whitespace unless verbMultiLine is in place. This makes sense for things like conf files where trailing whitespace has no meaning. Thanks to pudge for reporting this.
+
+=back
+
+B<Version 1.7> - June 25th, 2003
+
+=over 4
+
+=item * POD documentation error noticed again by Frank Lichtenheld
+
+=item * Also by Frank, applied a patch to add a "verbMultiLine" option so that we can hand multiline fields back unparsed.
+
+=item * Slightly expanded test suite to cover new features
+
+=back
+
+B<Version 1.6.1> - June 9th, 2003
+
+=over 4
+
+=item * POD cleanups noticed by Frank Lichtenheld. Thank you, Frank.
+
+=back
+
+B<Version 1.6> - June 2nd, 2003
+
+=over 4
+
+=item * Cleaned up some warnings when you pass in empty hashrefs or arrayrefs
+
+=item * Added stripComments setting
+
+=item * Cleaned up POD errors
+
+=back
+
+B<Version 1.5> - May 8th, 2003
+
+=over 4
+
+=item * Added a line to quash errors with undef hashkeys and writing
+
+=item * Fixed the Makefile.PL to straighten up DebControl.pm being in the wrong dir
+
+=back
+
+B<Version 1.4> - April 30th, 2003
+
+=over 4
+
+=item * Removed exports as they were unnecessary. Many thanks to pudge, who pointed this out.
+
+=back
+
+B<Version 1.3> - April 28th, 2003
+
+=over 4
+
+=item * Fixed a bug where writing blank stanzas would throw a warning. Fix found and supplied by Nate Oostendorp.
+
+=back
+
+B<Version 1.2b> - April 25th, 2003
+
+Fixed:
+
+=over 4
+
+=item * A bug in the test suite where IxHash was not disabled in 40write.t. Thanks to Jeroen Latour from cpan-testers for the report.
+
+=back
+
+B<Version 1.2> - April 24th, 2003
+
+Fixed:
+
+=over 4
+
+=item * A bug in IxHash support where multiple stanzas might be out of order
+
+=back
+
+B<Version 1.1> - April 23rd, 2003
+
+Added:
+
+=over 4
+
+=item * Writing support
+
+=item * Tie::IxHash support
+
+=item * Case insensitive reading support
+
+=back
+
+B<Version 1.0> - April 23rd, 2003
+
+=over 4
+
+=item * This is the initial public release for CPAN, so everything is new.
+
+=back
+
+=head1 BUGS
+
+The module will let you parse otherwise illegal key-value pairs and pairs with spaces. Badly formed stanzas will do things like overwrite duplicate keys, etc. This is your problem.
+
+As of 1.10, the module uses advanced regexp's to figure out about comments. If the tests fail, then stripComments won't work on your earlier perl version (should be fine on 5.6.0+)
+
+=head1 TODO
+
+Change the name over to the Debian:: namespace, probably as Debian::ControlFormat. This will happen as soon as the project that uses this module reaches stability, and we can do some minor tweaks.
+
+=head1 COPYRIGHT
+
+Parse::DebControl is copyright 2003,2004 Jay Bonci E<lt>jaybonci at cpan.orgE<gt>.
+This program is free software; you can redistribute it and/or modify it under
+the same terms as Perl itself.
+
+=cut
Added: scripts/pet/README
===================================================================
--- scripts/pet/README (rev 0)
+++ scripts/pet/README 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,148 @@
+About the PET scripts
+=====================
+
+There are a bunch of perl modules under the PET namespace, some of them
+provide certain common functionality and the others are responsible of data
+collection from different sources.
+
+Data collection is completely separated from presentation. There are a couple
+of very simple scripts that provide the latter: pet and pet.cgi. They
+give more or less the same information on stdout, but the latter is meant to be
+used as a CGI script.
+
+The script that controls data collection is fetchdata, which is meant to be run
+from a cronjob and/or post-commit hook.
+
+You can ask for basic help with the --help option (not in the cgi version).
+
+All the scripts read from the same configuration file, which you specify with
+the --conf option, or with the PET_CONF environment variable. The CGI
+script doesn't have the --conf option, obviously.
+
+For a sample configuration file, see the debianqa.conf-sample file. It is
+mostly self-explaining. Don't forget to set a suitable cache_dir, that other
+members of your group can write to, and put an absolute path for the template
+dir.
+
+Cheat sheet for usual svn layouts:
+
+Layout 1 (python-modules example):
+----------------------------------
+
+For a structure like:
+
+svn://svn.debian.org/svn/python-modules/packages/<package>/trunk/
+
+You should use:
+
+[pet_cgi]
+wsvn_url = http://svn.debian.org/wsvn/python-modules/packages/%s/trunk
+
+[svn]
+repository = svn://svn.debian.org/svn/python-modules/
+packages_path = packages
+post_path = trunk
+track_tags = 1
+tags_path = packages
+tags_post_path = tags
+
+If you don't have a 'packages' directory under your repository root
+use / for both packages_path and tags_path.
+
+Layout 2 (pkg-perl example):
+----------------------------
+
+For a structure like:
+
+svn://svn.debian.org/svn/pkg-perl/trunk/<package>/debian/
+
+You should use:
+
+[pet_cgi]
+wsvn_url = http://svn.debian.org/wsvn/pkg-perl/trunk/%s
+
+[svn]
+repository = svn://svn.debian.org/svn/pkg-perl/
+packages_path = trunk
+track_tags = 1
+tags_path = tags
+
+
+First run
+=========
+
+After configuring, you run the initial download, it could take a long time:
+
+$ <path>/fetchdata --conf <pathtoconf> [-v[v..]] [-j]
+
+-v increases verbosity, and -j enables working in parallel (3 processes).
+
+After that, it will use the cached data if it's not stale, or it will download
+what's necessary. You can also specify package directories to avoid updating
+the whole database.
+
+Package status in the command line
+==================================
+
+With pet you can see in your shell the packages' status, you can have the
+full listing or only the specified packages:
+
+$ <path>/pet --conf <pathtoconf> [<package> [<package> ... ]]
+
+asterisk:
+ - Version status: Watchfile problem
+ + Watch status: DownloadError
+ + SVN: 1:1.4.13~dfsg-1 (mangled: 1.4.13) (unreleased: 1:1.4.13~dfsg-2) Archive: 1:1.4.13~dfsg-1 (unstable) Upstream: Unknown (mangled: Unknown)
+ + Bugs: #396499, #448171, #433779, #337209, #386114, #399807, #399970, #449706, #381786, #438702, #293751, #353227
+(...)
+
+Using the CGI script
+====================
+
+Copy or symlink pet.cgi to your project's cgi-bin directory, and copy the
+htaccess (renaming it to .htaccess). There you should configure the paths to
+find the libraries and the configuration.
+
+Once done that, you will be able to see a nice XHTML version of the status
+report. You can write your own template, and switch between them with a GET
+parameter: http://..../cgi-bin/pet.cgi?template=my_nice_template
+
+Setting a post-commit hook
+==========================
+
+If you want to have the information updated the moment you commit a change, you
+can add these lines to your post-commit hook:
+
+REPOS="$1"
+REV="$2"
+
+[...]
+
+umask 002
+BASE=<path_to_your_local_copy>
+PERL5LIB=$BASE $BASE/fetchdata \
+ -c <path_to_conf>/pet.conf -r "$REV"
+
+The -r switch sets post-commit mode: it only checks changes in the repository,
+and then verifies if it needs to update upstream information for the packages
+modified.
+
+
+Setting a cron job
+==================
+
+All the data you downloaded in the first run gets stale after some time, so you
+need to run a full check to acquire again what's old (the time to live of each
+data source is controlled from the configuration file). So, the best thing to
+do is to set up a periodic cron job (once each one or two hours is a good
+period, it won't waste bandwidth if the data is still current):
+
+$ crontab -l
+# m h dom mon dow command
+
+BINDIR=<path_to_your_local_copy>
+PERL5LIB=<path_to_your_local_copy>
+
+0 * * * * $BINDIR/fetchdata -c <path_to_conf>/pet.conf
+
+$Id: README 608 2008-08-30 05:31:49Z tincho $
Added: scripts/pet/debianqa.conf-sample
===================================================================
--- scripts/pet/debianqa.conf-sample (rev 0)
+++ scripts/pet/debianqa.conf-sample 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,74 @@
+; vim:syntax=dosini
+;
+; Sample config for PET scripts
+;
+; A "~/" appearing at the beginning of a string will be replaced for the user's
+; home directory
+[pet_cgi]
+templates_path = /home/groups/pkg-foo/PET/templates
+default_template = by_category
+group_name = Debian Foo Group
+group_url = http://pkg-foo.alioth.debian.org/
+; sprintf format for the package SCM web location, takes one parameter, the
+; package directory/file. Samples are for ViewSVN (recommended) and WebSVN
+;scm_web_file = http://svn.debian.org/viewsvn/pkg-foo/trunk/${pkg}/${file}?view=markup
+;scm_web_file = http://svn.debian.org/wsvn/pkg-foo/trunk/${pkg}/${file}/?op=file&rev=0&sc=0
+;scm_web_dir = http://svn.debian.org/viewsvn/pkg-foo/trunk/${pkg}/${dir}/?
+;scm_web_dir = http://svn.debian.org/wsvn/pkg-foo/trunk/${pkg}/${dir}/?rev=0&sc=0
+; Defaults for CGI options
+;default_show_all = 0
+;default_start_collapsed = 0
+;default_hide_binaries = 0
+;default_refresh = 1800 ; 30 minutes
+;default_format = categories ; or list
+;default_ignore_keywords = ; comma-separated list
+
+[svn]
+repository = svn://svn.debian.org/svn/pkg-foo
+; path before the package name, use / if none
+packages_path = trunk
+; path after the package name, should be the parent of the "debian/" directory
+;post_path = trunk
+track_tags = 1
+; path to the directory containing the tags, use / if none
+tags_path = tags
+; same as post_path, for tags
+; tags_post_path = tags
+
+[archive]
+mirror = ftp://ftp.debian.org/debian
+suites = unstable, testing, stable, oldstable, experimental
+sections = main, contrib, non-free
+suites_ttl = 360, 360, 10080, 10080, 360
+new_url = http://ftp-master.debian.org/new.html
+new_ttl = 60
+incoming_url = http://incoming.debian.org
+incoming_ttl = 60
+
+[watch]
+ttl = 360 # 6 hours
+use_cpan = 1
+cpan_mirror = ftp://cpan.org
+cpan_ttl = 360 # 6 hours
+
+[bts]
+ttl = 60 # 1 hour
+soap_proxy = http://bugs.debian.org/cgi-bin/soap.cgi
+soap_uri = Debbugs/SOAP
+; usertags to follow (usernames/emails): foo at bar.com, bar at foo.com
+usertag_users = debian-qa at lists.debian.org
+; wontfix, pending, etch, sarge, etc
+ignore_keywords =
+; wishlist, minor
+ignore_severities =
+
+; Parameters before any section header go into the [common] section
+[common]
+cache_dir = /home/groups/pkg-foo/PET/cache
+; verbosity level: error => 0, warn => 1, info => 2, debug => 3
+verbose = 1
+; Prepend syslog-style format?
+formatted_log = 1
+; if present, will be used to check that packages are in fact maintained by the
+; group
+group_email = pkg-foo-maintainers at lists.debian.org
Added: scripts/pet/fetchdata
===================================================================
--- scripts/pet/fetchdata (rev 0)
+++ scripts/pet/fetchdata 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,125 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: fetchdata 608 2008-08-30 05:31:49Z tincho $
+#
+# Program for invoking the different data-fetching routines. To use from a
+# cronjob, interactively or on post-commit hooks.
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+use PET::Archive;
+use PET::BTS;
+use PET::Common;
+use PET::Config;
+use PET::Svn;
+use PET::Watch;
+use Getopt::Long;
+
+my $opts = getopt_common(1, 1);
+
+my $p = new Getopt::Long::Parser;
+$p->configure(qw(no_ignore_case bundling));
+
+my $list_is_packages = 0;
+my $svn_rev;
+my $parallel = 0;
+my $only;
+$p->getoptions('help|h|?' => \&help, 'packages!' => \$list_is_packages,
+ 'svn-revision|r=i' => \$svn_rev, 'parallel|j!' => \$parallel,
+ 'only=s' => \$only) or die "Error parsing command-line arguments!\n";
+
+die "Invalid module $only" if($only and $only !~ /^(svn|watch|bts|archive)$/);
+if($svn_rev) {
+ info("Enabling post-commit mode");
+ $only = "svn";
+ $opts->{force} = 0;
+}
+my @dirs = @ARGV;
+my @pkgs;
+
+if($list_is_packages) {
+ @pkgs = @dirs;
+ foreach(0..$#pkgs) {
+ $dirs[$_] = pkgname2svndir($pkgs[$_]) || $pkgs[$_]; # Fallback
+ }
+} else {
+ foreach(0..$#dirs) {
+ $pkgs[$_] = svndir2pkgname($dirs[$_]) || $dirs[$_]; # Fallback
+ }
+}
+# We need this first
+my @changed_pkgs;
+@changed_pkgs = svn_download($opts->{force}, $svn_rev, @dirs) if(
+ !$only or $only eq "svn");
+
+# returns dirs, not packages
+foreach(0..$#changed_pkgs) {
+ $changed_pkgs[$_] = svndir2pkgname($changed_pkgs[$_]) || $changed_pkgs[$_];
+}
+info(scalar @changed_pkgs, " changed packages in svn");
+debug("Changed packages in svn: ", join(", ", @changed_pkgs));
+
+if($parallel) {
+ local $SIG{CHLD} = "IGNORE";
+ my @pids;
+ my $pid;
+ foreach(0..2) {
+ unless(defined($pid = fork())) {
+ die "Can't fork: $!";
+ }
+ last unless($pid);
+ push @pids, $pid;
+ }
+ if(@pids == 2) {
+ deb_download($opts->{force}) if(!$only or $only eq "archive"); exit 0;
+ } elsif(@pids == 1) {
+ bts_download($opts->{force}, @pkgs) if(!$only or $only eq "bts");
+ exit 0;
+ } elsif(@pids == 0) {
+ if($svn_rev and @changed_pkgs) { # post-commit mode
+ watch_download($opts->{force}, @changed_pkgs);
+ } else {
+ watch_download($opts->{force}, @pkgs) if(!$only
+ or $only eq "watch");
+ }
+ exit 0;
+ } else {
+ waitpid($_, 0) foreach(@pids);
+ }
+} else {
+ deb_download($opts->{force}) if(!$only or $only eq "archive");
+ bts_download($opts->{force}, @pkgs) if(!$only or $only eq "bts");
+ if($svn_rev and @changed_pkgs) { # post-commit mode
+ watch_download($opts->{force}, @changed_pkgs);
+ } else {
+ watch_download($opts->{force}, @pkgs) if(!$only or $only eq "watch");
+ }
+}
+
+sub help {
+ print <<END;
+Usage:
+ $0 [options] [dirname [dirname ...]]
+
+ For each named directory, updates the databases with information retrieved
+ from the Debian archive, BTS, watchfiles and the Subversion repository.
+
+Options:
+ --help, -h This help.
+ --conf, -c FILE Specifies a configuration file, uses defaults if not
+ present.
+ --force, -f Force operation: ignore caches.
+ --packages Treat the parameters as source package names, instead of
+ directories.
+ --svn-revision,
+ -r REV Current revision for scanning the Subversion repository,
+ only scans svn and watch files changed (post-commit mode).
+ --parallel, -j Process in parallel (it will fork three processes).
+ --only MODULE Only run update for MODULE (svn|archive|watch|bts).
+
+END
+ exit 0;
+}
Property changes on: scripts/pet/fetchdata
___________________________________________________________________
Name: svn:executable
+ *
Added: scripts/pet/htaccess
===================================================================
--- scripts/pet/htaccess (rev 0)
+++ scripts/pet/htaccess 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,2 @@
+SetEnv PERL5LIB ../PET/bin
+SetEnv PET_CONF ../PET/etc/pet.conf
Added: scripts/pet/pet
===================================================================
--- scripts/pet/pet (rev 0)
+++ scripts/pet/pet 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,130 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: pet 610 2008-08-30 06:50:16Z tincho $
+#
+# Draft of a report
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+#use PET::Cache;
+use PET::Classification;
+#use PET::Common;
+use PET::Config;
+#use PET::DebVersions;
+use PET::Svn;
+use Getopt::Long;
+
+my $p = new Getopt::Long::Parser;
+$p->configure(qw(no_ignore_case bundling pass_through));
+
+my $list_is_dirs = 0;
+my $show_all = 0;
+$p->getoptions('help|h|?' => \&help, 'directories!' => \$list_is_dirs,
+ 'showall|a!' => \$show_all
+ ) or die "Error parsing command-line arguments!\n";
+
+my $opts = getopt_common(0, 1); # No passthru, load config
+
+my @dirs = @ARGV;
+
+if($list_is_dirs) {
+ foreach my $dir (@dirs) {
+ $dir = svndir2pkgname($dir) || $dir; # Fallback
+ }
+}
+
+my @pkglist = @dirs;
+@pkglist = get_pkglist() unless(@pkglist);
+my $csfy = classify(@pkglist);
+unless($show_all) {
+ foreach(keys %$csfy) {
+ delete $csfy->{$_} unless(%{$csfy->{$_}{hilight}});
+ }
+}
+print("Showing ", scalar keys %$csfy, " out of ", scalar @pkglist,
+ " packages\n");
+foreach my $pkg (sort keys %$csfy) {
+ my %data = %{$csfy->{$pkg}};
+ print "$pkg:";
+ if($pkg ne $data{svn_path}) {
+ print " (SVN: $data{svn_path})";
+ }
+ print " ", $data{svn}{short_descr} if($data{svn}{short_descr});
+ print "\n";
+ if(%{$data{status}}) {
+ print " - Problems: ", join(", ", keys %{$data{status}}), "\n";
+ }
+ if(@{$data{notes}}) {
+ print " - Notes: ", join(", ", @{$data{notes}}), "\n";
+ }
+ print " - Repository status: ";
+ if($data{hilight}{svn}) {
+ print join(", ", keys %{$data{hilight}{svn}}), "\n";
+ } else {
+ print "OK\n";
+ }
+ if($data{svn}{version}) {
+ print " + Latest released: $data{svn}{version} ";
+ print "($data{svn}{changer})\n";
+ }
+ if($data{svn}{un_version}) {
+ print " + Latest unreleased: $data{svn}{un_version}\n";
+ }
+ #
+ print " - Debian archive status: ";
+ if($data{hilight}{archive}) {
+ print join(", ", keys %{$data{hilight}{archive}}), "\n";
+ } else {
+ print "OK\n";
+ }
+ if($data{archive}{most_recent}) {
+ print " + Latest version: $data{archive}{most_recent} ";
+ print "(from $data{archive}{most_recent_src})\n";
+ }
+ #
+ print " - BTS status: ";
+ if($data{hilight}{bts}) {
+ print join(", ", keys %{$data{hilight}{bts}}), "\n";
+ } else {
+ print "OK\n";
+ }
+ foreach(keys %{$data{bts}}) {
+ print " + Bug #$_ - $data{bts}{$_}{subject}\n";
+ }
+ #
+ print " - Upstream status: ";
+ if($data{hilight}{upstream}) {
+ print join(", ", keys %{$data{hilight}{upstream}}), "\n";
+ } else {
+ print "OK\n";
+ }
+ print " + URL: $data{upstream_url}\n" if($data{upstream_url});
+ if($data{watch}{upstream_version}) {
+ print " + Latest version: $data{watch}{upstream_version}\n";
+ }
+ #
+ # use Data::Dumper; print Dumper %data;
+}
+
+sub help {
+ print <<END;
+Usage:
+ $0 [options] [dirname [dirname ...]]
+
+ For each named directory, updates the databases with information retrieved
+ from the Debian archive, BTS, watchfiles and the Subversion repository.
+
+Options:
+ --help, -h This help.
+ --conf, -c FILE Specifies a configuration file, uses defaults if not
+ present.
+ --directories Treat the parameters as repository directory names, instead
+ of source package names.
+ --showall Show status of all packages, including OK packages.
+
+END
+ exit 0;
+}
Property changes on: scripts/pet/pet
___________________________________________________________________
Name: svn:executable
+ *
Added: scripts/pet/pet-chlog.cgi
===================================================================
--- scripts/pet/pet-chlog.cgi (rev 0)
+++ scripts/pet/pet-chlog.cgi 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,65 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: pet-chlog.cgi 610 2008-08-30 06:50:16Z tincho $
+#
+# Report packages version states
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+use PET::Config qw(read_config %CFG);
+use PET::Svn;
+use CGI ':fatalsToBrowser';
+use CGI;
+
+read_config();
+
+my $cgi = new CGI;
+
+
+if( $ENV{GATEWAY_INTERFACE} )
+{
+ print $cgi->header(
+ -content_type => 'text/html; charset=utf-8',
+ );
+}
+
+my $pkg = $cgi->param('pkg') or exit 0;
+my $rel = $cgi->param('rel') || '';
+
+my $svn = svn_get();
+
+my $text = $svn->{$pkg}{ ($rel eq 'rel')?'text' : 'un_text' };
+
+$text =~ s/&/&amp;/g;
+$text =~ s/'/&#39;/g;
+$text =~ s/"/&quot;/g;
+$text =~ s/</&lt;/g;
+$text =~ s/>/&gt;/g;
+$text =~ s{\r?\n}{<br/>}g;
+
+# replace bug-numbers with links
+$text =~ s{
+ ( # leading
+ ^ # start of string
+ |\W # or non-word
+ )
+ \#(\d+) # followed by a bug ID
+ \b # word boundary
+}
+{$1<a href="http://bugs.debian.org/$2">#$2</a>}xgm;
+# treat text as multi-line
+# Same for CPAN's RT
+$text =~ s{\bCPAN#(\d+)\b}
+{<a href="http://rt.cpan.org/Ticket/Display.html?id=$1">CPAN#$1</a>}gm;
+
+print qq(<a style="float: right; margin: 0 0 1pt 1pt; clear: none;"
+ href="javascript:async_get( '${pkg}_${rel}_chlog_balloon',
+ 'pet-chlog.cgi?pkg=$pkg;rel=$rel')">reload</a>\n);
+print qq(<code style="white-space: pre">$text</code>);
+
+exit 0;
+
Property changes on: scripts/pet/pet-chlog.cgi
___________________________________________________________________
Name: svn:executable
+ *
Added: scripts/pet/pet.cgi
===================================================================
--- scripts/pet/pet.cgi (rev 0)
+++ scripts/pet/pet.cgi 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,209 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: pet.cgi 616 2008-09-15 14:05:04Z tincho $
+#
+# Report packages version states
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+use PET::Cache;
+use PET::Classification;
+use PET::Config qw(read_config %CFG);
+use PET::Svn;
+use CGI ();
+use CGI::Carp qw(fatalsToBrowser);
+use POSIX qw(locale_h);
+use Template ();
+use Date::Parse ();
+use File::Find ();
+use List::Util qw(max);
+
+read_config();
+
+my $cgi = new CGI;
+
+my %param_defaults = (
+ template => $CFG{pet_cgi}{default_template},
+ show_all => $CFG{pet_cgi}{default_show_all},
+ start_collapsed => $CFG{pet_cgi}{default_start_collapsed},
+ hide_binaries => $CFG{pet_cgi}{default_hide_binaries},
+ refresh => $CFG{pet_cgi}{default_refresh},
+ format => $CFG{pet_cgi}{default_format},
+ ignore_keywords => $CFG{pet_cgi}{default_ignore_keywords},
+);
+foreach(qw(show_all start_collapsed hide_binaries)) {
+ $param_defaults{$_} = $param_defaults{$_} ? 1 : 0;
+}
+$param_defaults{ignore_keywords} = [ split(
+ /\s*,\s*/, ($param_defaults{ignore_keywords} || "")) ];
+
+my %params;
+foreach(qw(template show_all start_collapsed hide_binaries refresh format
+ ignore_keywords)) {
+ my @p = $cgi->param($_);
+ if(@p) {
+ $params{$_} = ref $param_defaults{$_} ? \@p : $p[0];
+ } else {
+ $params{$_} = $param_defaults{$_};
+ }
+}
+
+my $cache = read_cache(consolidated => "");
+
+my @modified;
+# Find recent template files
+File::Find::find( { wanted => sub {
+ my $mtime = (stat)[9];
+ push @modified, $mtime if(-f _);
+ } }, $CFG{pet_cgi}{templates_path} );
+my $script_date = '$Date: 2008-09-15 16:05:04 +0200(lun, 15 set 2008) $';
+push @modified, Date::Parse::str2time(
+ join(' ', (split(/ /, $script_date))[1..3]));
+push @modified, (stat $CFG{common}{cache_dir} . "/consolidated")[9];
+
+my $last_modified = max @modified;
+my @pkglist = get_pkglist();
+my $cls = classify(@pkglist);
+
+my( @no_prob, @for_upload, @for_upgrade, @upgrade_wip, @weird, @waiting,
+ @itp_wip, @wip, @with_rc_bugs, @with_bugs, @tagged, @all );
+
+my %ignore = map({ $_ => 1 } @{$params{ignore_keywords}});
+my %keywords = ();
+foreach my $p (values %$cls) {
+ foreach my $bug (keys %{$p->{bts}}) {
+ $keywords{$_} = 1 foreach(@{$p->{bts}{$bug}{keywordsA}});
+ delete $p->{bts}{$bug} if(grep({ $ignore{$_} }
+ @{$p->{bts}{$bug}{keywordsA}}));
+ }
+ unless(%{$p->{bts}}) {
+ delete $p->{status}{has_bugs};
+ delete $p->{hilight}{bts};
+ }
+}
+unless($params{show_all})
+{
+ foreach(keys %$cls)
+ {
+ delete $cls->{$_} unless(%{$cls->{$_}{hilight}});
+ }
+}
+
+foreach my $pkg (sort keys %$cls)
+{
+ my $data = $cls->{$pkg};
+
+ my $dest; # like "destiny" :)
+ my $status = $data->{status}; # to save some typing
+
+ $dest ||= \@upgrade_wip if($status->{upgrade_in_progress});
+ $dest ||= \@for_upgrade if($status->{needs_upgrade});
+ $dest ||= \@tagged if($status->{tagged_wait});
+ $dest ||= \@for_upload if($status->{needs_upload});
+ $dest ||= \@wip if($status->{name_mismatch});
+ $dest ||= \@itp_wip if($status->{never_uploaded});
+ $dest ||= \@wip if($status->{not_finished}
+ or $status->{invalid_svn_version} or $status->{invalid_tag}
+ or $status->{missing_tag});
+ $dest ||= \@weird if($status->{repo_ancient} or $status->{svn_ancient} or
+ $status->{upstream_ancient});
+ $dest ||= \@wip if $status->{watch_error};
+ $dest ||= \@waiting if $status->{archive_waiting};
+ $dest ||= \@with_rc_bugs if $status->{has_rc_bugs};
+ $dest ||= \@with_bugs if $status->{has_bugs};
+ $dest ||= \@wip if $status->{archive_foreign};
+ # $dest ||= \@wip if $status->{svn_foreign};
+ $dest ||= \@no_prob;
+
+ push @$dest, $data;
+ push @all, $data;
+}
+
+if( $ENV{GATEWAY_INTERFACE} )
+{
+ my $htmlp = $cgi->Accept("text/html");
+ my $xhtmlp = $cgi->Accept("application/xhtml+xml");
+
+ my $ims = $cgi->http('If-Modified-Since');
+ $ims = Date::Parse::str2time($ims) if $ims;
+
+ if( $ims and $ims >= $last_modified )
+ {
+ print $cgi->header('text/html', '304 Not modified');
+ exit 0;
+ }
+
+ my $old_locale = setlocale(LC_TIME);
+ setlocale(LC_TIME, "C");
+ print $cgi->header(
+ -content_type => (
+ ($xhtmlp and $xhtmlp > $htmlp)
+ ? 'application/xhtml+xml; charset=utf-8'
+ : 'text/html; charset=utf-8'
+ ),
+ -last_modified => POSIX::strftime(
+ "%a, %d %b %Y %T %Z",
+ localtime($last_modified),
+ ),
+ $params{"refresh"} ? (-refresh => $params{"refresh"}) : (),
+ );
+ setlocale(LC_TIME, $old_locale);
+}
+
+my $tt = new Template(
+ {
+ INCLUDE_PATH => $CFG{pet_cgi}{templates_path},
+ INTERPOLATE => 1,
+ POST_CHOMP => 1,
+ FILTERS => {
+ 'quotemeta' => sub { quotemeta(shift) },
+ },
+ }
+);
+
+$tt->process(
+ $params{"template"},
+ {
+ data => $cls,
+ group_email => $CFG{common}{group_email},
+ group_name => $CFG{pet_cgi}{group_name},
+ group_url => $CFG{pet_cgi}{group_url},
+ scm_web_file=> $CFG{pet_cgi}{scm_web_file},
+ scm_web_dir => $CFG{pet_cgi}{scm_web_dir},
+ (
+ ( ($params{'format'}||'') eq 'list' )
+ ? (
+ all => \@all
+ )
+ : (
+ all => \@no_prob,
+ for_upgrade => \@for_upgrade,
+ upgrade_wip => \@upgrade_wip,
+ weird => \@weird,
+ for_upload => \@for_upload,
+ waiting => \@waiting,
+ tagged => \@tagged,
+ itp_wip => \@itp_wip,
+ wip => \@wip,
+ with_bugs => \@with_bugs,
+ with_rc_bugs=> \@with_rc_bugs,
+ )
+ ),
+ shown_packages => scalar(@all),
+ total_packages => scalar(@pkglist),
+ last_modified => POSIX::strftime("%a, %d %b %Y %T %Z",
+ localtime($last_modified)),
+ now => POSIX::strftime("%a, %d %b %Y %T %Z",
+ localtime(time)),
+ keywords => [ "", sort keys %keywords ],
+ param_defaults => \%param_defaults,
+ params => \%params,
+ },
+) || die $tt->error;
+
+exit 0;
+
Property changes on: scripts/pet/pet.cgi
___________________________________________________________________
Name: svn:executable
+ *
Added: scripts/pet/pkginfo.cgi
===================================================================
--- scripts/pet/pkginfo.cgi (rev 0)
+++ scripts/pet/pkginfo.cgi 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,111 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: pkginfo.cgi 608 2008-08-30 05:31:49Z tincho $
+#
+# Dump all info about a package
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2008
+# © 2008 Damyan Ivanov <dmn at debian.org>
+# Released under the terms of the GNU GPL 2
+use strict;
+use warnings;
+
+use PET::Cache;
+use PET::Config qw(read_config %CFG);
+
+use CGI ':fatalsToBrowser';
+use CGI ();
+use Date::Parse ();
+use File::Find ();
+use List::Util qw(max);
+use POSIX qw(locale_h);
+use Template ();
+
+read_config();
+
+my $cgi = new CGI;
+
+my $pkg = $cgi->param('pkg') or exit 0;
+my $cache = read_cache(consolidated => "");
+
+my @modified;
+# Find recent template files
+File::Find::find( { wanted => sub {
+ my $mtime = (stat)[9];
+ push @modified, $mtime if(-f _);
+ } }, $CFG{pet_cgi}{templates_path} );
+my $script_date = '$Date: 2008-08-30 07:31:49 +0200(sab, 30 ago 2008) $';
+push @modified, Date::Parse::str2time(
+ join(' ', (split(/ /, $script_date))[1..3]));
+push @modified, (stat $CFG{common}{cache_dir} . "/consolidated")[9];
+
+my $last_modified = max @modified;
+
+if($ENV{GATEWAY_INTERFACE}) {
+ my $htmlp = $cgi->Accept("text/html");
+ my $xhtmlp = $cgi->Accept("application/xhtml+xml");
+
+ my $ims = $cgi->http('If-Modified-Since');
+ $ims = Date::Parse::str2time($ims) if $ims;
+
+ if($ims and $ims >= $last_modified) {
+ print $cgi->header('text/html', '304 Not modified');
+ exit 0;
+ }
+
+ my $old_locale = setlocale(LC_TIME);
+ setlocale(LC_TIME, "C");
+ print $cgi->header(
+ -content_type => (
+ 'text/plain; charset=utf-8'
+# ($xhtmlp and $xhtmlp > $htmlp)
+# ? 'application/xhtml+xml; charset=utf-8'
+# : 'text/html; charset=utf-8'
+ ),
+ -last_modified => POSIX::strftime(
+ "%a, %d %b %Y %T %Z",
+ localtime($last_modified),
+ )
+ );
+ setlocale(LC_TIME, $old_locale);
+}
+
+
+use Template;
+my $tt = new Template(
+ {
+ INCLUDE_PATH => $CFG{pet_cgi}{templates_path},
+ INTERPOLATE => 1,
+ POST_CHOMP => 1,
+ FILTERS => {
+ 'quotemeta' => sub { quotemeta(shift) },
+ },
+ RECURSION => 1,
+ }
+);
+
+$tt->process(
+ 'pkginfo',
+ {
+ sections => [
+ {
+ title => 'SVN',
+ data => $cache->{svn}{$pkg},
+ },
+ {
+ title => 'Upstream',
+ data => $cache->{watch}{$pkg},
+ },
+ {
+ title => 'Archive',
+ data => $cache->{archive}{$pkg},
+ },
+ {
+ title => 'Bugs',
+ data => $cache->{bts}{$pkg},
+ },
+ ],
+ },
+) || die $tt->error();
+exit 0;
+
Property changes on: scripts/pet/pkginfo.cgi
___________________________________________________________________
Name: svn:executable
+ *
Added: scripts/pet/rss_upload.cgi
===================================================================
--- scripts/pet/rss_upload.cgi (rev 0)
+++ scripts/pet/rss_upload.cgi 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,128 @@
+#!/usr/bin/perl
+# vim:ts=4:sw=4:et:ai:sts=4
+# $Id: rss_upload.cgi 608 2008-08-30 05:31:49Z tincho $
+#
+# RSS feed for packages that need upload
+#
+# Copyright Martín Ferrari <martin.ferrari at gmail.com>, 2007
+# Copyright Damyan Ivanov <dmn at debian.org>, 2007
+# Copyright gregor herrmann <gregor+debian at comodo.priv.at>, 2008
+# Released under the terms of the GNU GPL 2
+
+use strict;
+use warnings;
+
+use PET::Cache;
+use PET::Classification;
+use PET::Config qw(read_config %CFG);
+use PET::Svn;
+use CGI ();
+use CGI::Carp qw(fatalsToBrowser);
+use POSIX qw(locale_h);
+use Template ();
+use Date::Parse ();
+use List::Util qw(max);
+use XML::RSS;
+use Encode;
+
+read_config();
+
+my $cgi = new CGI;
+
+my $cache = read_cache(consolidated => "");
+
+my @modified;
+my $script_date = '$Date: 2008-08-30 07:31:49 +0200(sab, 30 ago 2008) $';
+push @modified, Date::Parse::str2time(
+ join(' ', (split(/ /, $script_date))[1..3]));
+push @modified, (stat $CFG{common}{cache_dir} . "/consolidated")[9];
+
+my $last_modified = max @modified;
+my @pkglist = get_pkglist();
+my $cls = classify(@pkglist);
+my @for_upload;
+
+foreach my $pkg (sort keys %$cls)
+{
+ my $data = $cls->{$pkg};
+
+ my $dest; # like "destiny" :)
+ my $status = $data->{status}; # to save some typing
+
+ $dest ||= \@for_upload if($status->{needs_upload});
+
+ push @$dest, $data;
+}
+
+if( $ENV{GATEWAY_INTERFACE} )
+{
+ my $textxmlp = $cgi->Accept("text/xml");
+ my $appxmlp= $cgi->Accept("application/xml");
+
+ my $ims = $cgi->http('If-Modified-Since');
+ $ims = Date::Parse::str2time($ims) if $ims;
+
+ if( $ims and $ims >= $last_modified )
+ {
+ print $cgi->header('text/html', '304 Not modified');
+ exit 0;
+ }
+
+ my $old_locale = setlocale(LC_TIME, "C");
+ print $cgi->header(
+ -content_type => (
+ ($appxmlp and $appxmlp >= $textxmlp)
+ ? 'application/rss+xml; charset=utf-8'
+ : 'text/xml; charset=utf-8'
+ ),
+ -last_modified => POSIX::strftime(
+ "%a, %d %b %Y %T %Z",
+ localtime($last_modified),
+ ),
+ $cgi->param("refresh") ? (-refresh => $cgi->param("refresh")) : (),
+ );
+ setlocale(LC_TIME, $old_locale);
+}
+
+# use Data::Dumper; print Dumper @for_upload;
+
+my $rss = new XML::RSS (version => '2.0');
+
+my $pet_cgi = $ENV{GATEWAY_INTERFACE} ? CGI::url() : undef;
+$pet_cgi =~ s/rss_upload\.cgi.*/pet.cgi/ if $pet_cgi;
+
+$rss->channel(
+ title => "$CFG{pet_cgi}{group_name} -- ready for upload",
+ link => $pet_cgi || "$CFG{pet_cgi}{group_url}",
+ description => "Packages in the $CFG{pet_cgi}{group_name} repository that need an upload",
+ pubDate => POSIX::strftime("%a, %d %b %Y %T %z", localtime()),
+ language => "en",
+ webMaster => "$CFG{common}{group_email} ($CFG{pet_cgi}{group_name})",
+ copyright => "Copyright " . POSIX::strftime("%Y", localtime()) . " $CFG{pet_cgi}{group_name}",
+);
+
+foreach my $pkg ( @for_upload )
+{
+ my $svn = svn_get();
+ my $changelog = $svn->{$pkg->{name}}{text};
+ $changelog =~ s/&/&amp;/g;
+ $changelog =~ s/'/&#39;/g;
+ $changelog =~ s/"/&quot;/g;
+ $changelog =~ s/</&lt;/g;
+ $changelog =~ s/>/&gt;/g;
+ $changelog =~ s{\r?\n}{<br/>}g;
+
+ $rss->add_item(
+ title => "$pkg->{name}",
+ link => sprintf($CFG{pet_cgi}{wsvn_url}, $pkg->{name}),
+ description => "<pre>" . decode_utf8($changelog) ."</pre>\n",
+ guid => "$CFG{pet_cgi}{group_name}, $pkg->{svn}->{date}",
+ );
+
+# use Data::Dumper; print Dumper $pkg;
+}
+
+print $rss->as_string;
+
+exit 0;
+
Property changes on: scripts/pet/rss_upload.cgi
___________________________________________________________________
Name: svn:executable
+ *
Added: scripts/pet/templates/by_category
===================================================================
--- scripts/pet/templates/by_category (rev 0)
+++ scripts/pet/templates/by_category 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,372 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- vim:ts=4:sw=4:et:ai:sts=4:syntax=xhtml
+-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+[% MACRO quote(s) BLOCK %][% s | quotemeta %][% END %]
+[% MACRO format_email(s) BLOCK %][%
+ matches = s.match('^\s*"?(.*?)"?\s*<(.*)>\s*$');
+ myname = matches.0 | html;
+ myemail = matches.1 | uri;
+%]<a href="mailto:$myemail">$myname</a>[% END %]
+
+[% MACRO show_maint(data) BLOCK %]
+[%
+ mymaint = [];
+ FOREACH m IN data.maintainer;
+ mymaint.push(format_email(m));
+ END;
+%]
+<span>Maintainer: [% mymaint.join(", ") %] </span>
+[% IF data.uploaders.size %]
+[%
+ myupldr = [];
+ FOREACH m IN data.uploaders;
+ myupldr.push(format_email(m));
+ END;
+%]
+<span>Uploaders: [% myupldr.join(", ") %] </span>
+[% END #IF %]
+[% END #BLOCK %]
+
+[% BLOCK scm_file_url %]
+[% scm_web_file | replace('\${pkg}', pkg.svn.dir) | replace('\${file}', file) %]
+[% END #BLOCK scm_file_url %]
+
+[% BLOCK scm_dir_url %]
+[% scm_web_dir | replace('\${pkg}', pkg.svn.dir) | replace('\${dir}', dir) %]
+[% END #BLOCK scm_dir_url %]
+
+[% MACRO check_maint(pkg, s) BLOCK %]
+[% IF s == "svn";
+ngkey = "svn_foreign";
+nmkey = "svn_nonmaint";
+mdata = pkg.svn;
+ELSE;
+ngkey = "archive_foreign";
+nmkey = "archive_nonmaint";
+mdata = pkg.archive.control;
+END %]
+[% IF pkg.status.$ngkey OR pkg.status.$nmkey %]
+<span class="popup">
+ <span class="non-group">
+ [% IF pkg.status.$ngkey %][NG][% ELSE %][NM][% END %]
+ </span>
+ <span class="balloon">
+ [%# this is span, but is has "display: block" in .css
+ the reason is that XHTML does not allow DIVs inside SPANs %]
+ <span style="margin-bottom: 1em;">
+ [% IF pkg.status.$ngkey %]
+ The group is neither maintainer nor uploader of this
+ package
+ [% ELSE %]
+ The group is not maintainer (but uploader) of this
+ package
+ [% END %]
+ </span>
+ [% show_maint(mdata) %]
+ </span>
+</span>
+[% END #IF %]
+[% END #BLOCK %]
+
+[% BLOCK bts_link %]
+[% IF pkg.bts.size %]
+<div class="bts-info"><div class="popup"><a
+href="http://bugs.debian.org/src:$pkg.name"
+>$pkg.bts.keys.size</a>
+<!-- span class="paren">[</span -->
+<table class="bts-info-details balloon">
+[% FOREACH bug IN pkg.bts.nsort %]
+<tr>
+ <td>
+ <a class="bts-${pkg.bts.$bug.severity}"
+ href="http://bugs.debian.org/$bug">#$bug</a>
+ [% IF pkg.bts.$bug.forwarded %]
+ [% SET F = pkg.bts.$bug.forwarded %]
+ [% qm = BLOCK %]^https?:[% FILTER quotemeta %]//rt.cpan.org/[% END %].+html\?id=(\d+)[% '$' %][% END %]
+ [% rt = F.match(qm) %]
+ <div class="bts-forwarded">
+ [% IF rt.0 %]
+ <a href="$F">cpan#[% rt.0 %]</a>
+ [% ELSE %]
+ <a href="[% IF F.match("^http") %][% GET F | html %][% ELSE %]mailto:[% GET F | uri %][% END %]">forwarded</a>
+ [% END %]
+ </div>
+ [% END %]
+ [% IF pkg.bts.$bug.keywordsA.size > 0 %]
+ <div class="bts-keywords">
+ [% pkg.bts.$bug.keywordsA.join(", ") | html %]
+ </div>
+ [% END %]
+ </td>
+ <td>
+ [% qm = quote(pkg.name) %]
+ [% pkg.bts.$bug.subject.replace("^$qm:\\s*",'') | html %]</td>
+</tr>
+[% END #FOREACH %]
+</table>
+<!-- span class="paren">]</span -->
+</div></div>
+[% END #IF bugs %]
+[% END #BLOCK bts_link %]
+
+[% BLOCK archive_patch_link %]
+[% IF pkg.svn.patches.size %]
+<span class="patches">
+<a href="http://patch-tracking.debian.net/package/${pkg.name}/${pkg.archive.most_recent}/">
+(${pkg.svn.patches.size} patches)
+</a>
+</span>
+[% END #IF archive_patches %]
+[% END #BLOCK archive_patch_link %]
+
+[% BLOCK svn_patch_link %]
+[% IF pkg.svn.patches.size %]
+
+[% IF pkg.svn.patches.size > 1;
+linktext = "${pkg.svn.patches.size} patches";
+ELSE;
+linktext = "1 patch";
+END # patch number %]
+
+[% patch_url = BLOCK %][% INCLUDE scm_dir_url
+dir='debian/patches' | html %][% END %]
+<span class="patches">
+<a href="$patch_url">($linktext)</a>
+</span>
+[% END #IF svn_patches %]
+[% END #BLOCK svn_patch_link %]
+
+[% BLOCK package %]
+ [% SET arch_ver = pkg.archive.most_recent %]
+ [% SET arch_src = pkg.archive.most_recent_src %]
+ [% SET svn_ver = pkg.svn.version %]
+ [% SET svn_un_ver = pkg.svn.un_version %]
+ <tr>
+ <td class="pkg">
+ <span class="pkg-moreinfo popup" >[more]
+ <span id="${pkg.name}_moreinfo_balloon" class="balloon">
+ <a href="javascript:async_get(
+ '${pkg.name}_moreinfo_balloon',
+ 'pkginfo.cgi?pkg=$pkg.name')">Click for full
+ info</a>
+ </span>
+ </span>
+ [% IF pkg.notes.size %]<span class="popup">$pkg.name
+ <span class="balloon">[% pkg.notes.join(', ') %]</span>
+ </span>[% ELSE %]$pkg.name[% END %]
+ [% IF pkg.svn.section AND pkg.svn.section != "main" %]
+ <span class="section-$pkg.svn.section">[$pkg.svn.section]</span>
+ [% END #IF %]
+ [% IF ! params.hide_binaries
+ AND pkg.svn.binaries AND pkg.svn.binaries.size
+ AND (
+ pkg.svn.binaries.size > 1
+ OR pkg.svn.binaries.first != pkg.name
+ ) %]<br/><span class="binary-packages">([%
+ pkg.svn.binaries.join(', ') %])</span>[% END %]
+ [% IF pkg.status.name_mismatch %]
+ <br/>
+ <span class="pkg-name-mismatch">Directory name $pkg.svn.dir != source
+ package $pkg.name</span>
+ [% END %]
+ </td>
+
+ <td[% IF pkg.hilight.svn %] class="todo"[% END %]>
+ [% chlog_url = BLOCK %][% INCLUDE scm_file_url
+ file='debian/changelog' | html %][% END %]
+ <span class="popup svn-rel"><a href="$chlog_url">$svn_ver</a><span
+ id="${pkg.svn.dir}_rel_chlog_balloon" class="balloon"><a
+ href="javascript:async_get(
+ '${pkg.svn.dir}_rel_chlog_balloon',
+ 'pet-chlog.cgi?pkg=$pkg.svn.dir;rel=rel')" >[%
+ pkg.svn.changer | html %] — [% pkg.svn.date |
+ html %]</a>
+ </span></span>
+
+ [% IF svn_un_ver AND (svn_un_ver != svn_ver) %]
+ <span class="popup svn-unrel"><a
+ href="$chlog_url">($svn_un_ver)</a><span
+ id="${pkg.svn.dir}_unrel_chlog_balloon" class="balloon"><a
+ href="javascript:async_get(
+ '${pkg.svn.dir}_unrel_chlog_balloon',
+ 'pet-chlog.cgi?pkg=$pkg.svn.dir;rel=unrel')" >[%
+ pkg.svn.un_changer | html %] — [% pkg.svn.un_date
+ | html %]</a></span></span>[% END #IF %]
+ [% check_maint(pkg, "svn") %]
+ [% IF pkg.status.invalid_tag OR pkg.status.tagged_wait
+ OR pkg.status.missing_tag %]
+ <br/>
+                <span class=[% IF pkg.status.invalid_tag
+                %]"svn-tag-invalid"[% ELSIF pkg.status.missing_tag
+                %]"svn-tag-missing"[% ELSE %]"svn-tag-wait"[%
+                END %]>[Tag: $pkg.svn.tags.last]</span>
+ [% END %]
+ [% INCLUDE svn_patch_link pkg=pkg %]
+ </td>
+
+ <td[% IF pkg.hilight.archive %] class="todo"[% END %]>
+ [% IF arch_ver %]
+ [% IF arch_src != "new" OR pkg.archive.unstable %]
+ <a href="http://packages.qa.debian.org/$pkg.name">$arch_ver</a>
+ [% ELSE %]
+ <a href="http://ftp-master.debian.org/new/${pkg.name}_${arch_ver}.html">$arch_ver</a>
+ [% END #IF %]
+ [% END #IF %]
+ [% IF arch_src AND arch_src != "unstable" %]
+ ($arch_src)
+ [% END #IF %]
+ [% IF pkg.archive.control.dm_allowed %]
+ <span class="popup"><span class="dm-enabled">[DMUA]</span>
+ <span class="balloon">
+ [% show_maint(pkg.archive.control) %]
+ </span>
+ </span>
+ [% END #IF %]
+ [% check_maint(pkg, "archive") %]
+ [%# INCLUDE archive_patch_link pkg=pkg %]
+ </td>
+
+ <td>[% INCLUDE bts_link pkg=pkg %]</td>
+
+ <td[% IF pkg.hilight.upstream %] class="todo"[% END %]>[% IF
+ pkg.watch.upstream_mangled %]<a href="[% pkg.watch.upstream_url | html
+ %]">[% pkg.watch.upstream_mangled
+ %]</a>[% ELSE %][% pkg.watch.error %][% END %]
+ [% IF pkg.watch.upstream_dir %] <a href="$pkg.watch.upstream_dir"
+ class="watch-up-dir">(browse)</a> [% END %] [% IF
+ pkg.hilight.upstream %] [% IF pkg.watch.error %] <a href="[%
+ INCLUDE scm_file_url file="debian/copyright" | html %]"
+ class="watch-cp-info">(copyright info)</a> [% END %] [% IF
+ pkg.watch.error != "Missing" AND ! pkg.status.needs_upgrade %]
+ <a href="[% INCLUDE scm_file_url file="debian/watch" | html %]"
+ class="watch-file">(watchfile)</a> [% END %] [% END %] </td>
+ </tr> [% END #BLOCK package %]
+
+[% BLOCK section %]
+ [% IF list.0 %]
+ [% IF title and name %]
+ <tbody>
+ <tr>
+ <th colspan="5" class="clickable"><a style="display: block" href="javascript:toggle_visibility('$name')">$title ($list.size)</a></th>
+ </tr>
+ </tbody>
+ [% END #IF title and name %]
+ <tbody[% IF name %] id="$name" style="display: [% IF params.start_collapsed %]none[% ELSE %]table-row-group[% END %]"[% END %]>
+ <tr>
+ <th>Package</th>
+ <th>Repository</th>
+ <th>Archive</th>
+ <th>Bugs</th>
+ <th>Upstream</th>
+ </tr>
+ [% FOREACH pkg IN list %]
+ [% INCLUDE package pkg=pkg %]
+ [% END #FOREACH list %]
+ </tbody>
+ [% END #IF list.size %]
+[% END #BLOCK section %]
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+ <title>${group_name} packages overview</title>
+ <style type="text/css">
+ [% INCLUDE default.css %]
+ </style>
+ <script type="text/javascript">
+ //<![CDATA[
+ <!--
+ [% INCLUDE by_category.js %]
+ //-->
+ //]]>
+ </script>
+ <link rel="alternate" type="application/rss+xml" title="Packages in the ${group_name} repository ready for upload" href="rss_upload.cgi" />
+</head>
+<body>
+<h1><a href="${group_url}">${group_name}</a></h1>
+<table id="main_table">
+ [% INCLUDE section data=data list=for_upgrade name="for_upgrade" title="Newer upstream release available" %]
+ [% INCLUDE section data=data list=upgrade_wip name="upgrade_wip" title="Newer upstream available :: Work in progress" %]
+ [% INCLUDE section data=data list=for_upload name="for_upload" title="Ready for upload" %]
+ [% INCLUDE section data=data list=tagged name="tagged" title="Tagged but not in the archive (yet)" %]
+ [% INCLUDE section data=data list=waiting name="waiting" title="NEW and incoming" %]
+ [% INCLUDE section data=data list=weird name="weird" title="Packages with strange versions in the repository" %]
+ [% INCLUDE section data=data list=with_rc_bugs name="with_rc_bugs" title="With RC bugs" %]
+ [% INCLUDE section data=data list=itp_wip name="itp_wip" title="New packages :: Work in progress" %]
+ [% INCLUDE section data=data list=wip name="wip" title="Work in progress" %]
+ [% INCLUDE section data=data list=with_bugs name="with_bugs" title="With bugs" %]
+ [% INCLUDE section data=data list=all name='unclassified' title='Unclassified' %]
+</table>
+
+<h2>$shown_packages/$total_packages</h2>
+
+<div id="options">
+<h2>Options</h2>
+[% USE CGI %]
+[% CGI.start_form({ Method => 'GET' }) %]
+<p>
+[% CGI.checkbox({ Name => 'show_all', Label => "Show all packages",
+Checked => param_defaults.show_all }) %]
+
+[% CGI.checkbox({ Name => 'start_collapsed', Label => "Collapse tables",
+Checked => param_defaults.start_collapsed }) %]
+
+[% CGI.checkbox({ Name => 'hide_binaries',
+Label => "Don't show binary package names",
+Checked => param_defaults.hide_binaries }) %]
+</p>
+<p>Order: [% CGI.radio_group({
+ Name => 'format',
+ Values => [ 'list', 'categories' ],
+ Default => param_defaults.format,
+ Labels => {
+ categories => "by category",
+ list => "by name",
+ }
+}).join("\n") %]</p>
+<p>Refresh: [% CGI.radio_group({
+ Name => 'refresh',
+ Values => [ 0, 1800, 3600, 7200 ],
+ Default => param_defaults.refresh,
+ Labels => {
+ "0" => "No refresh",
+ "1800" => "30 min",
+ "3600" => "1 hour",
+ "7200" => "2 hours"
+ }
+ }).join("\n") %]</p>
+<p>Bug keywords to ignore: <br/>
+ [% CGI.scrolling_list(
+ "-name" => 'ignore_keywords',
+ "-values" => keywords,
+ "-size" => 6,
+ "-labels" => { '' => '--none--' },
+ "-multiple" => 'true',
+ "-default" => param_defaults.ignore_keywords,
+ ) %]</p>
+<p>[% CGI.submit({ Label => 'Reload' }) %]</p>
+[% CGI.end_form.join("\n") %]
+</div>
+<p id="w3org">
+ <a href="http://validator.w3.org/check?uri=referer"><img
+ style="border:0;width:88px;height:31px"
+ src="http://www.w3.org/Icons/valid-xhtml10-blue"
+ alt="Valid XHTML 1.0 Strict"/></a>
+ <a href="http://jigsaw.w3.org/css-validator/check/referer">
+ <img style="border:0;width:88px;height:31px"
+ src="http://jigsaw.w3.org/css-validator/images/vcss"
+ alt="Valid CSS!" /></a>
+</p>
+[% META id='$Id: by_category 617 2008-11-21 15:03:41Z tincho $' %]
+<p id="selfplug">
+ Powered by
+ <a href="http://pet.alioth.debian.org/">Package Entropy Tracker</a>
+</p>
+<p id="page_id">
+ Last modified: $last_modified<br/>
+ Retrieved on: $now<br/>
+ <code>$template.id</code>
+</p>
+</body>
+</html>
Added: scripts/pet/templates/by_category.js
===================================================================
--- scripts/pet/templates/by_category.js (rev 0)
+++ scripts/pet/templates/by_category.js 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,50 @@
+// Toggle a collapsible section (a <tbody> in the package table) between
+// hidden and shown.  Uses 'table-row-group' rather than 'block' so the
+// revealed rows keep proper table layout.
+function toggle_visibility(id)
+{
+    var el = document.getElementById(id);
+    el.style.display = (el.style.display == 'none' ? 'table-row-group' : 'none');
+}
+function async_get(id, url)
+{
+ var xml;
+ if (window.XMLHttpRequest) {
+ xml = new XMLHttpRequest();
+ } else if (window.ActiveXObject) {
+ xml = new ActiveXObject("Microsoft.XMLHTTP");
+ } else {
+ alert("Your browser lacks the needed ability to use Ajax. Sorry.");
+ return false;
+ }
+ xml.open('GET', url);
+ xml.onreadystatechange = function() {
+ ajaxStateChanged(xml, id);
+ };
+ xml.send('');
+}
+function ajaxStateChanged(xml, id)
+{
+ var el = document.getElementById(id);
+ if( !el )
+ {
+ alert('Element "' + id + '" not found');
+ return false;
+ }
+ if( xml.readyState <= 1 )
+ {
+ el.innerHTML = el.innerHTML + "<br/>Loading...";
+ }
+ if( xml.readyState == 3 )
+ {
+ el.innerHTML = el.innerHTML + ".";
+ }
+ if( xml.readyState == 4 )
+ {
+ if( xml.status == 200 )
+ {
+ el.innerHTML = xml.responseText;
+ }
+ else
+ {
+ el.innerHTML = xml.status+': '+xml.StatusText;
+ }
+ }
+}
Added: scripts/pet/templates/default.css
===================================================================
--- scripts/pet/templates/default.css (rev 0)
+++ scripts/pet/templates/default.css 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,168 @@
+/* vim:ts=4:sw=4:et:ai:sts=4:syntax=css
+ */
+body {
+ background: white;
+ color: black;
+ margin: 0;
+ padding: 8px;
+}
+table {
+ border: 1px solid black;
+ border-collapse: collapse;
+ empty-cells: show;
+}
+td, th {
+ border: 1px solid black;
+ padding: 0.2em;
+}
+th.clickable, th.clickable a, th.clickable a:visited {
+ background: #404040;
+ color: white;
+}
+.balloon table, .balloon table td, .balloon table th {
+ border: none;
+ vertical-align: top;
+}
+a {
+ text-decoration: none;
+}
+/* before enabling this, think about the link colors -- they all have to
+be visible with the new background
+tr:hover td, tr:hover th {
+background: #F5F5B5;
+color: black;
+}
+ */
+/* From ikiwiki templates */
+.popup {
+ border-bottom: 1px dotted;
+}
+.popup .balloon,
+.popup .paren,
+.popup .expand {
+ display: none;
+}
+.popup:hover .balloon,
+.popup:focus .balloon {
+ position: absolute;
+ display: block;
+ min-width: 20em;
+ max-width: 40em;
+ max-height: 75%;
+ overflow: auto;
+ margin: 0em 0 0 -10em;
+ padding: 0.5em;
+ border: 2px outset #F5F5B5; /* light yellowish */
+ background: #F5F5B5; /* light yellowish */
+ color: black;
+ /* Nonstandard, but very nice. */
+ opacity: 0.95;
+ -moz-opacity: 0.95;
+ filter: alpha(opacity=95);
+}
+
+span.balloon span {
+ display: block;
+}
+
+#main_table {
+ width: 95%;
+}
+.pkg .popup .balloon {
+ margin-left: 0;
+}
+.todo {
+ background: #ADDBE6; /* lightblue */
+}
+.pkg-moreinfo {
+ float: right;
+ font-size: smaller;
+}
+.pkg-name-mismatch {
+ font-weight: bold;
+ color: red;
+}
+.non-group {
+ font-size: smaller;
+ font-weight: bold;
+ color: red;
+}
+.non-maint {
+ font-size: smaller;
+ font-weight: bold;
+ color: maroon;
+}
+.dm-enabled {
+ font-size: smaller;
+ font-weight: bold;
+}
+.binary-packages {
+ font-size: smaller;
+}
+.svn-unrel {
+ font-size: smaller;
+}
+.svn-tag-invalid { font-size: smaller; color: red }
+.svn-tag-missing { font-size: smaller; color: red }
+.svn-tag-wait { font-size: smaller; color: navy }
+.section-non-free { color: red; }
+.section-contrib { color: maroon; }
+.bts-wishlist {
+ color: green;
+}
+.bts-minor {
+ color: #004000; /* darkgreen */
+}
+.bts-normal, .bts-important {
+}
+.bts-grave, .bts-serious {
+ color: red;
+}
+.bts-critical {
+ color: red;
+ text-decoration: blink;
+}
+.bts-keywords {
+ font-size: smaller;
+}
+.bts-forwarded {
+ font-size: smaller;
+}
+.patches {
+ font-size: smaller;
+}
+table.bts-info-details td:first-child {
+ text-align: center;
+}
+div.bts-info div.popup {
+ text-align: center;
+}
+.bts-info-details p {
+ text-indent: -3em;
+ margin: 0 0 0 3em;
+}
+table.bts-info-details td {
+ border: 0;
+ vertical-align: top;
+ text-align: left;
+}
+.watch-file {
+ font-size: smaller;
+}
+.watch-cp-info {
+ font-size: smaller;
+}
+.watch-up-dir {
+ font-size: smaller;
+}
+#options {
+ float: left;
+ padding: .5em;
+ border: 1px black dashed;
+ margin-bottom: 1em;
+}
+#options h2 { font-size: 110%; margin: 0; }
+#options div { display: none }
+#w3org { clear: both; }
+#page_id { border-top: 1px solid black; }
+#selfplug { float: right; }
Added: scripts/pet/templates/pkginfo
===================================================================
--- scripts/pet/templates/pkginfo (rev 0)
+++ scripts/pet/templates/pkginfo 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,43 @@
+
+
+
+[% BLOCK show %]
+ [% IF v.keys %]
+ <table>
+ [% FOREACH k IN v.keys %]
+ [% IF v.$k.list.size %]
+ <tr>
+ <td>${k|html}</td>
+ <td>[% INCLUDE show v=v.$k %]</td>
+ </tr>
+ [% END %]
+ [% END # FOREACH v.keys %]
+ </table>
+ [% ELSIF (v.list.size > 1) %]
+ <table>
+ [% FOREACH i IN v.list %]
+ <tr>
+ <td>[% INCLUDE show v=i %]</td>
+ </tr>
+ [% END %]
+ </table>
+ [% ELSE %]
+ ${v.list.0|html}
+ [% END %]
+[% END # BLOCK show %]
+
+<table>
+ [% FOREACH s IN sections %]
+ <tr><th colspan="2">${s.title|html}</th></tr>
+ [% FOREACH data IN s.data %]
+ [% IF data.value.list.size %]
+ <tr>
+ <td>${data.key|html}</td>
+ <td>
+ [% INCLUDE show v=data.value %]
+ </td>
+ </tr>
+ [% END # IF data.value %]
+ [% END # FREACH data %]
+ [% END # FOREACH sections %]
+</table>
Added: scripts/pkg-mono.conf
===================================================================
--- scripts/pkg-mono.conf (rev 0)
+++ scripts/pkg-mono.conf 2008-11-22 21:23:32 UTC (rev 3776)
@@ -0,0 +1,79 @@
+; vim:syntax=dosini
+;
+; Sample config for PET scripts
+;
+; A "~/" appearing at the beginning of a string will be replaced for the user's
+; home directory
+[pet_cgi]
+templates_path = /home/groups/pkg-mono/scripts/pet/templates
+default_template = by_category
+group_name = Debian Mono Group
+group_url = http://pkg-mono.alioth.debian.org/
+
+; sprintf format for the package SCM web location, takes one parameter, the
+; package directory/file. Samples are for ViewSVN (recommended) and WebSVN
+;scm_web_file = http://svn.debian.org/viewsvn/pkg-foo/trunk/${pkg}/${file}?view=markup
+;scm_web_file = http://svn.debian.org/wsvn/pkg-foo/trunk/${pkg}/${file}/?op=file&rev=0&sc=0
+;scm_web_dir = http://svn.debian.org/viewsvn/pkg-foo/trunk/${pkg}/${dir}/?
+;scm_web_dir = http://svn.debian.org/wsvn/pkg-foo/trunk/${pkg}/${dir}/?rev=0&sc=0
+
+scm_web_file = http://svn.debian.org/wsvn/pkg-mono/${pkg}/trunk/${file}/?op=file&rev=0&sc=0
+scm_web_dir = http://svn.debian.org/wsvn/pkg-mono/${pkg}/trunk/${dir}/?rev=0&sc=0
+
+; Defaults for CGI options
+;default_show_all = 0
+;default_start_collapsed = 0
+default_hide_binaries = 0
+;default_refresh = 1800 ; 30 minutes
+;default_format = categories ; or list
+;default_ignore_keywords = ; comma-separated list
+
+[svn]
+repository = svn://svn.debian.org/svn/pkg-mono
+; path before the package name, use / if none
+packages_path = /
+; path after the package name, should be the parent of the "debian/" directory
+post_path = trunk
+track_tags = 1
+; path to the directory containing the tags, use / if none
+tags_path = /
+; same as post_path, for tags
+tags_post_path = tags
+
+[archive]
+mirror = ftp://ftp.debian.org/debian
+suites = unstable, testing, stable, oldstable, experimental
+sections = main, contrib, non-free
+suites_ttl = 360, 360, 10080, 10080, 360
+new_url = http://ftp-master.debian.org/new.html
+new_ttl = 60
+incoming_url = http://incoming.debian.org
+incoming_ttl = 60
+
+[watch]
+ttl = 360 # 6 hours
+use_cpan = 1
+cpan_mirror = ftp://cpan.org
+cpan_ttl = 360 # 6 hours
+
+[bts]
+ttl = 60 # 1 hour
+soap_proxy = http://bugs.debian.org/cgi-bin/soap.cgi
+soap_uri = Debbugs/SOAP
+; usertags to follow (usernames/emails): foo at bar.com, bar at foo.com
+usertag_users = debian-qa at lists.debian.org
+; wontfix, pending, etch, sarge, etc
+ignore_keywords =
+; wishlist, minor
+ignore_severities =
+
+; Parameters before any section header go into the [common] section
+[common]
+cache_dir = /home/groups/pkg-mono/scripts/pet_cache
+; verbosity level: error => 0, warn => 1, info => 2 debug => 3
+verbose = 1
+; Prepend syslog-style format?
+formatted_log = 1
+; if present, will be used to check that packages are in fact maintained by the
+; group
+group_email = pkg-mono-devel at lists.debian.org
More information about the Pkg-mono-svn-commits
mailing list