[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[commit-womb] gnumaint Makefile gm gm-read.pl gm-util.pl gnup...
From: |
karl |
Subject: |
[commit-womb] gnumaint Makefile gm gm-read.pl gm-util.pl gnup... |
Date: |
Fri, 08 Feb 2013 19:28:00 +0000 |
CVSROOT: /sources/womb
Module name: gnumaint
Changes by: karl <karl> 13/02/08 19:28:00
Modified files:
. : Makefile gm gm-read.pl gm-util.pl
gnupackages.txt
Added files:
. : gm-check.pl gm-generate.pl gm-list.pl
Log message:
first cut at checking the recorded ftp-upload emails; finish splitting
up gm into subfiles
CVSWeb URLs:
http://cvs.savannah.gnu.org/viewcvs/gnumaint/Makefile?cvsroot=womb&r1=1.43&r2=1.44
http://cvs.savannah.gnu.org/viewcvs/gnumaint/gm?cvsroot=womb&r1=1.53&r2=1.54
http://cvs.savannah.gnu.org/viewcvs/gnumaint/gm-read.pl?cvsroot=womb&r1=1.5&r2=1.6
http://cvs.savannah.gnu.org/viewcvs/gnumaint/gm-util.pl?cvsroot=womb&r1=1.4&r2=1.5
http://cvs.savannah.gnu.org/viewcvs/gnumaint/gnupackages.txt?cvsroot=womb&r1=1.125&r2=1.126
http://cvs.savannah.gnu.org/viewcvs/gnumaint/gm-check.pl?cvsroot=womb&rev=1.1
http://cvs.savannah.gnu.org/viewcvs/gnumaint/gm-generate.pl?cvsroot=womb&rev=1.1
http://cvs.savannah.gnu.org/viewcvs/gnumaint/gm-list.pl?cvsroot=womb&rev=1.1
Patches:
Index: Makefile
===================================================================
RCS file: /sources/womb/gnumaint/Makefile,v
retrieving revision 1.43
retrieving revision 1.44
diff -u -b -r1.43 -r1.44
--- Makefile 13 Jan 2013 18:42:00 -0000 1.43
+++ Makefile 8 Feb 2013 19:27:59 -0000 1.44
@@ -1,12 +1,15 @@
-# $Id: Makefile,v 1.43 2013/01/13 18:42:00 karl Exp $
-# Copyright 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
+# $Id: Makefile,v 1.44 2013/02/08 19:27:59 karl Exp $
+# Copyright 2007, 2008, 2009, 2010, 2011, 2012, 2013
+# Free Software Foundation, Inc.
#
# Copying and distribution of this file, with or without modification,
# are permitted in any medium without royalty provided the copyright
# notice and this notice are preserved.
-default: creport
+default:
+# when we want to remake all the .html on the web site,
+# $(gw) being the www cvs checkout.
update-html:
gm generate logos html >$(gw)/graphics/allgnupkgs.html
gm generate manual html >$(gw)/manual/allgnupkgs.html
@@ -55,12 +58,18 @@
test-unanswered:
gm list packages unanswered
-test-checkactivity test-checka:
+test-checkactivity cact:
gm check activityfile
test-checkfsf cfsf:
gm check fsfcopyright
+test-checkmaint cmaint:
+ gm check maintainers
+
+test-checkftp-upload cfu:
+ gm check ftpupload
+
creport:
@printf "total "; gm generate maintainers bypackage \
| grep ' - ' | grep -v ' (generic)' | wc -l
@@ -79,9 +88,6 @@
test-checkftp ftp:
gm check ftplisting
-test-checkmaint:
- gm check maintainers
-
test-checksv sv:
gm check savannah
Index: gm
===================================================================
RCS file: /sources/womb/gnumaint/gm,v
retrieving revision 1.53
retrieving revision 1.54
diff -u -b -r1.53 -r1.54
--- gm 24 Dec 2012 14:25:05 -0000 1.53
+++ gm 8 Feb 2013 19:27:59 -0000 1.54
@@ -1,5 +1,5 @@
#!/usr/bin/env perl
-# $Id: gm,v 1.53 2012/12/24 14:25:05 karl Exp $
+# $Id: gm,v 1.54 2013/02/08 19:27:59 karl Exp $
# GNU maintainer-related operations.
#
# Copyright 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation Inc.
@@ -19,21 +19,44 @@
#
# Originally written by Karl Berry.
+# In this particular case, using require seemed better than setting up
+# modules. It's certainly simpler.
+require "gm-check.pl";
+require "gm-email.pl";
+require "gm-generate.pl";
+require "gm-list.pl";
require "gm-read.pl";
require "gm-util.pl";
-require "gm-email.pl";
$DEBUG = 0;
+
+# Recommended way to deal with the foreign data files: have a cron job
+# that copies them locally and nightly; put symlinks in this directory
+# to the cron-updated files.
+
+ # fp:~karl/src/gnumaint/activity-report.txt
$ACTIVITY_FILE = "activity-report.txt";
+ # fp:/gd/gnuorg/copyright.list
$COPYRIGHT_LIST_FILE = "copyright.list";
+ # list received from address@hidden:
$COPYRIGHT_PAPERS_FILE = "copyright-papers.txt";
+ # maintained here:
$DOC_CATEGORIES_FILE = "doc-categories.txt";
+ # rsync://ftp.gnu.org/gnu/
$FTPLISTING_FILE = "ftplisting.txt";
+ # fp:/srv/data/ftp-upload/maintainers_emails.txt
+$FTPUPLOAD_EMAIL_FILE = "ftp-upload-email.txt";
+ # maintained here:
$GNUPACKAGES_FILE = "gnupackages.txt";
+ # http://ftp.gnu.org/gnu/texinfo/htmlxref.cnf
$HTMLXREF_FILE = "htmlxref.cnf";
+ # fp:/gd/gnuorg/maintainers
$MAINTAINERS_FILE = "maintainers";
+ # maintained here:
$OLDPACKAGES_FILE = "oldpackages.txt";
+ # maintained here (stale):
$RECENTREL_FILE = "recentrel.txt";
+ # http://savannah.gnu.org/cooperation/groups.tsv
$SAVANNAH_FILE = "groups.tsv";
exit (&main ());
@@ -43,16 +66,16 @@
my $arg1 = $ARGV[1];
my $arg2 = $ARGV[2];
- if ($cmd =~ /^--?help$/) {
+ if ($cmd =~ /^--?help *$/) {
print <<END_USAGE;
Usage: $0 CMD ARG...
Perform various GNU maintainer/package operations.
-(See also the gnumaint script in this directory.)
Here are the possibilities:
check fsfcopyright verify consistency: gnupackages/copyright.list
check maintainers verify consistency: gnupackages/maintainers
+check ftpupload verify consistency: maintainers/ftp-upload
generate email bypackage make messages to send out; add -h for details.
generate maintainers bypackage make /gd/gnuorg/maintainers.bypkg file
@@ -82,7 +105,7 @@
# More features started but not finished:
#check activityfile verify activity-report.txt
#list copyrightpapers copyright.list vs. paperwork
-#check ftp verify consistency: ftplisting.txt
+#check ftplisting verify consistency: ftplisting.txt
#check savannah verify consistency: gnupackages/savannah
# construct the function name from the arguments, and eval it.
@@ -101,720 +124,4 @@
return 0;
}
-
-
-# Return list of packages in the activity report that is not in the
-# maintainers file. (Implementation not finished.)
-#
-sub check_activityfile_ {
- my @ret = ();
-
- my %pkgs = &read_maintainers ("by-package");
- my %maints = &read_maintainers ("by-maintainer");
-
- my %activity = &read_activity ("by-package");
-
- for my $ap (sort by_lineno keys %activity) {
- next if $ap eq "*"; # our last_sendemail placeholder
- my ($email,$name,$time,$line) = split (/\|/, $activity{$ap});
-
- push (@ret, "$ACTIVITY_FILE:$line: active package $ap does not exist"
- . " ($email|$name|$time)")
- unless exists $pkgs{$ap}; #|| exists $missing_pkg_ok{$ap};
- }
- return @ret;
-
-
- sub by_lineno {
- my (undef,undef,undef,$aline) = split (/\|/, $activity{$a});
- my (undef,undef,undef,$bline) = split (/\|/, $activity{$b});
- $aline <=> $bline;
- }
-}
-
-
-
-# Return inconsistencies between copyright.list and the
-# copyright-holder: field in gnupackages. There should not be any.
-#
-sub check_fsfcopyright_ {
- my @ret = ();
-
- my %pkgs = &read_gnupackages ();
- my @cl = &list_copyrightfsf_ (0, 1);
- my %cl;
- @cl{@cl} = (); # make hash from list
-
- for my $cl (keys %cl) {
- if (! exists $pkgs{$cl}) {
- # should be caught by daily checks.
- push (@ret, "$0: FSF-copyrighted $cl missing from $GNUPACKAGES_FILE");
- next;
- }
- my $p = $pkgs{$cl};
- #&warn_hash ($cl, $p);
- if ($p->{"copyright-holder"} !~ /^(fsf|see-)/) { # allow redirects too
- push (@ret, &gnupkgs_msg
- ("copyright-holder: not fsf, but in copyright.list", %$p));
- }
- delete $pkgs{$cl};
- }
-
- for my $pkgname (keys %pkgs) {
- my $p = $pkgs{$pkgname};
- if ($p->{"copyright-holder"} =~ /^fsf/) {
- push (@ret, &gnupkgs_msg
- ("copyright-holder: fsf, but not in copyright.list", %$p));
- }
- }
-
- return @ret;
-}
-
-
-
-# Return list of entries in the ftp listing that are not in the official
-# list of packages. (Implementation not finished.)
-#
-sub check_ftplisting_ {
- my @ret = ();
-
- my %pkgs = &read_gnupackages ();
- my @ftp = &read_ftplisting ();
-
- # known-unusual entries or aliases under ftp.gnu.org:gnu/.
- my @special = qw(GNUinfo GNUsBulletins Licenses MailingListArchives
- MicrosPorts aspell- commonc\+\+ dotgnu flex git glibc
- non-gnu phantom queue savannah speedx windows);
-
- for my $f (sort @ftp) {
- next if exists $pkgs{$f};
- next if grep { $f =~ /^$_/ } @special;
- # read oldpackages? next if grep { $f =~ /^$_/ } @old;
- push (@ret, $f);
- }
-
- return @ret;
-}
-
-
-
-# Return list of packages in the gnupackages file that are not in the
-# maintainers file, and vice versa.
-#
-sub check_maintainers_ {
- my @ret = ();
-
- my %maint_file = &read_maintainers ("by-package");
- my %pkg_file = &read_gnupackages ();
-
- for my $m (keys %maint_file) {
- if (exists $pkg_file{$m}) {
- delete $maint_file{$m};
- delete $pkg_file{$m};
- }
- }
-
- for my $p (sort keys %pkg_file) {
- push (@ret, "$GNUPACKAGES_FILE:$pkg_file{$p}->{lineno}: "
- . "$p not in maintainers");
- }
-
- for my $p (sort keys %maint_file) {
- next if $p =~ /\.nongnu/;
- push (@ret, "$MAINTAINERS_FILE:$maint_file{$p}[0]->{lineno}: "
- . "$p not in gnupackages");
- }
-
- return @ret;
-}
-
-
-
-# Return list of packages in the savannah GNU groups.tsv list that are
-# not in the gnupackages or oldpackages files. We check against these
-# rather than maintainers because some are legitimately only on sv and
-# we want to have info for them.
-#
-# On the other hand, there's no expectation that everything in
-# gnupackages (or maintainers) is on savannah, so don't check that
-# direction.
-#
-# The sv file is at http://savannah.gnu.org/cooperation/groups.tsv
-# and is updated via cron on savannah.
-#
-# (Implementation not finished.)
-sub check_savannah_ {
- my @ret = ();
-
- my %pkg_file = &read_gnupackages ();
- my %sv_file = &read_savannah ();
-
- for my $m (keys %sv_file) {
- if (exists $pkg_file{$m}) {
- delete $sv_file{$m};
- }
- }
-
- for my $p (sort keys %sv_file) {
- push (@ret, "$SAVANNAH_FILE:$sv_file{$p}->{lineno}: "
- . "$p ($sv_file{$p}->{name}) not in gnupackages");
- }
-
- return @ret;
-}
-
-
-
-# Return doc links for all packages. The result is included in
-# www.gnu.org/manual/manual.html via SSI.
-#
-sub generate_logos_html {
- my $autostamp = &generated_by_us ();
- my @ret = ("<!-- File $autostamp -->");
- push (@ret, "<table>");
-
- my %pkgs = &read_gnupackages ();
- for my $pkgname (sort keys %pkgs) {
- next if &skip_pkg_p ($pkgname);
- my $logo = $pkgs{$pkgname}->{"logo"};
- next unless $logo;
-
- push (@ret, qq!<tr><td><a href="/software/$pkgname/">$pkgname</a></td>!);
- push (@ret, qq! <td><img alt="$pkgname" src="$logo" /></td></tr>\n!);
- }
-
- push (@ret, "</table>");
- push (@ret, "<!-- End file $autostamp -->");
-
- return @ret;
-}
-
-
-
-# Return all packages with all their maintainers, one package per
-# line, like the original format of the maintainers file. We run this
-# from cron.
-#
-sub generate_maintainers_bypackage {
- my @ret = ();
-
- my %pkgs = &read_maintainers ("by-package");
-
- for my $p (sort { lc($a) cmp lc($b) } keys %pkgs) {
- my ($entries,$generic_entry) = &maintainer_email_addrs ($pkgs{$p});
-
- # might not be anything in @entries if the only maintainer was generic.
- push (@ret, "$p - $entries") if $entries;
-
- # if we had a generic maintainer for this package, add that as a
- # separate entry, since that's the way rms wants it.
- push (@ret, "$p (generic) - $generic_entry") if $generic_entry;
- }
-
- return @ret;
-}
-
-
-
-# Return doc links for all packages. The result is included in
-# www.gnu.org/manual/manual.html via SSI.
-#
-sub generate_manual_html {
- my $autostamp = &generated_by_us ();
- my @ret = ("<!-- File $autostamp -->");
-
- # we want to output by category, so keep a hash with category names
- # for keys, and lists of package references for values.
- my %cat;
-
- my %pkgs = &read_gnupackages ();
-
- # Sort packages into their categories.
- for my $pkgname (sort keys %pkgs) {
- next if &skip_pkg_p ($pkgname);
- my %p = %{$pkgs{$pkgname}};
-
- my $short_cat = $p{"doc-category"};
- if (! $short_cat) {
- warn (&gnupkgs_msg ("lacks doc-category\n", %p));
- next;
- }
-
- # Append to the list of packages for this category.
- my @x = exists $cat{$short_cat} ? @{$cat{$short_cat}} : ();
- push (@x, \%p);
- $cat{$short_cat} = \@x;
- }
-
- # show list of all categories at top
- push (@ret, &output_category_list (%cat));
-
- push (@ret, "<table>");
- push (@ret, qq!<col width="24%" />!); # <td width=...> disallowed in XHTML.
- push (@ret, qq!<col width="74%" />!);
-
- # Sort first by full category name, since the abbreviations sometimes
- # start with a completely different string (Libraries -> Software
- # libraries). Then, for each category, sort list of packages and output.
- #
- for my $short_cat (sort by_full_cat keys %cat) {
- my ($full_cat,$cat_url) = @{ &read_doc_categories ($short_cat) };
-
- push (@ret, &output_cat ($short_cat, $full_cat, $cat_url));
-
- # For each package ...
- for my $pkg_ref (sort by_pkgname @{$cat{$short_cat}}) {
- my %p = %$pkg_ref;
- my ($doc_url,$doc_summary) = ($p{"doc-url"}, $p{"doc-summary"});
- if (! $doc_url || ! $doc_summary) {
- warn (&gnupkgs_msg ("lacks doc-(url|summary)\n", %p));
- next;
- }
-
- # convert the doc-url value to a hash of manuals and urls.
- my $pkgname = $p{"package"};
- my $home_url = "/software/$pkgname/";
-
- my %doc_urls = &find_doc_urls ($pkgname, $p{"doc-url"});
-
- # if there was no explicit url for the package, use the home page.
- if (! exists ($doc_urls{$pkgname})) {
- $doc_urls{$pkgname} = $home_url;
- }
-
- # have to replace & with &amp; for XHTML.
- for my $manual (keys %doc_urls) {
- (my $doc_url_xhtml = $doc_urls{$manual}) =~ s,\&,\&amp;,g;
- $doc_urls{$manual} = $doc_url_xhtml;
- }
-
- # start building output string for this package.
- # first column is the package name and additional manuals.
- # Add an id for each package for easy linking;
- # but XHTML, as usual, introduces gratuitious problems.
- (my $xhtml_id = $pkgname) =~ s/[^-_0-9a-z]//g; # identifier chars
- $xhtml_id = "pkg_$xhtml_id" if $xhtml_id !~ /^[a-z]/; # start w/letter
- my $str = qq!\n<tr id="$xhtml_id"><td>* !;
-
- # the main package identifier and its doc url. If we have a
- # mundane name, use it. Otherwise, prettify the pkg identifier.
- my $main_label = $p{"mundane-name"};
- if (! $main_label) {
- ($main_label = $pkgname) =~ s/^gnu/GNU/; # gnufoo -> GNUfoo
- $main_label = ucfirst ($main_label); # bar -> Bar
- }
- $str .= qq!<a href="$doc_urls{$pkgname}">$main_label</a>!;
-
- # followed by other manual identifiers if present.
- my @more_manuals = ();
- for my $manual (keys %doc_urls) {
- next if $manual eq $pkgname; # already took care of that
- push (@more_manuals,
- sprintf (qq!<a href="%s">$manual</a>!, $doc_urls{$manual}));
- }
- if (@more_manuals) {
- $str .= "\n<small>(";
- $str .= join ("\n ", sort @more_manuals);
- $str .= ")</small>\n";
- }
- $str .= "</td>\n";
-
- # Second column is the package description, any shop url's, and
- # the package home page.
- my $summary = "$doc_summary."; # yep, add period
- my $shop = &find_shop_urls (%p);
- my $home = qq!\n [<a href="$home_url">$pkgname home</a>]!;
-
- $str .= qq! <td>$summary$shop$home!;
- $str .= "</td></tr>";
-
- push (@ret, $str);
- }
- }
- push (@ret, "</table>");
-
- # show list of categories again at the end:
- push (@ret, &output_category_list (%cat));
-
- push (@ret, "<!-- End file $autostamp -->");
-
- return @ret;
-
-
- # HTML output for the beginning of each doc category --
- # the table row with the text, a header, a link. The padding-top
- # leaves a bit of extra space above the header, and padding-left moves
- # the header right to straddle the columns.
- #
- sub output_cat {
- my ($short_cat,$full_cat,$cat_url) = @_;
- my $css = qq!style="padding-top:.8em; padding-left:16%;"!;
- my $ret = "\n\n<tr>\n";
- $ret .= qq!<td id="$short_cat" colspan="2" $css>!;
- $ret .= qq!<a href="$cat_url">! if $cat_url;
- $ret .= "<big><b>$full_cat</b></big>";
- $ret .= "</a>" if $cat_url;
- $ret .= "</td></tr>";
- return $ret;
- }
-
- # given two package references, compare their names (for sorting).
- sub by_pkgname { $a->{"package"} cmp $b->{"package"}; }
-
- # given two short categories, compare their full names (for sorting).
- sub by_full_cat { &full_category ($a) cmp &full_category ($b); }
-
- # return just the full category name for SHORT_CAT.
- sub full_category {
- my ($short_cat) = @_;
- my ($full,undef) = @{ &read_doc_categories ($short_cat) };
- return $full;
- }
-
- # return string with all categories as links, as a sort of toc.
- sub output_category_list {
- my (%cat) = @_;
- my $ret = "<p>\n";
-
- for my $short_cat (sort by_full_cat keys %cat) {
- my ($full_cat,$cat_url) = &full_category ($short_cat);
- $full_cat =~ s/ /&nbsp;/g; # no spaces in the category name here
- $ret .= qq!<a href="#$short_cat">$full_cat</a> -\n!;
- }
-
- $ret .= "</p>\n";
- return $ret;
- }
-
- # interpret the doc-url value, return hash where keys are manual
- # identifiers and values are their urls.
- #
- sub find_doc_urls {
- my ($pkgname, $doc_url_val) = @_;
- my %ret;
-
- my @vals = split (/\|/, $doc_url_val); # result of parsing is | separators
- for my $val (@vals) {
- if ($val eq "none") {
- ; # nothing to return, let caller handle it.
-
- } elsif ($val eq "htmlxref") {
- my %htmlxref = &read_htmlxref ($pkgname);
- for my $manual (keys %htmlxref) {
- # do not overwrite a url from gnupackages for the main package
- # name with one from the htmlxref. Instead, add junk to make
- # the htmlxref manual have a different key. We don't want to
- # lose it, since if we have a general entry for "Texinfo"
- # (pointing to all its manuals), say, it's still useful to
- # have the direct link to the "texinfo" manual specifically.
- # Since we uppercase the main label, they're visually
- # distinct, too.
- #
- if ($manual eq $pkgname && exists $ret{$pkgname}) {
- $ret{"$manual<!-- again -->"} = $htmlxref{$manual}
- } else {
- # otherwise, take what we are given.
- $ret{$manual} = $htmlxref{$manual};
- }
- }
-
- } else {
- $ret{$pkgname} = $val; # always prefer url given in gnupackages.
- }
- }
- return %ret;
- }
-
- # Handle FSF shop references. We assume they come in pairs:
- # description in one entry and url in the next. We return the HTML to
- # insert in the output, or the empty string.
- #
- sub find_shop_urls {
- my (%pkg) = @_;
- my $ret;
- my @shop = split (/\|/, $pkg{"doc-shop"});
- if (@shop) {
- $ret = "\n <br/>Available in print:";
- # keep same ordering as input.
- my @books = ();
- for (my $i = 0; $i < @shop; $i += 2) {
- my $title = $shop[$i];
- my $url = $shop[$i+1];
- if ($url !~ /http:/) {
- warn (&gnupkgs_msg ("doc-shop url lacks http (misordered?)\n",%pkg));
- }
- push (@books, qq!\n <cite><a href="$url">$title</a></cite>!);
- }
- $ret .= join (",", @books);
- $ret .= ".";
- } else {
- $ret = "";
- }
- return $ret;
- }
-}
-
-
-
-# Return all packages as relative HTML links to directories by the
-# package name. We carefully maintain http://www.gnu.org/software/
-# so this works. Use the simple pkgname/ directory, since nothing else
-# (neither /index.html nor /pkgname.html) always works.
-#
-sub generate_packages_html {
- my $autostamp = &generated_by_us ();
- my @ret = ("<!-- File $autostamp -->");
-
- my %pkgs = &read_gnupackages ();
- for my $pkgname (sort keys %pkgs) {
- next if &skip_pkg_p ($pkgname);
- push (@ret, qq!<a href="$pkgname/">$pkgname</a> !);
- }
-
- push (@ret, "<!-- End file $autostamp -->");
- return @ret;
-}
-
-
-
-# Return a list of strings: the (active) package names which the FSF is
-# the copyright holder. Or, if the NOTFSF argument is set, for which it
-# is not the copyright holder.
-#
-sub list_copyrightfsf_ {
- my ($notfsf,$nowarn) = @_;
- my @ret = ();
-
- my %fsf_pkgs = &read_copyright_list ("by-line");
- my %old_pkgs = &read_oldpackages ();
- my %maint_pkgs = &read_maintainers ("by-package");
-
- for my $fsf_pkg (sort keys %fsf_pkgs) { # packages in copyright.list
- if (! exists $maint_pkgs{$fsf_pkg}) { # if not in maintainers ...
- # warn about stray names unless known special case, or decommissioned.
- if (! &skip_fsf ($fsf_pkg) && ! exists $old_pkgs{$fsf_pkg}) {
- $fsf_line = $fsf_pkgs{$fsf_pkg};
- warn "$COPYRIGHT_LIST_FILE:$fsf_line: $fsf_pkg not in maintainers\n"
- unless $nowarn;
- }
- next;
- }
-
- if ($notfsf) {
- delete $maint_pkgs{$fsf_pkg};
- } else {
- push (@ret, $fsf_pkg) if ! &skip_pkg_p ($mp);
- }
- }
-
-
- if ($notfsf) {
- # if not fsf, then we want everything left in (not deleted from) maint.
- # The same few problem and non-packages to delete in this case.
- for my $mp (keys %maint_pkgs) {
- delete $maint_pkgs{$mp} if &skip_pkg_p ($mp);
- }
- push (@ret, sort keys %maint_pkgs);
- }
-
- return @ret;
-
-
- # Return 1 if we shouldn't worry about the name of this FSF assignment
- # not being a canonical package name.
- #
- sub skip_fsf {
- my ($fsf) = @_;
-
- my @skip_fsf = qw(
- alloca art artwork asa
- at crontab atrm crond makeatfile
- autolib
- backupfile getversion
- banner blksize bsearch c2tex catalog cdlndir
- cgraph dfs
- checkaliases checker chkmalloc command configure crypto ctutorial cvs
- cvs-utils
- dcl debian dvc
- ebnf2ps ecc ecos edebug egcs elisp_manual elms emacstalk enc-dec
- ep gnust
- etags expect
- fcrypt fiasco file flex flymake flyspell fpr freeodbc fsflogo
- g77 g95 gamma garlic gc gcc-testsuite gconnectfour gellmu gfortran
- gfsd gm2 gnatdist gnoetry gnu_ocr gnulist gnussl go32 gomp grx gsmb
- gso guile-python guppi gyve
- initialize interp io isaac ispell
- je
- kaffe
- leif lesstif lib libiberty libstdc libwv linkcontroller lynx
- m2f mh-e mingw32 minpath misc mkinstalldirs mmalloc mpuz msort mtime
- mtrace mule mutt myme
- newlib newsticker nvi
- opcodes ox
- p2c pc pipobbs pips planner polyorb pptp profile psi publish
- qsort quagga
- rcl readelf regex review riacs
- scandir srchdir
- send sim spim spline stm suit
- tcl tix tk expect
- texi2roff thethe tkwww trix tsp_solve tzset
- udi ul uncvt unexec
- viper web webpages win32api xemacs zlib
- );
-
- my %skip_fsf;
- @skip_fsf{@skip_fsf} = (); # hash slice to make hash from list
-
- return exists $skip_fsf{$fsf};
- }
-}
-
-
-# Return the packages for which the FSF is not the copyright holder.
-#
-sub list_copyrightfsfnot_ {
- return list_copyrightfsf_ (1);
-}
-
-
-
-# Return copyright.list entries that don't have matching paperwork,
-# and vice versa.
-#
-sub list_copyrightpapers_ {
- my @ret = ();
- my %cl_pkgs = &read_copyright_list ("by-year");
- my %cp_pkgs = &read_copyright_papers ();
-
- $DEBUG = 1;
-
- for my $year (sort keys %cp_pkgs) {
- my $cp_year = $cp_pkgs{$year};
- my $cl_year = $cl_pkgs{$year};
- &debug_hash ("cp_year $year", $cp_year);
- &debug_hash ("cl_year $year", $cl_year);
- last;
- }
-
- return @ret;
-}
-
-
-
-# Return list of maintainers for whom we have no phone or address.
-#
-sub list_maintainers_nophysical {
- my @maints = ();
- my %maints = &read_maintainers ("by-maintainer");
-
- for my $m (sort keys %maints) {
- my $m_ref = $maints{$m};
- my %m = %$m_ref;
- next if $m{"is_generic"}; # no contact info needed
- next if $m{"address"} || $m{"phone"}; # have contact info
- (my $packages = $m{"package"}) =~ tr/|/ /;
- push (@maints, "$m{best_email} ($m{name} - $packages)");
- }
-
- return @maints;
-}
-
-
-
-# Return all packages sorted by activity status, one package per line.
-#
-sub list_packages_activity {
- my @ret = ();
-
- # sort activity statuses in this order. If other strings are used,
- # they'll show up first so they can be easily fixed.
- my %activity_order = ("stale" => 1,
- "moribund" => 2,
- "ok" => 3,
- "stable" => 4,
- "container" => 5,
- );
-
- my %pkgs = &read_gnupackages ();
- for my $pkgname (sort by_activity keys %pkgs) {
- my %p = %{$pkgs{$pkgname}};
- my $activity = $p{"activity-status"};
- push (@ret, &gnupkgs_msg ($activity, %p));
- }
-
- return @ret;
-
- sub by_activity {
- (my $a_status = $pkgs{$a}->{"activity-status"}) =~ s/ .*//;
- (my $b_status = $pkgs{$b}->{"activity-status"}) =~ s/ .*//;
- $activity_order{$a_status} <=> $activity_order{$b_status}
- || $pkgs{$a}->{"activity-status"} cmp $pkgs{$b}->{"activity-status"}
- || $a cmp $b;
- }
-}
-
-
-
-# Return all packages whose GPLv3 status is not final.
-#
-sub list_packages_gplv3 {
- my @ret = ();
-
- my %pkgs = &read_gnupackages ();
- for my $pkgname (sort by_gplv3 keys %pkgs) {
- my %p = %{$pkgs{$pkgname}};
- my $gplv3 = $p{"gplv3-status"};
- my $contact = $p{"last-contact"};
- next if $gplv3 =~ /^(done|doc|not-applicable|notgpl|ok|see)/;
- push (@ret, &gnupkgs_msg ($gplv3 . ($contact ? " [$contact]" : ""), %p));
- }
-
- return @ret;
-
- sub by_gplv3 {
- (my $a_status = $pkgs{$a}->{"gplv3-status"});# =~ s/ .*//;
- (my $b_status = $pkgs{$b}->{"gplv3-status"});# =~ s/ .*//;
- $pkgs{$a}->{"gplv3-status"} cmp $pkgs{$b}->{"gplv3-status"}
- || $a cmp $b;
- }
-}
-
-
-
-# Return list of packages for whom no maintainer has answered.
-#
-sub list_packages_unanswered {
- my @recentrel = &read_recentrel ();
- my %activity = &read_activity ("by-package");
- my %pkgs = &read_maintainers ("by-package");
- my @ret = ();
-
- for my $p (sort { lc($a) cmp lc($b) } keys %pkgs) {
- #&debug_hash ($p, $pkgs{$p});
-
- if (grep { $p eq $_ } @recentrel) {
- &debug ("$p recently released, skipping");
-
- } elsif (exists $activity{$p}) {
- # todo: check back to some cutoff
- &debug ("$p got activity reply, skipping");
-
- } else {
- &debug ("$p no activity, returning");
- my @entries = ();
- for my $m (@{$pkgs{$p}}) {
- next if $m->{"is_generic"};
- my $entry = $m->{"name"};
- $entry .= " " if $entry;
- $entry .= "<$m->{best_email}>" if exists $m->{"best_email"};
- push (@entries, $entry);
- }
-
- # might not be anything in @entries.
- push (@ret, "$p - " . join (", ", @entries)) if @entries;
- }
- }
-
- return @ret;
-}
+# All the code is in the gm-*.pl files.
Index: gm-read.pl
===================================================================
RCS file: /sources/womb/gnumaint/gm-read.pl,v
retrieving revision 1.5
retrieving revision 1.6
diff -u -b -r1.5 -r1.6
--- gm-read.pl 24 Dec 2012 14:25:20 -0000 1.5
+++ gm-read.pl 8 Feb 2013 19:28:00 -0000 1.6
@@ -1,10 +1,8 @@
-# $Id: gm-read.pl,v 1.5 2012/12/24 14:25:20 karl Exp $
+# $Id: gm-read.pl,v 1.6 2013/02/08 19:28:00 karl Exp $
# Subroutines for gm script that read various external data file.
-# (In this particular case, using require seemed better than setting up
-# modules. Certainly simpler.)
#
-# Copyright 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation
-# Inc.
+# Copyright 2007, 2008, 2009, 2010, 2011, 2012, 2013
+# Free Software Foundation Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -386,6 +384,29 @@
+# Read the ftp-upload-email file, generated by the sysadmins.
+# Return per-package hash of info, with keys being the package names and
+# values the list of email addresses.
+#
+sub read_ftpupload_email {
+ my %ret;
+
+ open (FTPUPLOAD_EMAIL_FILE) || die "open($FTPUPLOAD_EMAIL_FILE) failed: $!";
+ while (<FTPUPLOAD_EMAIL_FILE>) {
+ chomp;
+ my ($pkg,$emails) = split (" ");
+
+ $pkg = "libc" if $pkg eq "glibc"; # name on ftp.gnu.org differs
+
+ $ret{$pkg} = $emails;
+ }
+ close (FTPUPLOAD_EMAIL_FILE) || warn "close($FTPUPLOAD_EMAIL_FILE) failed: $!";
+
+ return %ret;
+}
+
+
+
# Read the gnupackages.txt file, return a hash of information, where
# the keys are package names and the values are hash references with the
# information. If a key is given more than once (e.g., note), the
Index: gm-util.pl
===================================================================
RCS file: /sources/womb/gnumaint/gm-util.pl,v
retrieving revision 1.4
retrieving revision 1.5
diff -u -b -r1.4 -r1.5
--- gm-util.pl 24 Dec 2012 14:25:32 -0000 1.4
+++ gm-util.pl 8 Feb 2013 19:28:00 -0000 1.5
@@ -1,9 +1,8 @@
-# $Id: gm-util.pl,v 1.4 2012/12/24 14:25:32 karl Exp $
+# $Id: gm-util.pl,v 1.5 2013/02/08 19:28:00 karl Exp $
# Utilities for the gm script.
-# (In this particular case, using require seemed better than setting up
-# modules. Certainly simpler.)
#
-# Copyright 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation Inc.
+# Copyright 2007, 2008, 2009, 2010, 2011, 2012, 2013
+# Free Software Foundation Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
Index: gnupackages.txt
===================================================================
RCS file: /sources/womb/gnumaint/gnupackages.txt,v
retrieving revision 1.125
retrieving revision 1.126
diff -u -b -r1.125 -r1.126
--- gnupackages.txt 7 Feb 2013 18:39:03 -0000 1.125
+++ gnupackages.txt 8 Feb 2013 19:28:00 -0000 1.126
@@ -1,4 +1,4 @@
-# $Id: gnupackages.txt,v 1.125 2013/02/07 18:39:03 karl Exp $
+# $Id: gnupackages.txt,v 1.126 2013/02/08 19:28:00 karl Exp $
# Public domain.
#
# This file is maintained in the CVS repository of GNU womb,
@@ -83,8 +83,8 @@
doc-summary: Asynchronous DNS client library and utilities
doc-url: none
gplv3-status: stays-gplv2+-since-library? (28aug07,gnumaint-reply 21 Aug 2007
11:55:17 +0100))
-activity-status: stable 20060606 (1.3)
-last-contact: 19jan12 asked, 10mar11 replied-in-a-month
+activity-status: stale 20060606 (1.3)
+last-contact: 7feb13 asked, 19jan12 asked, 10mar11 replied-in-a-month
package: aetherspace
doc-category: Games
@@ -241,6 +241,7 @@
doc-category: Libraries
doc-summary: Binary File Descriptor library
doc-url: htmlxref
+gplv3-status: unknown
activity-status: container is-binutils
package: binutils
@@ -490,7 +491,7 @@
doc-summary: Statistics and graphics package
doc-url: none
gplv3-status: not-done-maintainer-wants-volunteer (4 Sep 2007 22:37:48, 28 Jan
2008 20:23:40)
-activity-status: newmaint/20101105 20080220 (3.7)
+activity-status: nomaint/20120219 20080220 (3.7)
last-contact: 19feb12,22jan12 jmd asked
package: dc
@@ -1580,7 +1581,7 @@
doc-url: htmlxref
logo: /graphics/groff-head.png
gplv3-status: done-in-1.20.1
-activity-status: ok 20121230 (1.22.1)
+activity-status: ok 20130207 (1.22.1)
package: grub
mundane-name: GRUB
@@ -1916,7 +1917,7 @@
doc-summary: An extended whois client in Java
doc-url: /software/jwhois/manual/
gplv3-status: done-in-4.0
-activity-status: nomaint 20070701 (4.0)
+activity-status: nomaint/20121212 20070701 (4.0)
last-contact: 12dec12 newmaint needed, 20090620 replied, maybe soon
package: kawa
Index: gm-check.pl
===================================================================
RCS file: gm-check.pl
diff -N gm-check.pl
--- /dev/null 1 Jan 1970 00:00:00 -0000
+++ gm-check.pl 8 Feb 2013 19:27:59 -0000 1.1
@@ -0,0 +1,220 @@
+# $Id: gm-check.pl,v 1.1 2013/02/08 19:27:59 karl Exp $
+# The check actions for the gm script (see --help message).
+#
+# Copyright 2007, 2008, 2009, 2010, 2012, 2013
+# Free Software Foundation, Inc.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or (at
+# your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# Originally written by Karl Berry.
+
+
+
+# Return list of packages in the activity report that are not in the
+# maintainers file. (Implementation not finished.)
+#
+sub check_activityfile_ {
+ my @ret = ();
+
+ my %pkgs = &read_maintainers ("by-package");
+ my %maints = &read_maintainers ("by-maintainer");
+
+ my %activity = &read_activity ("by-package");
+
+ for my $ap (sort by_lineno keys %activity) {
+ next if $ap eq "*"; # our last_sendemail placeholder
+ my ($email,$name,$time,$line) = split (/\|/, $activity{$ap});
+
+ push (@ret, "$ACTIVITY_FILE:$line: active package $ap does not exist"
+ . " ($email|$name|$time)")
+ unless exists $pkgs{$ap}; #|| exists $missing_pkg_ok{$ap};
+ }
+ return @ret;
+
+
+ sub by_lineno {
+ my (undef,undef,undef,$aline) = split (/\|/, $activity{$a});
+ my (undef,undef,undef,$bline) = split (/\|/, $activity{$b});
+ $aline <=> $bline;
+ }
+}
+
+
+
+# Return inconsistencies between copyright.list and the
+# copyright-holder: field in gnupackages. There should not be any.
+#
+sub check_fsfcopyright_ {
+ my @ret = ();
+
+ my %pkgs = &read_gnupackages ();
+ my @cl = &list_copyrightfsf_ (0, 1);
+ my %cl;
+ @cl{@cl} = (); # make hash from list
+
+ for my $cl (keys %cl) {
+ if (! exists $pkgs{$cl}) {
+ # should be caught by daily checks.
+ push (@ret, "$0: FSF-copyrighted $cl missing from $GNUPACKAGES_FILE");
+ next;
+ }
+ my $p = $pkgs{$cl};
+ #&warn_hash ($cl, $p);
+ if ($p->{"copyright-holder"} !~ /^(fsf|see-)/) { # allow redirects too
+ push (@ret, &gnupkgs_msg
+ ("copyright-holder: not fsf, but in copyright.list", %$p));
+ }
+ delete $pkgs{$cl};
+ }
+
+ for my $pkgname (keys %pkgs) {
+ my $p = $pkgs{$pkgname};
+ if ($p->{"copyright-holder"} =~ /^fsf/) {
+ push (@ret, &gnupkgs_msg
+ ("copyright-holder: fsf, but not in copyright.list", %$p));
+ }
+ }
+
+ return @ret;
+}
+
+
+
+# Return list of entries in the ftp listing that are not in the official
+# list of packages. (Implementation not finished.)
+#
+sub check_ftplisting_ {
+ my @ret = ();
+
+ my %pkgs = &read_gnupackages ();
+ my @ftp = &read_ftplisting ();
+
+ # known-unusual entries or aliases under ftp.gnu.org:gnu/.
+ my @special = qw(GNUinfo GNUsBulletins Licenses MailingListArchives
+ MicrosPorts aspell- commonc\+\+ dotgnu flex git glibc
+ non-gnu phantom queue savannah speedx windows);
+
+ for my $f (sort @ftp) {
+ next if exists $pkgs{$f};
+ next if grep { $f =~ /^$_/ } @special;
+ # read oldpackages? next if grep { $f =~ /^$_/ } @old;
+ push (@ret, $f);
+ }
+
+ return @ret;
+}
+
+
+
+# Return list of packages in the recorded ftp-upload emails that are not
+# in the maintainers file. (Implementation not finished.)
+#
+sub check_ftpupload_ {
+ my @ret = ();
+
+ my %maint_file = &read_maintainers ("by-package");
+ my %ftpup_file = &read_ftpupload_email ();
+
+ for my $m (keys %maint_file) {
+ if (exists $ftpup_file{$m}) {
+ # xxtodo: the comparison
+ delete $maint_file{$m};
+ delete $ftpup_file{$m};
+ }
+ }
+
+ for my $p (sort keys %ftpup_file) {
+ push (@ret, "$FTPUPLOAD_EMAIL_FILE: not in maintainers: $p");
+ }
+
+ # Many packages do not release through ftp.gnu.org, unfortunately,
+ # so there is no use in worrying about this list.
+ #for my $p (sort keys %maint_file) {
+ # next if &skip_pkg_p ($p);
+ # push (@ret, "$MAINTAINERS_FILE:$maint_file{$p}[0]->{lineno}: "
+ # . "$p not in ftp-upload email");
+ #}
+
+ return @ret;
+}
+
+
+# Return list of packages in the gnupackages file that are not in the
+# maintainers file, and vice versa. Run hourly from karl cron on fp.
+#
+sub check_maintainers_ {
+ my @ret = ();
+
+ my %maint_file = &read_maintainers ("by-package");
+ my %pkg_file = &read_gnupackages ();
+
+ for my $m (keys %maint_file) {
+ if (exists $pkg_file{$m}) {
+ delete $maint_file{$m};
+ delete $pkg_file{$m};
+ }
+ }
+
+ for my $p (sort keys %pkg_file) {
+ push (@ret, "$GNUPACKAGES_FILE:$pkg_file{$p}->{lineno}: "
+ . "$p not in maintainers");
+ }
+
+ for my $p (sort keys %maint_file) {
+ next if $p =~ /\.nongnu/;
+ push (@ret, "$MAINTAINERS_FILE:$maint_file{$p}[0]->{lineno}: "
+ . "$p not in gnupackages");
+ }
+
+ return @ret;
+}
+
+
+
+# Return list of packages in the savannah GNU groups.tsv list that are
+# not in the gnupackages or oldpackages files. We check against these
+# rather than maintainers because some are legitimately only on sv and
+# we want to have info for them.
+#
+# On the other hand, there's no expectation that everything in
+# gnupackages (or maintainers) is on savannah, so don't check that
+# direction.
+#
+# The sv file is at http://savannah.gnu.org/cooperation/groups.tsv
+# and is updated via cron on savannah.
+#
+# (Implementation not finished.)
+#
+sub check_savannah_ {
+ my @ret = ();
+
+ my %pkg_file = &read_gnupackages ();
+ my %sv_file = &read_savannah ();
+
+ for my $m (keys %sv_file) {
+ if (exists $pkg_file{$m}) {
+ delete $sv_file{$m};
+ }
+ }
+
+ for my $p (sort keys %sv_file) {
+ push (@ret, "$SAVANNAH_FILE:$sv_file{$p}->{lineno}: "
+ . "$p ($sv_file{$p}->{name}) not in gnupackages");
+ }
+
+ return @ret;
+}
+
+
+1;
Index: gm-generate.pl
===================================================================
RCS file: gm-generate.pl
diff -N gm-generate.pl
--- /dev/null 1 Jan 1970 00:00:00 -0000
+++ gm-generate.pl 8 Feb 2013 19:27:59 -0000 1.1
@@ -0,0 +1,335 @@
+# $Id: gm-generate.pl,v 1.1 2013/02/08 19:27:59 karl Exp $
+# The generate actions for the gm script (see --help message).
+#
+# Copyright 2007, 2008, 2009, 2010, 2012, 2013 Free Software Foundation,
+# Inc.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or (at
+# your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# Originally written by Karl Berry.
+
+
+
+# Return doc links for all packages. The result is included in
+# www.gnu.org/manual/manual.html via SSI.
+#
+sub generate_logos_html {
+ my $autostamp = &generated_by_us ();
+ my @ret = ("<!-- File $autostamp -->");
+ push (@ret, "<table>");
+
+ my %pkgs = &read_gnupackages ();
+ for my $pkgname (sort keys %pkgs) {
+ next if &skip_pkg_p ($pkgname);
+ my $logo = $pkgs{$pkgname}->{"logo"};
+ next unless $logo;
+
+ push (@ret, qq!<tr><td><a href="/software/$pkgname/">$pkgname</a></td>!);
+ push (@ret, qq! <td><img alt="$pkgname" src="$logo" /></td></tr>\n!);
+ }
+
+ push (@ret, "</table>");
+ push (@ret, "<!-- End file $autostamp -->");
+
+ return @ret;
+}
+
+
+
+# Return all packages with all their maintainers, one package per
+# line, like the original format of the maintainers file. We run this
+# from cron.
+#
+sub generate_maintainers_bypackage {
+ my @ret = ();
+
+ my %pkgs = &read_maintainers ("by-package");
+
+ for my $p (sort { lc($a) cmp lc($b) } keys %pkgs) {
+ my ($entries,$generic_entry) = &maintainer_email_addrs ($pkgs{$p});
+
+ # might not be anything in @entries if the only maintainer was generic.
+ push (@ret, "$p - $entries") if $entries;
+
+ # if we had a generic maintainer for this package, add that as a
+ # separate entry, since that's the way rms wants it.
+ push (@ret, "$p (generic) - $generic_entry") if $generic_entry;
+ }
+
+ return @ret;
+}
+
+
+
+# Return doc links for all packages. The result is included in
+# www.gnu.org/manual/manual.html via SSI.
+#
+sub generate_manual_html {
+ my $autostamp = &generated_by_us ();
+ my @ret = ("<!-- File $autostamp -->");
+
+ # we want to output by category, so keep a hash with category names
+ # for keys, and lists of package references for values.
+ my %cat;
+
+ my %pkgs = &read_gnupackages ();
+
+ # Sort packages into their categories.
+ for my $pkgname (sort keys %pkgs) {
+ next if &skip_pkg_p ($pkgname);
+ my %p = %{$pkgs{$pkgname}};
+
+ my $short_cat = $p{"doc-category"};
+ if (! $short_cat) {
+ warn (&gnupkgs_msg ("lacks doc-category\n", %p));
+ next;
+ }
+
+ # Append to the list of packages for this category.
+ my @x = exists $cat{$short_cat} ? @{$cat{$short_cat}} : ();
+ push (@x, \%p);
+ $cat{$short_cat} = \@x;
+ }
+
+ # show list of all categories at top
+ push (@ret, &output_category_list (%cat));
+
+ push (@ret, "<table>");
+ push (@ret, qq!<col width="24%" />!); # <td width=...> disallowed in XHTML.
+ push (@ret, qq!<col width="74%" />!);
+
+ # Sort first by full category name, since the abbreviations sometimes
+ # start with a completely different string (Libraries -> Software
+ # libraries). Then, for each category, sort list of packages and output.
+ #
+ for my $short_cat (sort by_full_cat keys %cat) {
+ my ($full_cat,$cat_url) = @{ &read_doc_categories ($short_cat) };
+
+ push (@ret, &output_cat ($short_cat, $full_cat, $cat_url));
+
+ # For each package ...
+ for my $pkg_ref (sort by_pkgname @{$cat{$short_cat}}) {
+ my %p = %$pkg_ref;
+ my ($doc_url,$doc_summary) = ($p{"doc-url"}, $p{"doc-summary"});
+ if (! $doc_url || ! $doc_summary) {
+ warn (&gnupkgs_msg ("lacks doc-(url|summary)\n", %p));
+ next;
+ }
+
+ # convert the doc-url value to a hash of manuals and urls.
+ my $pkgname = $p{"package"};
+ my $home_url = "/software/$pkgname/";
+
+ my %doc_urls = &find_doc_urls ($pkgname, $p{"doc-url"});
+
+ # if there was no explicit url for the package, use the home page.
+ if (! exists ($doc_urls{$pkgname})) {
+ $doc_urls{$pkgname} = $home_url;
+ }
+
+ # have to replace & with &amp; for XHTML.
+ for my $manual (keys %doc_urls) {
+ (my $doc_url_xhtml = $doc_urls{$manual}) =~ s,\&,\&amp;,g;
+ $doc_urls{$manual} = $doc_url_xhtml;
+ }
+
+ # start building output string for this package.
+ # first column is the package name and additional manuals.
+ # Add an id for each package for easy linking;
+ # but XHTML, as usual, introduces gratuitous problems.
+ (my $xhtml_id = $pkgname) =~ s/[^-_0-9a-z]//g; # identifier chars
+ $xhtml_id = "pkg_$xhtml_id" if $xhtml_id !~ /^[a-z]/; # start w/letter
+ my $str = qq!\n<tr id="$xhtml_id"><td>* !;
+
+ # the main package identifier and its doc url. If we have a
+ # mundane name, use it. Otherwise, prettify the pkg identifier.
+ my $main_label = $p{"mundane-name"};
+ if (! $main_label) {
+ ($main_label = $pkgname) =~ s/^gnu/GNU/; # gnufoo -> GNUfoo
+ $main_label = ucfirst ($main_label); # bar -> Bar
+ }
+ $str .= qq!<a href="$doc_urls{$pkgname}">$main_label</a>!;
+
+ # followed by other manual identifiers if present.
+ my @more_manuals = ();
+ for my $manual (keys %doc_urls) {
+ next if $manual eq $pkgname; # already took care of that
+ push (@more_manuals,
+ sprintf (qq!<a href="%s">$manual</a>!, $doc_urls{$manual}));
+ }
+ if (@more_manuals) {
+ $str .= "\n<small>(";
+ $str .= join ("\n ", sort @more_manuals);
+ $str .= ")</small>\n";
+ }
+ $str .= "</td>\n";
+
+ # Second column is the package description, any shop url's, and
+ # the package home page.
+ my $summary = "$doc_summary."; # yep, add period
+ my $shop = &find_shop_urls (%p);
+ my $home = qq!\n [<a href="$home_url">$pkgname home</a>]!;
+
+ $str .= qq! <td>$summary$shop$home!;
+ $str .= "</td></tr>";
+
+ push (@ret, $str);
+ }
+ }
+ push (@ret, "</table>");
+
+ # show list of categories again at the end:
+ push (@ret, &output_category_list (%cat));
+
+ push (@ret, "<!-- End file $autostamp -->");
+
+ return @ret;
+
+
+ # HTML output for the beginning of each doc category --
+ # the table row with the text, a header, a link. The padding-top
+ # leaves a bit of extra space above the header, and padding-left moves
+ # the header right to straddle the columns.
+ #
+ sub output_cat {
+ my ($short_cat,$full_cat,$cat_url) = @_;
+ my $css = qq!style="padding-top:.8em; padding-left:16%;"!;
+ my $ret = "\n\n<tr>\n";
+ $ret .= qq!<td id="$short_cat" colspan="2" $css>!;
+ $ret .= qq!<a href="$cat_url">! if $cat_url;
+ $ret .= "<big><b>$full_cat</b></big>";
+ $ret .= "</a>" if $cat_url;
+ $ret .= "</td></tr>";
+ return $ret;
+ }
+
+ # given two package references, compare their names (for sorting).
+ sub by_pkgname { $a->{"package"} cmp $b->{"package"}; }
+
+ # given two short categories, compare their full names (for sorting).
+ sub by_full_cat { &full_category ($a) cmp &full_category ($b); }
+
+ # return just the full category name for SHORT_CAT.
+ sub full_category {
+ my ($short_cat) = @_;
+ my ($full,undef) = @{ &read_doc_categories ($short_cat) };
+ return $full;
+ }
+
+ # return string with all categories as links, as a sort of toc.
+ sub output_category_list {
+ my (%cat) = @_;
+ my $ret = "<p>\n";
+
+ for my $short_cat (sort by_full_cat keys %cat) {
+ my ($full_cat,$cat_url) = &full_category ($short_cat);
+ $full_cat =~ s/ /\&nbsp;/g; # no spaces in the category name here
+ $ret .= qq!<a href="#$short_cat">$full_cat</a> -\n!;
+ }
+
+ $ret .= "</p>\n";
+ return $ret;
+ }
+
+ # interpret the doc-url value, return hash where keys are manual
+ # identifiers and values are their urls.
+ #
+ sub find_doc_urls {
+ my ($pkgname, $doc_url_val) = @_;
+ my %ret;
+
+ my @vals = split (/\|/, $doc_url_val); # result of parsing is | separators
+ for my $val (@vals) {
+ if ($val eq "none") {
+ ; # nothing to return, let caller handle it.
+
+ } elsif ($val eq "htmlxref") {
+ my %htmlxref = &read_htmlxref ($pkgname);
+ for my $manual (keys %htmlxref) {
+ # do not overwrite a url from gnupackages for the main package
+ # name with one from the htmlxref. Instead, add junk to make
+ # the htmlxref manual have a different key. We don't want to
+ # lose it, since if we have a general entry for "Texinfo"
+ # (pointing to all its manuals), say, it's still useful to
+ # have the direct link to the "texinfo" manual specifically.
+ # Since we uppercase the main label, they're visually
+ # distinct, too.
+ #
+ if ($manual eq $pkgname && exists $ret{$pkgname}) {
+ $ret{"$manual<!-- again -->"} = $htmlxref{$manual}
+ } else {
+ # otherwise, take what we are given.
+ $ret{$manual} = $htmlxref{$manual};
+ }
+ }
+
+ } else {
+ $ret{$pkgname} = $val; # always prefer url given in gnupackages.
+ }
+ }
+ return %ret;
+ }
+
+ # Handle FSF shop references. We assume they come in pairs:
+ # description in one entry and url in the next. We return the HTML to
+ # insert in the output, or the empty string.
+ #
+ sub find_shop_urls {
+ my (%pkg) = @_;
+ my $ret;
+ my @shop = split (/\|/, $pkg{"doc-shop"});
+ if (@shop) {
+ $ret = "\n <br/>Available in print:";
+ # keep same ordering as input.
+ my @books = ();
+ for (my $i = 0; $i < @shop; $i += 2) {
+ my $title = $shop[$i];
+ my $url = $shop[$i+1];
+ if ($url !~ /http:/) {
+ warn (&gnupkgs_msg ("doc-shop url lacks http (misordered?)\n",%pkg));
+ }
+ push (@books, qq!\n <cite><a href="$url">$title</a></cite>!);
+ }
+ $ret .= join (",", @books);
+ $ret .= ".";
+ } else {
+ $ret = "";
+ }
+ return $ret;
+ }
+}
+
+
+
+# Return all packages as relative HTML links to directories by the
+# package name. We carefully maintain http://www.gnu.org/software/
+# so this works. Use the simple pkgname/ directory, since nothing else
+# (neither /index.html nor /pkgname.html) always works.
+#
+sub generate_packages_html {
+ my $autostamp = &generated_by_us ();
+ my @ret = ("<!-- File $autostamp -->");
+
+ my %pkgs = &read_gnupackages ();
+ for my $pkgname (sort keys %pkgs) {
+ next if &skip_pkg_p ($pkgname);
+ push (@ret, qq!<a href="$pkgname/">$pkgname</a> !);
+ }
+
+ push (@ret, "<!-- End file $autostamp -->");
+ return @ret;
+}
+
+
+1;
Index: gm-list.pl
===================================================================
RCS file: gm-list.pl
diff -N gm-list.pl
--- /dev/null 1 Jan 1970 00:00:00 -0000
+++ gm-list.pl 8 Feb 2013 19:27:59 -0000 1.1
@@ -0,0 +1,266 @@
+# $Id: gm-list.pl,v 1.1 2013/02/08 19:27:59 karl Exp $
+# The list actions for the gm script (see --help message).
+#
+# Copyright 2007, 2008, 2009, 2010, 2012, 2013
+# Free Software Foundation, Inc.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or (at
+# your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# Originally written by Karl Berry.
+
+
+
+# Return a list of strings: the (active) package names which the FSF is
+# the copyright holder. Or, if the NOTFSF argument is set, for which it
+# is not the copyright holder.
+#
+sub list_copyrightfsf_ {
+ my ($notfsf,$nowarn) = @_;
+ my @ret = ();
+
+ my %fsf_pkgs = &read_copyright_list ("by-line");
+ my %old_pkgs = &read_oldpackages ();
+ my %maint_pkgs = &read_maintainers ("by-package");
+
+ for my $fsf_pkg (sort keys %fsf_pkgs) { # packages in copyright.list
+ if (! exists $maint_pkgs{$fsf_pkg}) { # if not in maintainers ...
+ # warn about stray names unless known special case, or decommissioned.
+ if (! &skip_fsf ($fsf_pkg) && ! exists $old_pkgs{$fsf_pkg}) {
+ $fsf_line = $fsf_pkgs{$fsf_pkg};
+ warn "$COPYRIGHT_LIST_FILE:$fsf_line: $fsf_pkg not in maintainers\n"
+ unless $nowarn;
+ }
+ next;
+ }
+
+ if ($notfsf) {
+ delete $maint_pkgs{$fsf_pkg};
+ } else {
+ push (@ret, $fsf_pkg) if ! &skip_pkg_p ($mp);
+ }
+ }
+
+
+ if ($notfsf) {
+ # if not fsf, then we want everything left in (not deleted from) maint.
+ # The same few problem and non-packages to delete in this case.
+ for my $mp (keys %maint_pkgs) {
+ delete $maint_pkgs{$mp} if &skip_pkg_p ($mp);
+ }
+ push (@ret, sort keys %maint_pkgs);
+ }
+
+ return @ret;
+
+
+ # Return 1 if we shouldn't worry about the name of this FSF assignment
+ # not being a canonical package name.
+ #
+ sub skip_fsf {
+ my ($fsf) = @_;
+
+ my @skip_fsf = qw(
+ alloca art artwork asa
+ at crontab atrm crond makeatfile
+ autolib
+ backupfile getversion
+ banner blksize bsearch c2tex catalog cdlndir
+ cgraph dfs
+ checkaliases checker chkmalloc command configure crypto ctutorial cvs
+ cvs-utils
+ dcl debian dvc
+ ebnf2ps ecc ecos edebug egcs elisp_manual elms emacstalk enc-dec
+ ep gnust
+ etags expect
+ fcrypt fiasco file flex flymake flyspell fpr freeodbc fsflogo
+ g77 g95 gamma garlic gc gcc-testsuite gconnectfour gellmu gfortran
+ gfsd gm2 gnatdist gnoetry gnu_ocr gnulist gnussl go32 gomp grx gsmb
+ gso guile-python guppi gyve
+ initialize interp io isaac ispell
+ je
+ kaffe
+ leif lesstif lib libiberty libstdc libwv linkcontroller lynx
+ m2f mh-e mingw32 minpath misc mkinstalldirs mmalloc mpuz msort mtime
+ mtrace mule mutt myme
+ newlib newsticker nvi
+ opcodes ox
+ p2c pc pipobbs pips planner polyorb pptp profile psi publish
+ qsort quagga
+ rcl readelf regex review riacs
+ scandir srchdir
+ send sim spim spline stm suit
+ tcl tix tk expect
+ texi2roff thethe tkwww trix tsp_solve tzset
+ udi ul uncvt unexec
+ viper web webpages win32api xemacs zlib
+ );
+
+ my %skip_fsf;
+ @skip_fsf{@skip_fsf} = (); # hash slice to make hash from list
+
+ return exists $skip_fsf{$fsf};
+ }
+}
+
+
+# Return the packages for which the FSF is not the copyright holder.
+#
+sub list_copyrightfsfnot_ {
+ return list_copyrightfsf_ (1);
+}
+
+
+
+# Return copyright.list entries that don't have matching paperwork,
+# and vice versa.
+#
+sub list_copyrightpapers_ {
+ my @ret = ();
+ my %cl_pkgs = &read_copyright_list ("by-year");
+ my %cp_pkgs = &read_copyright_papers ();
+
+ $DEBUG = 1;
+
+ for my $year (sort keys %cp_pkgs) {
+ my $cp_year = $cp_pkgs{$year};
+ my $cl_year = $cl_pkgs{$year};
+ &debug_hash ("cp_year $year", $cp_year);
+ &debug_hash ("cl_year $year", $cl_year);
+ last;
+ }
+
+ return @ret;
+}
+
+
+
+# Return list of maintainers for whom we have no phone or address.
+#
+sub list_maintainers_nophysical {
+ my @maints = ();
+ my %maints = &read_maintainers ("by-maintainer");
+
+ for my $m (sort keys %maints) {
+ my $m_ref = $maints{$m};
+ my %m = %$m_ref;
+ next if $m{"is_generic"}; # no contact info needed
+ next if $m{"address"} || $m{"phone"}; # have contact info
+ (my $packages = $m{"package"}) =~ tr/|/ /;
+ push (@maints, "$m{best_email} ($m{name} - $packages)");
+ }
+
+ return @maints;
+}
+
+
+
+# Return all packages sorted by activity status, one package per line.
+#
+sub list_packages_activity {
+ my @ret = ();
+
+ # sort activity statuses in this order. If other strings are used,
+ # they'll show up first so they can be easily fixed.
+ my %activity_order = ("stale" => 1,
+ "moribund" => 2,
+ "ok" => 3,
+ "stable" => 4,
+ "container" => 5,
+ );
+
+ my %pkgs = &read_gnupackages ();
+ for my $pkgname (sort by_activity keys %pkgs) {
+ my %p = %{$pkgs{$pkgname}};
+ my $activity = $p{"activity-status"};
+ push (@ret, &gnupkgs_msg ($activity, %p));
+ }
+
+ return @ret;
+
+ sub by_activity {
+ (my $a_status = $pkgs{$a}->{"activity-status"}) =~ s/ .*//;
+ (my $b_status = $pkgs{$b}->{"activity-status"}) =~ s/ .*//;
+ $activity_order{$a_status} <=> $activity_order{$b_status}
+ || $pkgs{$a}->{"activity-status"} cmp $pkgs{$b}->{"activity-status"}
+ || $a cmp $b;
+ }
+}
+
+
+
+# Return all packages whose GPLv3 status is not final.
+#
+sub list_packages_gplv3 {
+ my @ret = ();
+
+ my %pkgs = &read_gnupackages ();
+ for my $pkgname (sort by_gplv3 keys %pkgs) {
+ my %p = %{$pkgs{$pkgname}};
+ my $gplv3 = $p{"gplv3-status"};
+ my $contact = $p{"last-contact"};
+ next if $gplv3 =~ /^(done|doc|not-applicable|notgpl|ok|see)/;
+ push (@ret, &gnupkgs_msg ($gplv3 . ($contact ? " [$contact]" : ""), %p));
+ }
+
+ return @ret;
+
+ sub by_gplv3 {
+ (my $a_status = $pkgs{$a}->{"gplv3-status"});# =~ s/ .*//;
+ (my $b_status = $pkgs{$b}->{"gplv3-status"});# =~ s/ .*//;
+ $pkgs{$a}->{"gplv3-status"} cmp $pkgs{$b}->{"gplv3-status"}
+ || $a cmp $b;
+ }
+}
+
+
+
+# Return list of packages for whom no maintainer has answered.
+#
+sub list_packages_unanswered {
+ my @recentrel = &read_recentrel ();
+ my %activity = &read_activity ("by-package");
+ my %pkgs = &read_maintainers ("by-package");
+ my @ret = ();
+
+ for my $p (sort { lc($a) cmp lc($b) } keys %pkgs) {
+ #&debug_hash ($p, $pkgs{$p});
+
+ if (grep { $p eq $_ } @recentrel) {
+ &debug ("$p recently released, skipping");
+
+ } elsif (exists $activity{$p}) {
+ # todo: check back to some cutoff
+ &debug ("$p got activity reply, skipping");
+
+ } else {
+ &debug ("$p no activity, returning");
+ my @entries = ();
+ for my $m (@{$pkgs{$p}}) {
+ next if $m->{"is_generic"};
+ my $entry = $m->{"name"};
+ $entry .= " " if $entry;
+ $entry .= "<$m->{best_email}>" if exists $m->{"best_email"};
+ push (@entries, $entry);
+ }
+
+ # might not be anything in @entries.
+ push (@ret, "$p - " . join (", ", @entries)) if @entries;
+ }
+ }
+
+ return @ret;
+}
+
+
+1;
[Prev in Thread] |
Current Thread |
[Next in Thread] |
- [commit-womb] gnumaint Makefile gm gm-read.pl gm-util.pl gnup...,
karl <=