add missing test to avoid uninitialised value when a page with metadata is removed
diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 7fceb0df3d54112478f25ab946477bd418416ee5..c3cbbae0553464b9c8d20fab97333c788a7290eb 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -4,7 +4,7 @@ package IkiWiki::Plugin::aggregate;
 
 use warnings;
 use strict;
-use IkiWiki;
+use IkiWiki 2.00;
 use HTML::Entities;
 use HTML::Parser;
 use HTML::Tagset;
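
The only functional change in this hunk is the versioned import: Perl's "use MODULE VERSION" form calls MODULE->VERSION(2.00) at compile time, so the plugin refuses to load against an IkiWiki older than the 2.00 plugin API. A minimal illustration of the mechanism:

    # "use IkiWiki 2.00" dies at compile time unless
    # $IkiWiki::VERSION >= 2.00, failing early and loudly.
    use IkiWiki 2.00;
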
@@ -17,7 +17,7 @@ my %guids;
 sub import { #{{{
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
-       hook(type => "filter", id => "aggregate", call => \&filter);
+       hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
        hook(type => "savestate", id => "aggregate", call => \&savestate);
@@ -31,26 +31,48 @@ sub getopt () { #{{{
 } #}}}
 
 sub checkconfig () { #{{{
-       IkiWiki::lockwiki();
-       loadstate();
-       if ($config{aggregate}) {
-               IkiWiki::loadindex();
-               aggregate();
-               expire();
-               savestate();
+       if ($config{aggregate} && ! ($config{post_commit} && 
+                                    IkiWiki::commit_hook_enabled())) {
+               if (! IkiWiki::lockwiki(0)) {
+                       debug("wiki is locked by another process, not aggregating");
+                       exit 1;
+               }
+               
+               # Fork a child process to handle the aggregation.
+               # The parent process will then handle building the result.
+               # This avoids messy code to clear state accumulated while
+               # aggregating.
+               defined(my $pid = fork) or error("Can't fork: $!");
+               if (! $pid) {
+                       loadstate();
+                       IkiWiki::loadindex();
+                       aggregate();
+                       expire();
+                       savestate();
+                       exit 0;
+               }
+               waitpid($pid,0);
+               if ($?) {
+                       error "aggregation failed with code $?";
+               }
+               
+               IkiWiki::unlockwiki();
        }
-       IkiWiki::unlockwiki();
 } #}}}
 
-sub filter (@) { #{{{
-       my %params=@_;
-       my $page=$params{page};
-
-       # Mark all feeds originating on this page as removable;
-       # preprocess will unmark those that still exist.
-       remove_feeds($page);
+sub needsbuild (@) { #{{{
+       my $needsbuild=shift;
+       
+       loadstate(); # if not already loaded
 
-       return $params{content};
+       foreach my $feed (values %feeds) {
+               if (exists $pagesources{$feed->{sourcepage}} && 
+                   grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
+                       # Mark all feeds originating on this page as removable;
+                       # preprocess will unmark those that still exist.
+                       remove_feeds($feed->{sourcepage});
+               }
+       }
 } # }}}
 
 sub preprocess (@) { #{{{
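
The comment in checkconfig above states the central trick: IkiWiki::lockwiki(0) takes the wiki lock non-blockingly (bailing out if another process holds it), and the aggregation itself runs in a forked child, so whatever global state it accumulates dies with the child, leaving the parent to build the wiki from a clean slate. Stripped of plugin specifics, the pattern is the standard fork/waitpid/exit-status dance; a minimal sketch, with do_work() as a hypothetical stand-in for the loadstate/aggregate/expire/savestate calls:

    defined(my $pid = fork()) or die "Can't fork: $!";
    if (! $pid) {
        do_work();  # child: free to dirty global state
        exit 0;     # child's in-memory state is discarded here
    }
    waitpid($pid, 0);                       # reap the child; sets $?
    die "work failed with code $?" if $?;   # non-zero wait status = failure
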
@@ -58,7 +80,7 @@ sub preprocess (@) { #{{{
 
        foreach my $required (qw{name url}) {
                if (! exists $params{$required}) {
-                       return "[[".sprintf(gettext("aggregate plugin missing %s parameter"), $required)."]]";
+                       return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
                }
        }
 
@@ -116,10 +138,13 @@ sub delete (@) { #{{{
        }
 } #}}}
 
+my $state_loaded=0;
 sub loadstate () { #{{{
+       return if $state_loaded;
+       $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
-               open (IN, "$config{wikistatedir}/aggregate" ||
-                       die "$config{wikistatedir}/aggregate: $!");
+               open(IN, "$config{wikistatedir}/aggregate") ||
+                       die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        chomp;
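
Besides adding the load-once guard ($state_loaded), this hunk fixes a precedence bug: in the old open (IN, "file" || die ...), the || binds inside open's argument list, so the expression always evaluates to just "file" and a failed open was silently ignored. Compare the spellings (with $file as a placeholder):

    open(IN, $file || die "$file: $!");           # wrong: || applies to $file
    open(IN, $file) || die "$file: $!";           # right: || applies to open's result
    open(my $in, '<', $file) or die "$file: $!";  # three-arg form, same effect
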
@@ -151,10 +176,12 @@ sub loadstate () { #{{{
 } #}}}
 
 sub savestate () { #{{{
+       return unless $state_loaded;
        eval q{use HTML::Entities};
        error($@) if $@;
-       open (OUT, ">$config{wikistatedir}/aggregate" ||
-               die "$config{wikistatedir}/aggregate: $!");
+       my $newfile="$config{wikistatedir}/aggregate.new";
+       my $cleanup = sub { unlink($newfile) };
+       open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                if ($data->{remove}) {
                        if ($data->{name}) {
@@ -165,7 +192,8 @@ sub savestate () { #{{{
                                }
                        }
                        else {
-                               unlink pagefile($data->{page});
+                               unlink pagefile($data->{page})
+                                       if exists $data->{page};
                        }
                        next;
                }
@@ -188,15 +216,18 @@ sub savestate () { #{{{
                                push @line, "$field=".$data->{$field};
                        }
                }
-               print OUT join(" ", @line)."\n";
+               print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
        }
-       close OUT;
+       close OUT || error("save $newfile: $!", $cleanup);
+       rename($newfile, "$config{wikistatedir}/aggregate") ||
+               error("rename $newfile: $!", $cleanup);
 } #}}}
 
 sub expire () { #{{{
        foreach my $feed (values %feeds) {
                next unless $feed->{expireage} || $feed->{expirecount};
                my $count=0;
+               my %seen;
                foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
                                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
                                  values %guids) {
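
savestate now writes everything to aggregate.new, checks each print and the final close, and only then renames the new file into place. Since rename() within one filesystem is atomic, a crash at any point leaves either the complete old state file or the complete new one, never a truncated mix. The bare pattern, with $file/$new/$state as placeholders:

    my $new = "$file.new";
    open(OUT, ">", $new)  or die "open $new: $!";
    print OUT $state      or die "write $new: $!";
    close(OUT)            or die "close $new: $!";  # buffered write errors surface here
    rename($new, $file)   or die "rename $new: $!"; # atomic replacement
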
@@ -204,7 +235,7 @@ sub expire () { #{{{
                                my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
                                if ($days_old > $feed->{expireage}) {
                                        debug(sprintf(gettext("expiring %s (%s days old)"),
-                                               $item->{page}, $days_old));
+                                               $item->{page}, int($days_old)));
                                        $item->{expired}=1;
                                }
                        }
@@ -214,7 +245,10 @@ sub expire () { #{{{
                                $item->{expired}=1;
                        }
                        else {
-                               $count++;
+                               if (! $seen{$item->{page}}) {
+                                       $seen{$item->{page}}=1;
+                                       $count++;
+                               }
                        }
                }
        }
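
The new %seen hash makes expirecount count pages rather than guids: several guids can resolve to the same page, and previously each of them bumped $count, expiring items too early. Counting only first occurrences is the usual Perl idiom:

    my %seen;
    my @distinct = grep { ! $seen{$_}++ } @pages;  # keep first occurrence of each
    my $count = @distinct;                         # number of distinct pages
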
@@ -223,6 +257,8 @@ sub expire () { #{{{
 sub aggregate () { #{{{
        eval q{use XML::Feed};
        error($@) if $@;
+       eval q{use URI::Fetch};
+       error($@) if $@;
        eval q{use HTML::Entities};
        error($@) if $@;
 
@@ -231,6 +267,9 @@ sub aggregate () { #{{{
                        time - $feed->{lastupdate} >= $feed->{updateinterval};
                $feed->{lastupdate}=time;
                $feed->{newposts}=0;
+               $feed->{message}=sprintf(gettext("processed ok at %s"),
+                       displaytime($feed->{lastupdate}));
+               $feed->{error}=0;
                $IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
 
                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
@@ -238,14 +277,43 @@ sub aggregate () { #{{{
                if (! length $feed->{feedurl}) {
                        my @urls=XML::Feed->find_feeds($feed->{url});
                        if (! @urls) {
-                               $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{feedurl});
+                               $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                                $feed->{error}=1;
                                debug($feed->{message});
                                next;
                        }
                        $feed->{feedurl}=pop @urls;
                }
-               my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))};
+               my $res=URI::Fetch->fetch($feed->{feedurl});
+               if (! $res) {
+                       $feed->{message}=URI::Fetch->errstr;
+                       $feed->{error}=1;
+                       debug($feed->{message});
+                       next;
+               }
+               if ($res->status == URI::Fetch::URI_GONE()) {
+                       $feed->{message}=gettext("feed not found");
+                       $feed->{error}=1;
+                       debug($feed->{message});
+                       next;
+               }
+               my $content=$res->content;
+               my $f=eval{XML::Feed->parse(\$content)};
+               if ($@) {
+                       # One common cause of XML::Feed crashing is a feed
+                       # that contains invalid UTF-8 sequences. Decode the
+                       # content, replacing invalid sequences, to work around it.
+                       $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
+                       $content=Encode::decode_utf8($content);
+                       $f=eval{XML::Feed->parse(\$content)};
+               }
+               if ($@) {
+                       # Another possibility is badly escaped entities.
+                       $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
+                       $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
+                       $content=Encode::decode_utf8($content);
+                       $f=eval{XML::Feed->parse(\$content)};
+               }
                if ($@) {
                        $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
                        $feed->{error}=1;
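
Fetching moves from handing XML::Feed a URI (and letting it download the feed itself) to URI::Fetch, which exposes the HTTP status (e.g. 410 Gone) and the raw body. The body is then parsed with up to three attempts: as-is, after a UTF-8 decode that replaces invalid byte sequences, and after escaping stray entities. A condensed, self-contained sketch of the retry ladder (error handling simplified to die):

    use URI::Fetch;
    use XML::Feed;
    use Encode;

    my $res = URI::Fetch->fetch($url) or die URI::Fetch->errstr;
    my $content = $res->content;
    my $f = eval { XML::Feed->parse(\$content) };
    if ($@) {       # retry: replace invalid UTF-8 sequences
        $content = Encode::decode_utf8($content);
        $f = eval { XML::Feed->parse(\$content) };
    }
    if ($@) {       # retry: escape bare entities such as "&foo;"
        $content =~ s/\&(?!amp)(\w+);/&amp;$1;/g;
        $f = eval { XML::Feed->parse(\$content) };
    }
    die "feed crashed XML::Feed! ($@)" if $@;
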
@@ -262,17 +330,14 @@ sub aggregate () { #{{{
                foreach my $entry ($f->entries) {
                        add_page(
                                feed => $feed,
+                               copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                link => $entry->link,
-                               content => $entry->content->body,
+                               content => defined $entry->content->body ? $entry->content->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->name,
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                        );
                }
-
-               $feed->{message}=sprintf(gettext("processed ok at %s"),
-                       displaytime($feed->{lastupdate}));
-               $feed->{error}=0;
        }
 } #}}}
 
@@ -313,9 +378,8 @@ sub add_page (@) { #{{{
 
                # Make sure that the file name isn't too long. 
                # NB: This doesn't check for path length limits.
-               eval q{use POSIX};
                my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-               if (defined $max && length(htmlpage($page)) >= $max) {
+               if (defined $max && length(htmlfn($page)) >= $max) {
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
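
POSIX::pathconf with _PC_NAME_MAX asks the filesystem holding $config{srcdir} for its per-component filename limit (commonly 255 bytes), so guid-derived page names can be shortened before writefile would fail; the local eval q{use POSIX} is dropped, presumably because POSIX is already loaded elsewhere. Usage in isolation:

    use POSIX ();
    my $max = POSIX::pathconf($dir, &POSIX::_PC_NAME_MAX);
    # pathconf may return undef when the limit is indeterminate
    warn "names longer than $max bytes will not fit" if defined $max;
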
@@ -346,17 +410,20 @@ sub add_page (@) { #{{{
        $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
        $template->param(name => $feed->{name});
        $template->param(url => $feed->{url});
+       $template->param(copyright => $params{copyright})
+               if defined $params{copyright} && length $params{copyright};
        $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
-       writefile(htmlpage($guid->{page}), $config{srcdir},
+       writefile(htmlfn($guid->{page}), $config{srcdir},
                $template->output);
 
        # Set the mtime, this lets the build process get the right creation
        # time on record for the new page.
-       utime $mtime, $mtime, pagefile($guid->{page}) if defined $mtime;
+       utime $mtime, $mtime, pagefile($guid->{page})
+               if defined $mtime && $mtime <= time;
 } #}}}
 
 sub htmlescape ($) { #{{{
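
The utime call that backdates a page file to the feed entry's creation time is now guarded with $mtime <= time: feeds occasionally carry postdated entries, and a file stamped in the future would skew the creation times ikiwiki records during the build. In isolation:

    # backdate $file to the entry's timestamp, but never into the future
    utime($mtime, $mtime, $file)
        if defined $mtime && $mtime <= time;
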
@@ -426,7 +493,11 @@ sub remove_feeds () { #{{{
 sub pagefile ($) { #{{{
        my $page=shift;
 
-       return "$config{srcdir}/".htmlpage($page);
+       return "$config{srcdir}/".htmlfn($page);
+} #}}}
+
+sub htmlfn ($) { #{{{
+       return shift().".".$config{htmlext};
 } #}}}
 
 1