sipb.mit.edu Git - ikiwiki.git/blobdiff - IkiWiki/Plugin/aggregate.pm
* aggregate: Forking a child broke the one state that mattered: Forcing
[ikiwiki.git] / IkiWiki / Plugin / aggregate.pm
index 0da527f25e7bff733b0f64473e6fc7f67d323982..736b0e0d5977c67428222c363ae29be9006fc34e 100644 (file)
@@ -37,14 +37,30 @@ sub checkconfig () { #{{{
                        debug("wiki is locked by another process, not aggregating");
                        exit 1;
                }
-       
-               loadstate();
-               IkiWiki::loadindex();
-               aggregate();
-               expire();
-               savestate();
-               clearstate();
 
+               loadstate();
+               my @feeds=needsaggregate();
+               return unless @feeds;
+
+               # Fork a child process to handle the aggregation.
+               # The parent process will then handle building the
+               # result. This avoids messy code to clear state
+               # accumulated while aggregating.
+               defined(my $pid = fork) or error("Can't fork: $!");
+               if (! $pid) {
+                       IkiWiki::loadindex();
+                       aggregate(@feeds);
+                       expire();
+                       savestate();
+                       exit 0;
+               }
+               waitpid($pid,0);
+               if ($?) {
+                       error "aggregation failed with code $?";
+               }
+               $IkiWiki::forcerebuild{$_->{sourcepage}}=1
+                       foreach @feeds;
+               
                IkiWiki::unlockwiki();
        }
 } #}}}
@@ -55,7 +71,8 @@ sub needsbuild (@) { #{{{
        loadstate(); # if not already loaded
 
        foreach my $feed (values %feeds) {
-               if (grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
+               if (exists $pagesources{$feed->{sourcepage}} && 
+                   grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
                        # Mark all feeds originating on this page as removable;
                        # preprocess will unmark those that still exist.
                        remove_feeds($feed->{sourcepage});
@@ -211,12 +228,6 @@ sub savestate () { #{{{
                error("rename $newfile: $!", $cleanup);
 } #}}}
 
-sub clearstate () { #{{{
-       %feeds=();
-       %guids=();
-       $state_loaded=0;
-} #}}}
-
 sub expire () { #{{{
        foreach my $feed (values %feeds) {
                next unless $feed->{expireage} || $feed->{expirecount};
@@ -248,7 +259,12 @@ sub expire () { #{{{
        }
 } #}}}
 
-sub aggregate () { #{{{
+sub needsaggregate () { #{{{
+       return values %feeds if $config{rebuild};
+       return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
+} #}}}
+
+sub aggregate (@) { #{{{
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
@@ -256,15 +272,12 @@ sub aggregate () { #{{{
        eval q{use HTML::Entities};
        error($@) if $@;
 
-       foreach my $feed (values %feeds) {
-               next unless $config{rebuild} || 
-                       time - $feed->{lastupdate} >= $feed->{updateinterval};
+       foreach my $feed (@_) {
                $feed->{lastupdate}=time;
                $feed->{newposts}=0;
                $feed->{message}=sprintf(gettext("processed ok at %s"),
                        displaytime($feed->{lastupdate}));
                $feed->{error}=0;
-               $IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
 
                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));