X-Git-Url: https://sipb.mit.edu/gitweb.cgi/ikiwiki.git/blobdiff_plain/2a6e353c205a6c2c8b8e2eaf85fe9c585c1af0cd..fafb2edaa7aeb1293e716fa96f087cb713f4a70a:/IkiWiki/Plugin/aggregate.pm

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index ea27fc199..71368e254 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -17,7 +17,7 @@ my %guids;
 sub import { #{{{
 	hook(type => "getopt", id => "aggregate", call => \&getopt);
 	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
-	hook(type => "filter", id => "aggregate", call => \&filter);
+	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
 	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
 	hook(type => "delete", id => "aggregate", call => \&delete);
 	hook(type => "savestate", id => "aggregate", call => \&savestate);
@@ -37,28 +37,41 @@ sub checkconfig () { #{{{
 			debug("wiki is locked by another process, not aggregating");
 			exit 1;
 		}
-
-		loadstate();
-		IkiWiki::loadindex();
-		aggregate();
-		expire();
-		savestate();
-		clearstate();
-
+
+		# Fork a child process to handle the aggregation.
+		# The parent process will then handle building the result.
+		# This avoids messy code to clear state accumulated while
+		# aggregating.
+		defined(my $pid = fork) or error("Can't fork: $!");
+		if (! $pid) {
+			loadstate();
+			IkiWiki::loadindex();
+			aggregate();
+			expire();
+			savestate();
+			exit 0;
+		}
+		waitpid($pid,0);
+		if ($?) {
+			error "aggregation failed with code $?";
+		}
+
 		IkiWiki::unlockwiki();
 	}
 } #}}}
 
-sub filter (@) { #{{{
-	my %params=@_;
-	my $page=$params{page};
-
+sub needsbuild (@) { #{{{
+	my $needsbuild=shift;
+
 	loadstate(); # if not already loaded
 
-	# Mark all feeds originating on this page as removable;
-	# preprocess will unmark those that still exist.
-	remove_feeds($page);
-	return $params{content};
+	foreach my $feed (values %feeds) {
+		if (grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
+			# Mark all feeds originating on this page as removable;
+			# preprocess will unmark those that still exist.
+			remove_feeds($feed->{sourcepage});
+		}
+	}
 } # }}}
 
 sub preprocess (@) { #{{{
@@ -127,6 +140,7 @@ sub delete (@) { #{{{
 my $state_loaded=0;
 sub loadstate () { #{{{
 	return if $state_loaded;
+	$state_loaded=1;
 	if (-e "$config{wikistatedir}/aggregate") {
 		open(IN, "$config{wikistatedir}/aggregate") ||
 			die "$config{wikistatedir}/aggregate: $!";
@@ -157,17 +171,14 @@ sub loadstate () { #{{{
 		}
 
 		close IN;
-
-		$state_loaded=1;
 	}
 } #}}}
 
 sub savestate () { #{{{
+	return unless $state_loaded;
 	eval q{use HTML::Entities};
 	error($@) if $@;
 	my $newfile="$config{wikistatedir}/aggregate.new";
-	# TODO: This cleanup function could use improvement. Any newly
-	# aggregated files are left behind unrecorded, and should be deleted.
 	my $cleanup = sub { unlink($newfile) };
 	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
 	foreach my $data (values %feeds, values %guids) {
@@ -180,7 +191,8 @@ sub savestate () { #{{{
 				}
 			}
 			else {
-				unlink pagefile($data->{page});
+				unlink pagefile($data->{page})
+					if exists $data->{page};
 			}
 			next;
 		}
@@ -210,12 +222,6 @@ sub savestate () { #{{{
 		error("rename $newfile: $!", $cleanup);
 } #}}}
 
-sub clearstate () { #{{{
-	%feeds=();
-	%guids=();
-	$state_loaded=0;
-} #}}}
-
 sub expire () { #{{{
 	foreach my $feed (values %feeds) {
 		next unless $feed->{expireage} || $feed->{expirecount};
@@ -228,7 +234,7 @@ sub expire () { #{{{
 				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
 				if ($days_old > $feed->{expireage}) {
 					debug(sprintf(gettext("expiring %s (%s days old)"),
-						$item->{page}, $days_old));
+						$item->{page}, int($days_old)));
 					$item->{expired}=1;
 				}
 			}
@@ -300,6 +306,13 @@ sub aggregate () { #{{{
 			$content=Encode::decode_utf8($content);
 			$f=eval{XML::Feed->parse(\$content)};
 		}
+		if ($@) {
+			# Another possibility is badly escaped entities.
+			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
+			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
+			$content=Encode::decode_utf8($content);
+			$f=eval{XML::Feed->parse(\$content)};
+		}
 		if ($@) {
 			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
 			$feed->{error}=1;
@@ -316,6 +329,7 @@ sub aggregate () { #{{{
 		foreach my $entry ($f->entries) {
 			add_page(
 				feed => $feed,
+				copyright => $f->copyright,
 				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
 				link => $entry->link,
 				content => defined $entry->content->body ? $entry->content->body : "",
@@ -395,6 +409,8 @@ sub add_page (@) { #{{{
 	$template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
 	$template->param(name => $feed->{name});
 	$template->param(url => $feed->{url});
+	$template->param(copyright => $params{copyright})
+		if defined $params{copyright} && length $params{copyright};
 	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
 		if defined $params{link};
 	if (ref $feed->{tags}) {
@@ -405,7 +421,8 @@ sub add_page (@) { #{{{
 
 	# Set the mtime, this lets the build process get the right creation
 	# time on record for the new page.
-	utime $mtime, $mtime, pagefile($guid->{page}) if defined $mtime;
+	utime $mtime, $mtime, pagefile($guid->{page})
+		if defined $mtime && $mtime <= time;
 } #}}}
 
 sub htmlescape ($) { #{{{