# Blog aggregation plugin.
package IkiWiki::Plugin::aggregate;

use open qw{:utf8 :std};
	hook(type => "getopt", id => "aggregate", call => \&getopt);
	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
	hook(type => "delete", id => "aggregate", call => \&delete);
	hook(type => "savestate", id => "aggregate", call => \&savestate);
	eval q{use Getopt::Long};
	Getopt::Long::Configure('pass_through');
	GetOptions("aggregate" => \$config{aggregate});
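
# When run with --aggregate (and the post-commit hook is not going to do the
# work), checkconfig takes the wiki lock, forks a child process to do the
# actual feed aggregation, and then forces a rebuild of the pages that
# contain the updated feeds.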
sub checkconfig () { #{{{
	if ($config{aggregate} && ! ($config{post_commit} &&
	    IkiWiki::commit_hook_enabled())) {
		if (! IkiWiki::lockwiki(0)) {
			debug("wiki is locked by another process, not aggregating");
		my @feeds=needsaggregate();
		# Fork a child process to handle the aggregation.
		# The parent process will then handle building the
		# result. This avoids messy code to clear state
		# accumulated while aggregating.
		defined(my $pid = fork) or error("Can't fork: $!");
			error "aggregation failed with code $?";
		$IkiWiki::forcerebuild{$_->{sourcepage}}=1
		IkiWiki::unlockwiki();
sub needsbuild (@) { #{{{
	loadstate(); # if not already loaded
	foreach my $feed (values %feeds) {
		if (exists $pagesources{$feed->{sourcepage}} &&
		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
			# Mark all feeds originating on this page as removable;
			# preprocess will unmark those that still exist.
			remove_feeds($feed->{sourcepage});
sub preprocess (@) { #{{{
	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
	my $name=$params{name};
	if (exists $feeds{$name}) {
	$feed->{sourcepage}=$params{page};
	$feed->{url}=$params{url};
	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
	($dir)=$dir=~/$config{wiki_file_regexp}/;
	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
	delete $feed->{remove};
	delete $feed->{expired};
	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
	$feed->{numposts}=0 unless defined $feed->{numposts};
	$feed->{newposts}=0 unless defined $feed->{newposts};
	$feed->{message}=gettext("new feed") unless defined $feed->{message};
	$feed->{error}=0 unless defined $feed->{error};
			push @{$feed->{tags}}, $value;
	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
	       ($feed->{error} ? "<em>" : "").$feed->{message}.
	       ($feed->{error} ? "</em>" : "").
	       " (".$feed->{numposts}." ".gettext("posts").
	       ($feed->{newposts} ? "; ".$feed->{newposts}.
	       " ".gettext("new") : "").
sub delete (@) { #{{{
	# Remove feed data for removed pages.
	foreach my $file (@files) {
		my $page=pagename($file);
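
# The aggregation state is kept in $config{wikistatedir}/aggregate: one line
# per feed or guid item, holding space-separated field=value pairs. Fields
# whose values may contain whitespace (name, feed, guid, message) are
# entity-encoded.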
sub loadstate () { #{{{
	return if $state_loaded;
	if (-e "$config{wikistatedir}/aggregate") {
		open(IN, "$config{wikistatedir}/aggregate") ||
			die "$config{wikistatedir}/aggregate: $!";
			$_=IkiWiki::possibly_foolish_untaint($_);
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					$data->{$field}=decode_entities($val, " \t\n");
				elsif ($field eq "tag") {
					push @{$data->{tags}}, $val;
					$data->{$field}=$val;
			if (exists $data->{name}) {
				$feeds{$data->{name}}=$data;
			elsif (exists $data->{guid}) {
				$guids{$data->{guid}}=$data;
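
# Write the state back out. Feeds and items marked for removal, and expired
# items, have their generated pages unlinked; the new state file is put into
# place with a rename.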
sub savestate () { #{{{
	return unless $state_loaded;
	eval q{use HTML::Entities};
	my $newfile="$config{wikistatedir}/aggregate.new";
	my $cleanup = sub { unlink($newfile) };
	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
	foreach my $data (values %feeds, values %guids) {
		if ($data->{remove}) {
				foreach my $guid (values %guids) {
					if ($guid->{feed} eq $data->{name}) {
				unlink pagefile($data->{page})
					if exists $data->{page};
		elsif ($data->{expired} && exists $data->{page}) {
			unlink pagefile($data->{page});
			delete $data->{page};
		foreach my $field (keys %$data) {
			if ($field eq "name" || $field eq "feed" ||
			    $field eq "guid" || $field eq "message") {
				push @line, "$field=".encode_entities($data->{$field}, " \t\n");
			elsif ($field eq "tags") {
				push @line, "tag=$_" foreach @{$data->{tags}};
				push @line, "$field=".$data->{$field};
		print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
	close OUT || error("save $newfile: $!", $cleanup);
	rename($newfile, "$config{wikistatedir}/aggregate") ||
		error("rename $newfile: $!", $cleanup);
	foreach my $feed (values %feeds) {
		next unless $feed->{expireage} || $feed->{expirecount};
		foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
			if ($feed->{expireage}) {
				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
				if ($days_old > $feed->{expireage}) {
					debug(sprintf(gettext("expiring %s (%s days old)"),
						$item->{page}, int($days_old)));
			elsif ($feed->{expirecount} &&
			       $count >= $feed->{expirecount}) {
				debug(sprintf(gettext("expiring %s"), $item->{page}));
				if (! $seen{$item->{page}}) {
					$seen{$item->{page}}=1;
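
# Return the feeds that are due to be polled, based on each feed's
# updateinterval (or all feeds when the wiki is being rebuilt).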
sub needsaggregate () { #{{{
	return values %feeds if $config{rebuild};
	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
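
# Poll each feed passed in: discover its feed url if none was given, download
# it with URI::Fetch, parse it with XML::Feed (retrying with invalid UTF-8
# stripped and with stray entities escaped if the parser crashes), and add a
# page for each entry found.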
sub aggregate (@) { #{{{
	eval q{use XML::Feed};
	eval q{use URI::Fetch};
	eval q{use HTML::Entities};
	foreach my $feed (@_) {
		$feed->{lastupdate}=time;
		$feed->{message}=sprintf(gettext("processed ok at %s"),
			displaytime($feed->{lastupdate}));
		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				debug($feed->{message});
			$feed->{feedurl}=pop @urls;
		my $res=URI::Fetch->fetch($feed->{feedurl});
			$feed->{message}=URI::Fetch->errstr;
			debug($feed->{message});
		if ($res->status == URI::Fetch::URI_GONE()) {
			$feed->{message}=gettext("feed not found");
			debug($feed->{message});
		my $content=$res->content;
		my $f=eval{XML::Feed->parse(\$content)};
			# One common cause of XML::Feed crashing is a feed
			# that contains invalid UTF-8 sequences. Convert
			# feed to ascii to try to work around.
			$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
			$content=Encode::decode_utf8($content);
			$f=eval{XML::Feed->parse(\$content)};
			# Another possibility is badly escaped entities.
			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
			$content=Encode::decode_utf8($content);
			$f=eval{XML::Feed->parse(\$content)};
			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
			debug($feed->{message});
			$feed->{message}=XML::Feed->errstr;
			debug($feed->{message});
		foreach my $entry ($f->entries) {
				copyright => $f->copyright,
				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
				link => $entry->link,
				content => defined $entry->content->body ? $entry->content->body : "",
				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
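
# Create or update the wiki page for a single feed item. New items are
# assigned an unused page name under the feed's directory; the page is only
# (re)written when the md5 digest of the item's content has changed (or the
# wiki is being rebuilt).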
sub add_page (@) { #{{{
	my $feed=$params{feed};
	if (exists $guids{$params{guid}}) {
		# updating an existing post
		$guid=$guids{$params{guid}};
		return if $guid->{expired};
		$guid->{guid}=$params{guid};
		$guids{$params{guid}}=$guid;
		$mtime=$params{ctime};
		# assign it an unused page
		my $page=IkiWiki::titlepage($params{title});
		# escape slashes and periods in title so it doesn't specify
		# directory name or trigger ".." disallowing code.
		$page=~s!([/.])!"__".ord($1)."__"!eg;
		$page=$feed->{dir}."/".$page;
		($page)=$page=~/$config{wiki_file_regexp}/;
		if (! defined $page || ! length $page) {
			$page=$feed->{dir}."/item";
		while (exists $IkiWiki::pagecase{lc $page.$c} ||
		       -e pagefile($page.$c)) {
		# Make sure that the file name isn't too long.
		# NB: This doesn't check for path length limits.
		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
		if (defined $max && length(htmlfn($page)) >= $max) {
			$page=$feed->{dir}."/item";
			while (exists $IkiWiki::pagecase{lc $page.$c} ||
			       -e pagefile($page.$c)) {
		debug(sprintf(gettext("creating new page %s"), $page));
	$guid->{feed}=$feed->{name};
	# To write or not to write? Need to avoid writing unchanged pages
	# to avoid unnecessary rebuilding. The mtime from rss cannot be
	# trusted; let's use a digest.
	eval q{use Digest::MD5 'md5_hex'};
	my $digest=md5_hex(Encode::encode_utf8($params{content}));
	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
	$guid->{md5}=$digest;
	my $template=template("aggregatepost.tmpl", blind_cache => 1);
	$template->param(title => $params{title})
		if defined $params{title} && length($params{title});
	$template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
	$template->param(name => $feed->{name});
	$template->param(url => $feed->{url});
	$template->param(copyright => $params{copyright})
		if defined $params{copyright} && length $params{copyright};
	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
		if defined $params{link};
	if (ref $feed->{tags}) {
		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
	writefile(htmlfn($guid->{page}), $config{srcdir},
	# Set the mtime; this lets the build process get the right creation
	# time on record for the new page.
	utime $mtime, $mtime, pagefile($guid->{page})
		if defined $mtime && $mtime <= time;
sub htmlescape ($) { #{{{
	# escape accidental wikilinks and preprocessor stuff
	$html=~s/(?<!\\)\[\[/\\\[\[/g;
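
# Make a possibly-relative url absolute, resolving it against the given base url.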
sub urlabs ($$) { #{{{
	URI->new_abs($url, $urlbase)->as_string;
sub htmlabs ($$) { #{{{
	# Convert links in html from relative to absolute.
	# Note that this is a heuristic, which is not specified by the rss
	# spec and may not be right for all feeds. Also, see Debian
	my $p = HTML::Parser->new(api_version => 3);
	$p->handler(default => sub { $ret.=join("", @_) }, "text");
	$p->handler(start => sub {
		my ($tagname, $pos, $text) = @_;
		if (ref $HTML::Tagset::linkElements{$tagname}) {
				# use attribute sets from right to left
				# to avoid invalidating the offsets
				# when replacing the values
				my($k_offset, $k_len, $v_offset, $v_len) =
				my $attrname = lc(substr($text, $k_offset, $k_len));
				next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
				next unless $v_offset; # 0 v_offset means no value
				my $v = substr($text, $v_offset, $v_len);
				$v =~ s/^([\'\"])(.*)\1$/$2/;
				my $new_v=urlabs($v, $urlbase);
				$new_v =~ s/\"/&quot;/g; # since we quote with ""
				substr($text, $v_offset, $v_len) = qq("$new_v");
	}, "tagname, tokenpos, text");
sub remove_feeds () { #{{{
	foreach my $id (keys %feeds) {
		if ($feeds{$id}->{sourcepage} eq $page) {
			$feeds{$id}->{remove}=1;
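
# Return the path, under srcdir, of the source file for an aggregated page.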
sub pagefile ($) { #{{{
	return "$config{srcdir}/".htmlfn($page);
sub htmlfn ($) { #{{{
	return shift().".".$config{htmlext};