#!/usr/bin/perl
# Feed aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 2.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
use URI;
use open qw{:utf8 :std};

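# In-memory aggregation state, read from and written to the state file
# (see loadstate/savestate below). %feeds is keyed by feed name;
# %guids is keyed by the globally unique id of each aggregated item.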
my %feeds;
my %guids;

sub import { #{{{
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "getsetup", id => "aggregate", call => \&getsetup);
        hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
        hook(type => "delete", id => "aggregate", call => \&delete);
        hook(type => "savestate", id => "aggregate", call => \&savestate);
        hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
        if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
                hook(type => "cgi", id => "aggregate", call => \&cgi);
        }
} # }}}

sub getopt () { #{{{
        eval q{use Getopt::Long};
        error($@) if $@;
        Getopt::Long::Configure('pass_through');
        GetOptions(
                "aggregate" => \$config{aggregate},
                "aggregateinternal!" => \$config{aggregateinternal},
        );
} #}}}

sub getsetup () { #{{{
        return
                aggregateinternal => {
                        type => "boolean",
                        default => 0,
                        description => "enable aggregation to internal pages",
                        safe => 0, # enabling needs manual transition
                        rebuild => 0,
                },
                aggregate_webtrigger => {
                        type => "boolean",
                        default => 0,
                        description => "allow aggregation to be triggered via the web",
                        safe => 1,
                        rebuild => 0,
                },
} #}}}

sub checkconfig () { #{{{
        if ($config{aggregate} && ! ($config{post_commit} &&
                                     IkiWiki::commit_hook_enabled())) {
                launchaggregation();
        }
} #}}}

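# With aggregate_webtrigger enabled, aggregation can be triggered over
# the web by fetching the wiki's CGI with do=aggregate_webtrigger, e.g.
# (illustrative URL):
#   http://example.com/ikiwiki.cgi?do=aggregate_webtrigger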
sub cgi ($) { #{{{
        my $cgi=shift;

        if (defined $cgi->param('do') &&
            $cgi->param("do") eq "aggregate_webtrigger") {
                $|=1;
                print "Content-Type: text/plain\n\n";
                $config{cgi}=0;
                $config{verbose}=1;
                $config{syslog}=0;
                print gettext("Aggregation triggered via web.")."\n\n";
                if (launchaggregation()) {
                        IkiWiki::lockwiki();
                        IkiWiki::loadindex();
                        require IkiWiki::Render;
                        IkiWiki::refresh();
                        IkiWiki::saveindex();
                }
                else {
                        print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
                }
                exit 0;
        }
} #}}}

sub launchaggregation () { #{{{
        # See if any feeds need aggregation.
        loadstate();
        my @feeds=needsaggregate();
        return unless @feeds;
        if (! lockaggregate()) {
                debug("an aggregation process is already running");
                return;
        }
        # force a later rebuild of source pages
        $IkiWiki::forcerebuild{$_->{sourcepage}}=1
                foreach @feeds;

        # Fork a child process to handle the aggregation.
        # The parent process will then handle building the
        # result. This avoids messy code to clear state
        # accumulated while aggregating.
        defined(my $pid = fork) or error("Can't fork: $!");
        if (! $pid) {
                IkiWiki::loadindex();
                # Aggregation happens without the main wiki lock
                # being held. This allows editing pages etc while
                # aggregation is running.
                aggregate(@feeds);

                IkiWiki::lockwiki;
                # Merge changes, since aggregation state may have
                # changed on disk while the aggregation was happening.
                mergestate();
                expire();
                savestate();
                IkiWiki::unlockwiki;
                exit 0;
        }
        waitpid($pid,0);
        if ($?) {
                error "aggregation failed with code $?";
        }

        clearstate();
        unlockaggregate();

        return 1;
} #}}}

# Pages with the _aggregated extension contain plain HTML markup;
# pass the content through unchanged.
sub htmlize (@) { #{{{
        my %params=@_;
        return $params{content};
} #}}}

# Used by ikiwiki-transition aggregateinternal.
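# It renames each aggregated source page from the .$config{htmlext}
# extension to the internal ._aggregated extension, and prunes the
# page's old rendered output from the destdir.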
sub migrate_to_internal { #{{{
        if (! lockaggregate()) {
                error("an aggregation process is currently running");
        }

        IkiWiki::lockwiki();
        loadstate();
        $config{verbose}=1;

        foreach my $data (values %guids) {
                next unless $data->{page};
                next if $data->{expired};

                $config{aggregateinternal} = 0;
                my $oldname = pagefile($data->{page});
                my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});

                $config{aggregateinternal} = 1;
                my $newname = pagefile($data->{page});

                debug "moving $oldname -> $newname";
                if (-e $newname) {
                        if (-e $oldname) {
                                error("$newname already exists");
                        }
                        else {
                                debug("already renamed to $newname?");
                        }
                }
                elsif (-e $oldname) {
                        rename($oldname, $newname) || error("$!");
                }
                else {
                        debug("$oldname not found");
                }
                if (-e $oldoutput) {
                        require IkiWiki::Render;
                        debug("removing output file $oldoutput");
                        IkiWiki::prune($oldoutput);
                }
        }

        savestate();
        IkiWiki::unlockwiki;

        unlockaggregate();
} #}}}

sub needsbuild (@) { #{{{
        my $needsbuild=shift;

        loadstate();

        foreach my $feed (values %feeds) {
                if (exists $pagesources{$feed->{sourcepage}} &&
                    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
                        # Mark all feeds originating on this page as
                        # not yet seen; preprocess will unmark those that
                        # still exist.
                        markunseen($feed->{sourcepage});
                }
        }
} # }}}

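# Handles the aggregate directive. Only name and url are required; the
# other parameters shown here are optional, tag may be repeated, and
# all of the values are illustrative:
#   [[!aggregate name="example blog" url="http://example.com/"
#     feedurl="http://example.com/index.rss" dir="news/example"
#     updateinterval=60 expireage=30 expirecount=20 tag=news]]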
sub preprocess (@) { #{{{
        my %params=@_;

        foreach my $required (qw{name url}) {
                if (! exists $params{$required}) {
                        error sprintf(gettext("missing %s parameter"), $required);
                }
        }

        my $feed={};
        my $name=$params{name};
        if (exists $feeds{$name}) {
                $feed=$feeds{$name};
        }
        else {
                $feeds{$name}=$feed;
        }
        $feed->{name}=$name;
        $feed->{sourcepage}=$params{page};
        $feed->{url}=$params{url};
        my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
        $dir=~s/^\/+//;
        ($dir)=$dir=~/$config{wiki_file_regexp}/;
        $feed->{dir}=$dir;
        $feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
        $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
        $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
        $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
        if (exists $params{template}) {
                $params{template}=~s/[^-_a-zA-Z0-9]+//g;
        }
        else {
                $params{template} = "aggregatepost";
        }
        $feed->{template}=$params{template} . ".tmpl";
        delete $feed->{unseen};
        $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
        $feed->{numposts}=0 unless defined $feed->{numposts};
        $feed->{newposts}=0 unless defined $feed->{newposts};
        $feed->{message}=gettext("new feed") unless defined $feed->{message};
        $feed->{error}=0 unless defined $feed->{error};
        $feed->{tags}=[];
        while (@_) {
                my $key=shift;
                my $value=shift;
                if ($key eq 'tag') {
                        push @{$feed->{tags}}, $value;
                }
        }

        return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
               ($feed->{error} ? "<em>" : "").$feed->{message}.
               ($feed->{error} ? "</em>" : "").
               " (".$feed->{numposts}." ".gettext("posts").
               ($feed->{newposts} ? "; ".$feed->{newposts}.
                                    " ".gettext("new") : "").
               ")";
} # }}}

sub delete (@) { #{{{
        my @files=@_;

        # Remove feed data for removed pages.
        foreach my $file (@files) {
                my $page=pagename($file);
                markunseen($page);
        }
} #}}}

sub markunseen ($) { #{{{
        my $page=shift;

        foreach my $id (keys %feeds) {
                if ($feeds{$id}->{sourcepage} eq $page) {
                        $feeds{$id}->{unseen}=1;
                }
        }
} #}}}

my $state_loaded=0;

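# The state file ($config{wikistatedir}/aggregate) stores one feed or
# guid record per line, as space-separated field=value pairs; whitespace
# inside the name, feed, guid, and message fields is entity-encoded.
# Illustrative records (all values made up):
#   name=example&#32;blog sourcepage=news url=http://example.com/ numposts=5
#   guid=http://example.com/post/1 feed=example&#32;blog page=news/example_blog/post_1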
sub loadstate () { #{{{
        return if $state_loaded;
        $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
                open(IN, "$config{wikistatedir}/aggregate") ||
                        die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        chomp;
                        my $data={};
                        foreach my $i (split(/ /, $_)) {
                                my ($field, $val)=split(/=/, $i, 2);
                                if ($field eq "name" || $field eq "feed" ||
                                    $field eq "guid" || $field eq "message") {
                                        $data->{$field}=decode_entities($val, " \t\n");
                                }
                                elsif ($field eq "tag") {
                                        push @{$data->{tags}}, $val;
                                }
                                else {
                                        $data->{$field}=$val;
                                }
                        }

                        if (exists $data->{name}) {
                                $feeds{$data->{name}}=$data;
                        }
                        elsif (exists $data->{guid}) {
                                $guids{$data->{guid}}=$data;
                        }
                }

                close IN;
        }
} #}}}

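# State is written to a temporary file which is then renamed into
# place, so an interrupted save cannot leave a truncated state file.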
sub savestate () { #{{{
        return unless $state_loaded;
        garbage_collect();
        my $newfile="$config{wikistatedir}/aggregate.new";
        my $cleanup = sub { unlink($newfile) };
        open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                my @line;
                foreach my $field (keys %$data) {
                        if ($field eq "name" || $field eq "feed" ||
                            $field eq "guid" || $field eq "message") {
                                push @line, "$field=".encode_entities($data->{$field}, " \t\n");
                        }
                        elsif ($field eq "tags") {
                                push @line, "tag=$_" foreach @{$data->{tags}};
                        }
                        else {
                                push @line, "$field=".$data->{$field};
                        }
                }
                # Use low-precedence "or" so the error check applies to
                # print's return value, not to the string being printed.
                print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
        }
        close OUT || error("save $newfile: $!", $cleanup);
        rename($newfile, "$config{wikistatedir}/aggregate") ||
                error("rename $newfile: $!", $cleanup);
} #}}}

sub garbage_collect () { #{{{
        foreach my $name (keys %feeds) {
                # remove any feeds that were not seen while building the pages
                # that used to contain them
                if ($feeds{$name}->{unseen}) {
                        delete $feeds{$name};
                }
        }

        foreach my $guid (values %guids) {
                # any guid whose feed is gone should be removed
                if (! exists $feeds{$guid->{feed}}) {
                        unlink pagefile($guid->{page})
                                if exists $guid->{page};
                        delete $guids{$guid->{guid}};
                }
                # handle expired guids
                elsif ($guid->{expired} && exists $guid->{page}) {
                        unlink pagefile($guid->{page});
                        delete $guid->{page};
                        delete $guid->{md5};
                }
        }
} #}}}

sub mergestate () { #{{{
        # Load the current state in from disk, and merge into it
        # values from the state in memory that might have changed
        # during aggregation.
        my %myfeeds=%feeds;
        my %myguids=%guids;
        clearstate();
        loadstate();

        # All that can change in feed state during aggregation is a few
        # fields.
        foreach my $name (keys %myfeeds) {
                if (exists $feeds{$name}) {
                        foreach my $field (qw{message lastupdate numposts
                                              newposts error}) {
                                $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
                        }
                }
        }

        # New guids can be created during aggregation.
        # It's also possible that guids were removed from the on-disk state
        # while the aggregation was in process. That would only happen if
        # their feed was also removed, so any removed guids added back here
        # will be garbage collected later.
        foreach my $guid (keys %myguids) {
                if (! exists $guids{$guid}) {
                        $guids{$guid}=$myguids{$guid};
                }
        }
} #}}}

sub clearstate () { #{{{
        %feeds=();
        %guids=();
        $state_loaded=0;
} #}}}

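# Expire old items. A feed's expireage is measured in days, and
# expirecount keeps only the newest N items; when both are set,
# expireage is checked first for each item.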
sub expire () { #{{{
        foreach my $feed (values %feeds) {
                next unless $feed->{expireage} || $feed->{expirecount};
                my $count=0;
                my %seen;
                foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
                                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
                                  values %guids) {
                        if ($feed->{expireage}) {
                                my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
                                if ($days_old > $feed->{expireage}) {
                                        debug(sprintf(gettext("expiring %s (%s days old)"),
                                                $item->{page}, int($days_old)));
                                        $item->{expired}=1;
                                }
                        }
                        elsif ($feed->{expirecount} &&
                               $count >= $feed->{expirecount}) {
                                debug(sprintf(gettext("expiring %s"), $item->{page}));
                                $item->{expired}=1;
                        }
                        else {
                                if (! $seen{$item->{page}}) {
                                        $seen{$item->{page}}=1;
                                        $count++;
                                }
                        }
                }
        }
} #}}}

sub needsaggregate () { #{{{
        return values %feeds if $config{rebuild};
        return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
} #}}}

sub aggregate (@) { #{{{
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
        error($@) if $@;

        foreach my $feed (@_) {
                $feed->{lastupdate}=time;
                $feed->{newposts}=0;
                $feed->{message}=sprintf(gettext("processed ok at %s"),
                        displaytime($feed->{lastupdate}));
                $feed->{error}=0;

                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

                if (! length $feed->{feedurl}) {
                        my @urls=XML::Feed->find_feeds($feed->{url});
                        if (! @urls) {
                                $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                                $feed->{error}=1;
                                debug($feed->{message});
                                next;
                        }
                        $feed->{feedurl}=pop @urls;
                }
                my $res=URI::Fetch->fetch($feed->{feedurl});
                if (! $res) {
                        $feed->{message}=URI::Fetch->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                if ($res->status == URI::Fetch::URI_GONE()) {
                        $feed->{message}=gettext("feed not found");
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                my $content=$res->content;
                my $f=eval{XML::Feed->parse(\$content)};
                if ($@) {
                        # One common cause of XML::Feed crashing is a feed
                        # that contains invalid UTF-8 sequences. Convert the
                        # feed to ASCII to try to work around that.
                        $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
                        $content=Encode::decode_utf8($content, 0);
                        $f=eval{XML::Feed->parse(\$content)};
                }
                if ($@) {
                        # Another possibility is badly escaped entities.
                        $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
                        $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
                        $content=Encode::decode_utf8($content, 0);
                        $f=eval{XML::Feed->parse(\$content)};
                }
                if ($@) {
                        $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                if (! $f) {
                        $feed->{message}=XML::Feed->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }

                foreach my $entry ($f->entries) {
                        add_page(
                                feed => $feed,
                                copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                link => $entry->link,
                                content => defined $entry->content->body ? $entry->content->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                        );
                }
        }
} #}}}

sub add_page (@) { #{{{
        my %params=@_;

        my $feed=$params{feed};
        my $guid={};
        my $mtime;
        if (exists $guids{$params{guid}}) {
                # updating an existing post
                $guid=$guids{$params{guid}};
                return if $guid->{expired};
        }
        else {
                # new post
                $guid->{guid}=$params{guid};
                $guids{$params{guid}}=$guid;
                $mtime=$params{ctime};
                $feed->{numposts}++;
                $feed->{newposts}++;

                # assign it an unused page
                my $page=IkiWiki::titlepage($params{title});
                # Escape slashes and periods in the title so it cannot
                # specify a directory name or trigger the ".."-disallowing
                # code.
                $page=~s!([/.])!"__".ord($1)."__"!eg;
                $page=$feed->{dir}."/".$page;
                ($page)=$page=~/$config{wiki_file_regexp}/;
                if (! defined $page || ! length $page) {
                        $page=$feed->{dir}."/item";
                }
                my $c="";
                while (exists $IkiWiki::pagecase{lc $page.$c} ||
                       -e pagefile($page.$c)) {
                        $c++;
                }

                # Make sure that the file name isn't too long.
                # NB: This doesn't check for path length limits.
                my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
                if (defined $max && length(htmlfn($page)) >= $max) {
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
                               -e pagefile($page.$c)) {
                                $c++;
                        }
                }

                $guid->{page}=$page;
                debug(sprintf(gettext("creating new page %s"), $page));
        }
        $guid->{feed}=$feed->{name};

        # To write or not to write? We need to avoid writing unchanged
        # pages, to avoid unnecessary rebuilding. The mtime from rss
        # cannot be trusted; let's use a digest.
        eval q{use Digest::MD5 'md5_hex'};
        error($@) if $@;
        require Encode;
        my $digest=md5_hex(Encode::encode_utf8($params{content}));
        return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
        $guid->{md5}=$digest;

        # Create the page.
        my $template=template($feed->{template}, blind_cache => 1);
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
        $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
        $template->param(name => $feed->{name});
        $template->param(url => $feed->{url});
        $template->param(copyright => $params{copyright})
                if defined $params{copyright} && length $params{copyright};
        $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
        writefile(htmlfn($guid->{page}), $config{srcdir},
                $template->output);

        # Set the mtime; this lets the build process get the right
        # creation time on record for the new page.
        utime $mtime, $mtime, pagefile($guid->{page})
                if defined $mtime && $mtime <= time;
} #}}}

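# For example, "[[foo]]" in feed content becomes "\[[foo]]", so it
# shows up literally instead of being treated as a wikilink or
# preprocessor directive.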
sub htmlescape ($) { #{{{
        # escape accidental wikilinks and preprocessor stuff
        my $html=shift;
        $html=~s/(?<!\\)\[\[/\\\[\[/g;
        return $html;
} #}}}

sub urlabs ($$) { #{{{
        my $url=shift;
        my $urlbase=shift;

        URI->new_abs($url, $urlbase)->as_string;
} #}}}

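# For example, given an urlbase of http://example.com/blog/index.rss
# (illustrative), <a href="/foo"> becomes
# <a href="http://example.com/foo">, and <img src="pic.png"> becomes
# <img src="http://example.com/blog/pic.png">.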
sub htmlabs ($$) { #{{{
        # Convert links in html from relative to absolute.
        # Note that this is a heuristic, which is not specified by the rss
        # spec and may not be right for all feeds. Also, see Debian
        # bug #381359.
        my $html=shift;
        my $urlbase=shift;

        my $ret="";
        my $p = HTML::Parser->new(api_version => 3);
        $p->handler(default => sub { $ret.=join("", @_) }, "text");
        $p->handler(start => sub {
                my ($tagname, $pos, $text) = @_;
                if (ref $HTML::Tagset::linkElements{$tagname}) {
                        while (4 <= @$pos) {
                                # use attribute sets from right to left
                                # to avoid invalidating the offsets
                                # when replacing the values
                                my($k_offset, $k_len, $v_offset, $v_len) =
                                        splice(@$pos, -4);
                                my $attrname = lc(substr($text, $k_offset, $k_len));
                                next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
                                next unless $v_offset; # 0 v_offset means no value
                                my $v = substr($text, $v_offset, $v_len);
                                $v =~ s/^([\'\"])(.*)\1$/$2/;
                                my $new_v=urlabs($v, $urlbase);
                                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                                substr($text, $v_offset, $v_len) = qq("$new_v");
                        }
                }
                $ret.=$text;
        }, "tagname, tokenpos, text");
        $p->parse($html);
        $p->eof;

        return $ret;
} #}}}

sub pagefile ($) { #{{{
        my $page=shift;

        return "$config{srcdir}/".htmlfn($page);
} #}}}

sub htmlfn ($) { #{{{
        return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
} #}}}

my $aggregatelock;

sub lockaggregate () { #{{{
        # Take an exclusive lock to prevent multiple concurrent aggregators.
        # Returns true if the lock was acquired.
        if (! -d $config{wikistatedir}) {
                mkdir($config{wikistatedir});
        }
        open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
                error("cannot open $config{wikistatedir}/aggregatelock: $!");
        if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
                close($aggregatelock) || error("failed closing aggregatelock: $!");
                return 0;
        }
        return 1;
} #}}}

sub unlockaggregate () { #{{{
        return close($aggregatelock) if $aggregatelock;
        return;
} #}}}

1