#!/usr/bin/perl
# Feed aggregation plugin.
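#
# Aggregation is normally run by passing --aggregate to ikiwiki (see
# getopt below); a typical cron job (the setup filename is illustrative):
#   ikiwiki --setup ~/wiki.setup --aggregate --refresh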
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 3.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
use open qw{:utf8 :std};

my %feeds;
my %guids;

sub import {
	hook(type => "getopt", id => "aggregate", call => \&getopt);
	hook(type => "getsetup", id => "aggregate", call => \&getsetup);
	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
	hook(type => "delete", id => "aggregate", call => \&delete);
	hook(type => "savestate", id => "aggregate", call => \&savestate);
	hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
	if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
		hook(type => "cgi", id => "aggregate", call => \&cgi);
	}
}

sub getopt () {
	eval q{use Getopt::Long};
	error($@) if $@;
	Getopt::Long::Configure('pass_through');
	GetOptions(
		"aggregate" => \$config{aggregate},
		"aggregateinternal!" => \$config{aggregateinternal},
	);
}

sub getsetup () {
	return
		plugin => {
			safe => 1,
			rebuild => undef,
		},
		aggregateinternal => {
			type => "boolean",
			example => 1,
			description => "enable aggregation to internal pages?",
			safe => 0, # enabling needs manual transition
			rebuild => 0,
		},
		aggregate_webtrigger => {
			type => "boolean",
			example => 0,
			description => "allow aggregation to be triggered via the web?",
			safe => 1,
			rebuild => 0,
		},
}

sub checkconfig () {
	if (! defined $config{aggregateinternal}) {
		$config{aggregateinternal}=1;
	}

	if ($config{aggregate} && ! ($config{post_commit} &&
	                             IkiWiki::commit_hook_enabled())) {
		launchaggregation();
	}
}

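# When aggregate_webtrigger is enabled, aggregation can also be kicked
# off over the web by fetching the CGI with do=aggregate_webtrigger,
# e.g. (the wiki URL is illustrative):
#   wget -q -O - 'http://wiki.example.com/ikiwiki.cgi?do=aggregate_webtrigger'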
sub cgi ($) {
	my $cgi=shift;

	if (defined $cgi->param('do') &&
	    $cgi->param("do") eq "aggregate_webtrigger") {
		$|=1;
		print "Content-Type: text/plain\n\n";
		$config{cgi}=0;
		$config{verbose}=1;
		$config{syslog}=0;
		print gettext("Aggregation triggered via web.")."\n\n";
		if (launchaggregation()) {
			IkiWiki::lockwiki();
			IkiWiki::loadindex();
			require IkiWiki::Render;
			IkiWiki::refresh();
			IkiWiki::saveindex();
		}
		else {
			print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
		}
		exit 0;
	}
}

sub launchaggregation () {
	# See if any feeds need aggregation.
	loadstate();
	my @feeds=needsaggregate();
	return unless @feeds;
	if (! lockaggregate()) {
		debug("an aggregation process is already running");
		return;
	}
	# force a later rebuild of source pages
	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
		foreach @feeds;

	# Fork a child process to handle the aggregation.
	# The parent process will then handle building the
	# result. This avoids messy code to clear state
	# accumulated while aggregating.
	defined(my $pid = fork) or error("Can't fork: $!");
	if (! $pid) {
		IkiWiki::loadindex();
		# Aggregation happens without the main wiki lock
		# being held. This allows editing pages etc while
		# aggregation is running.
		aggregate(@feeds);

		IkiWiki::lockwiki;
		# Merge changes, since aggregation state may have
		# changed on disk while the aggregation was happening.
		mergestate();
		expire();
		savestate();
		IkiWiki::unlockwiki;
		exit 0;
	}
	waitpid($pid,0);
	if ($?) {
		error "aggregation failed with code $?";
	}

	clearstate();
	unlockaggregate();

	return 1;
}

# Pages with extension _aggregated have plain html markup, pass through.
sub htmlize (@) {
	my %params=@_;
	return $params{content};
}

# Used by ikiwiki-transition aggregateinternal.
sub migrate_to_internal {
	if (! lockaggregate()) {
		error("an aggregation process is currently running");
	}

	IkiWiki::lockwiki();
	loadstate();
	$config{verbose}=1;

	foreach my $data (values %guids) {
		next unless $data->{page};
		next if $data->{expired};

		$config{aggregateinternal} = 0;
		my $oldname = "$config{srcdir}/".htmlfn($data->{page});
		if (! -e $oldname) {
			$oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
		}

		my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});

		$config{aggregateinternal} = 1;
		my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});

		debug "moving $oldname -> $newname";
		if (-e $newname) {
			if (-e $oldname) {
				error("$newname already exists");
			}
			else {
				debug("already renamed to $newname?");
			}
		}
		elsif (-e $oldname) {
			rename($oldname, $newname) || error("$!");
		}
		else {
			debug("$oldname not found");
		}
		if (-e $oldoutput) {
			require IkiWiki::Render;
			debug("removing output file $oldoutput");
			IkiWiki::prune($oldoutput);
		}
	}

	savestate();
	IkiWiki::unlockwiki;

	unlockaggregate();
}

sub needsbuild (@) {
	my $needsbuild=shift;

	loadstate();

	foreach my $feed (values %feeds) {
		if (exists $pagesources{$feed->{sourcepage}} &&
		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
			# Mark all feeds originating on this page as
			# not yet seen; preprocess will unmark those that
			# still exist.
			markunseen($feed->{sourcepage});
		}
	}

	return $needsbuild;
}

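# Handles the aggregate directive. A hypothetical page using it, with
# the parameters read below (updateinterval is in minutes, expireage
# in days):
#   [[!aggregate name="example blog" url="http://example.com/"
#   feedurl="http://example.com/index.rss" updateinterval="60"
#   expireage="90" tag="example"]]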
sub preprocess (@) {
	my %params=@_;

	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			error sprintf(gettext("missing %s parameter"), $required);
		}
	}

	my $feed={};
	my $name=$params{name};
	if (exists $feeds{$name}) {
		$feed=$feeds{$name};
	}
	else {
		$feeds{$name}=$feed;
	}
	$feed->{name}=$name;
	$feed->{sourcepage}=$params{page};
	$feed->{url}=$params{url};
	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
	$dir=~s/^\/+//;
	($dir)=$dir=~/$config{wiki_file_regexp}/;
	$feed->{dir}=$dir;
	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
	if (exists $params{template}) {
		$params{template}=~s/[^-_a-zA-Z0-9]+//g;
	}
	else {
		$params{template} = "aggregatepost";
	}
	$feed->{template}=$params{template} . ".tmpl";
	delete $feed->{unseen};
	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
	$feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
	$feed->{numposts}=0 unless defined $feed->{numposts};
	$feed->{newposts}=0 unless defined $feed->{newposts};
	$feed->{message}=gettext("new feed") unless defined $feed->{message};
	$feed->{error}=0 unless defined $feed->{error};
	$feed->{tags}=[];
	while (@_) {
		my $key=shift;
		my $value=shift;
		if ($key eq 'tag') {
			push @{$feed->{tags}}, $value;
		}
	}

	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
	       ($feed->{error} ? "<em>" : "").$feed->{message}.
	       ($feed->{error} ? "</em>" : "").
	       " (".$feed->{numposts}." ".gettext("posts").
	       ($feed->{newposts} ? "; ".$feed->{newposts}.
	                            " ".gettext("new") : "").
	       ")";
}

sub delete (@) {
	my @files=@_;

	# Remove feed data for removed pages.
	foreach my $file (@files) {
		my $page=pagename($file);
		markunseen($page);
	}
}

sub markunseen ($) {
	my $page=shift;

	foreach my $id (keys %feeds) {
		if ($feeds{$id}->{sourcepage} eq $page) {
			$feeds{$id}->{unseen}=1;
		}
	}
}

my $state_loaded=0;

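# On-disk state lives in $config{wikistatedir}/aggregate: one feed or
# guid record per line, as space-separated field=value pairs, with
# whitespace in the name, feed, guid and message values entity-encoded.
# An illustrative guid record:
#   guid=http://example.com/post1 feed=exampleblog page=feeds/post1 md5=...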
sub loadstate () {
	return if $state_loaded;
	$state_loaded=1;
	if (-e "$config{wikistatedir}/aggregate") {
		open(IN, "<", "$config{wikistatedir}/aggregate") ||
			die "$config{wikistatedir}/aggregate: $!";
		while (<IN>) {
			$_=IkiWiki::possibly_foolish_untaint($_);
			chomp;
			my $data={};
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					$data->{$field}=decode_entities($val, " \t\n");
				}
				elsif ($field eq "tag") {
					push @{$data->{tags}}, $val;
				}
				else {
					$data->{$field}=$val;
				}
			}

			if (exists $data->{name}) {
				$feeds{$data->{name}}=$data;
			}
			elsif (exists $data->{guid}) {
				$guids{$data->{guid}}=$data;
			}
		}

		close IN;
	}
}

sub savestate () {
	return unless $state_loaded;
	garbage_collect();
	my $newfile="$config{wikistatedir}/aggregate.new";
	my $cleanup = sub { unlink($newfile) };
	open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
	foreach my $data (values %feeds, values %guids) {
		my @line;
		foreach my $field (keys %$data) {
			if ($field eq "name" || $field eq "feed" ||
			    $field eq "guid" || $field eq "message") {
				push @line, "$field=".encode_entities($data->{$field}, " \t\n");
			}
			elsif ($field eq "tags") {
				push @line, "tag=$_" foreach @{$data->{tags}};
			}
			else {
				push @line, "$field=".$data->{$field}
					if defined $data->{$field};
			}
		}
		# low-precedence "or" so print's own return value is checked
		print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
	}
	close OUT || error("save $newfile: $!", $cleanup);
	rename($newfile, "$config{wikistatedir}/aggregate") ||
		error("rename $newfile: $!", $cleanup);

	my $timestamp=undef;
	foreach my $feed (keys %feeds) {
		my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
		if (! defined $timestamp || $timestamp > $t) {
			$timestamp=$t;
		}
	}
	$newfile=~s/\.new$/time/;
	open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
	if (defined $timestamp) {
		print OUT $timestamp."\n";
	}
	close OUT || error("save $newfile: $!", $cleanup);
}

sub garbage_collect () {
	foreach my $name (keys %feeds) {
		# remove any feeds that were not seen while building the pages
		# that used to contain them
		if ($feeds{$name}->{unseen}) {
			delete $feeds{$name};
		}
	}

	foreach my $guid (values %guids) {
		# any guid whose feed is gone should be removed
		if (! exists $feeds{$guid->{feed}}) {
			if (exists $guid->{page}) {
				unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
					|| unlink "$config{srcdir}/".htmlfn($guid->{page});
			}
			delete $guids{$guid->{guid}};
		}
		# handle expired guids
		elsif ($guid->{expired} && exists $guid->{page}) {
			unlink "$config{srcdir}/".htmlfn($guid->{page});
			unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
			delete $guid->{page};
			delete $guid->{md5};
		}
	}
}

sub mergestate () {
	# Load the current state in from disk, and merge into it
	# values from the state in memory that might have changed
	# during aggregation.
	my %myfeeds=%feeds;
	my %myguids=%guids;
	clearstate();
	loadstate();

	# All that can change in feed state during aggregation is a few
	# fields.
	foreach my $name (keys %myfeeds) {
		if (exists $feeds{$name}) {
			foreach my $field (qw{message lastupdate lasttry
			                      numposts newposts error}) {
				$feeds{$name}->{$field}=$myfeeds{$name}->{$field};
			}
		}
	}

	# New guids can be created during aggregation.
	# Guids have a few fields that may be updated during aggregation.
	# It's also possible that guids were removed from the on-disk state
	# while the aggregation was in process. That would only happen if
	# their feed was also removed, so any removed guids added back here
	# will be garbage collected later.
	foreach my $guid (keys %myguids) {
		if (! exists $guids{$guid}) {
			$guids{$guid}=$myguids{$guid};
		}
		else {
			foreach my $field (qw{md5}) {
				$guids{$guid}->{$field}=$myguids{$guid}->{$field};
			}
		}
	}
}

sub clearstate () {
	%feeds=();
	%guids=();
	$state_loaded=0;
}

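# Mark expired items per feed: expireage expires items older than that
# many days, expirecount keeps only the newest posts, counting each
# page once. Expired guids are kept in the state file so the items
# are not aggregated again on the next poll.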
sub expire () {
	foreach my $feed (values %feeds) {
		next unless $feed->{expireage} || $feed->{expirecount};
		my $count=0;
		my %seen;
		foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
		                  values %guids) {
			if ($feed->{expireage}) {
				my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
				if ($days_old > $feed->{expireage}) {
					debug(sprintf(gettext("expiring %s (%s days old)"),
						$item->{page}, int($days_old)));
					$item->{expired}=1;
				}
			}
			elsif ($feed->{expirecount} &&
			       $count >= $feed->{expirecount}) {
				debug(sprintf(gettext("expiring %s"), $item->{page}));
				$item->{expired}=1;
			}
			else {
				if (! $seen{$item->{page}}) {
					$seen{$item->{page}}=1;
					$count++;
				}
			}
		}
	}
}

sub needsaggregate () {
	return values %feeds if $config{rebuild};
	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
}

sub aggregate (@) {
	eval q{use XML::Feed};
	error($@) if $@;
	eval q{use URI::Fetch};
	error($@) if $@;

	foreach my $feed (@_) {
		$feed->{lasttry}=time;
		$feed->{newposts}=0;
		$feed->{message}=sprintf(gettext("last checked %s"),
			displaytime($feed->{lasttry}));
		$feed->{error}=0;

		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
			if (! @urls) {
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				$feed->{error}=1;
				debug($feed->{message});
				next;
			}
			$feed->{feedurl}=pop @urls;
		}
		my $res=URI::Fetch->fetch($feed->{feedurl});
		if (! $res) {
			$feed->{message}=URI::Fetch->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}

		# lastupdate is only set if we were able to contact the server
		$feed->{lastupdate}=$feed->{lasttry};

		if ($res->status == URI::Fetch::URI_GONE()) {
			$feed->{message}=gettext("feed not found");
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		my $content=$res->content;
		my $f=eval{XML::Feed->parse(\$content)};
		if ($@) {
			# One common cause of XML::Feed crashing is a feed
			# that contains invalid UTF-8 sequences. Convert
			# feed to ascii to try to work around.
			$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
			$f=eval {
				$content=Encode::decode_utf8($content, 0);
				XML::Feed->parse(\$content)
			};
		}
		if ($@) {
			# Another possibility is badly escaped entities.
			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
			$f=eval {
				$content=Encode::decode_utf8($content, 0);
				XML::Feed->parse(\$content)
			};
		}
		if ($@) {
			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if (! $f) {
			$feed->{message}=XML::Feed->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}

		foreach my $entry ($f->entries) {
			# XML::Feed doesn't work around XML::Atom's bizarre
			# API, so we will. Real unicode strings? Yes please.
			# See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
			local $XML::Atom::ForceUnicode = 1;

			my $c=$entry->content;
			# atom feeds may have no content, only a summary
			if (! defined $c && ref $entry->summary) {
				$c=$entry->summary;
			}

			add_page(
				feed => $feed,
				copyright => $f->copyright,
				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
				link => $entry->link,
				content => (defined $c && defined $c->body) ? $c->body : "",
				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
				base => (defined $c && $c->can("base")) ? $c->base : undef,
			);
		}
	}
}

sub add_page (@) {
	my %params=@_;

	my $feed=$params{feed};
	my $guid={};
	my $mtime;
	if (exists $guids{$params{guid}}) {
		# updating an existing post
		$guid=$guids{$params{guid}};
		return if $guid->{expired};
	}
	else {
		# new post
		$guid->{guid}=$params{guid};
		$guids{$params{guid}}=$guid;
		$mtime=$params{ctime};
		$feed->{numposts}++;
		$feed->{newposts}++;

		# assign it an unused page
		my $page=titlepage($params{title});
		# escape slashes and periods in title so it doesn't specify
		# directory name or trigger ".." disallowing code.
		$page=~s!([/.])!"__".ord($1)."__"!eg;
		$page=$feed->{dir}."/".$page;
		($page)=$page=~/$config{wiki_file_regexp}/;
		if (! defined $page || ! length $page) {
			$page=$feed->{dir}."/item";
		}
		my $c="";
		while (exists $IkiWiki::pagecase{lc $page.$c} ||
		       -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
		       -e "$config{srcdir}/".htmlfn($page.$c)) {
			$c++;
		}
		# append the disambiguating suffix found above
		$page=$page.$c;

		# Make sure that the file name isn't too long.
		# NB: This doesn't check for path length limits.
		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
		if (defined $max && length(htmlfn($page)) >= $max) {
			$c="";
			$page=$feed->{dir}."/item";
			while (exists $IkiWiki::pagecase{lc $page.$c} ||
			       -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
			       -e "$config{srcdir}/".htmlfn($page.$c)) {
				$c++;
			}
			$page=$page.$c;
		}

		$guid->{page}=$page;
		debug(sprintf(gettext("creating new page %s"), $page));
	}

	$guid->{feed}=$feed->{name};

	# To write or not to write? Need to avoid writing unchanged pages
	# to avoid unnecessary rebuilding. The mtime from rss cannot be
	# trusted; let's use a digest.
	eval q{use Digest::MD5 'md5_hex'};
	error($@) if $@;
	require Encode;
	my $digest=md5_hex(Encode::encode_utf8($params{content}));
	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
	$guid->{md5}=$digest;

	# Create the page.
	my $template;
	eval {
		$template=template($feed->{template}, blind_cache => 1);
	};
	if ($@) {
		# gettext can clobber $@, so save it first
		my $error = $@;
		print STDERR gettext("failed to process template:")." $error";
		return;
	}
	$template->param(title => $params{title})
		if defined $params{title} && length($params{title});
	$template->param(content => wikiescape(htmlabs($params{content},
		defined $params{base} ? $params{base} : $feed->{feedurl})));
	$template->param(name => $feed->{name});
	$template->param(url => $feed->{url});
	$template->param(copyright => $params{copyright})
		if defined $params{copyright} && length $params{copyright};
	$template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
		if defined $params{link};
	if (ref $feed->{tags}) {
		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
	}
	writefile(htmlfn($guid->{page}),
		$IkiWiki::Plugin::transient::transientdir, $template->output);

	if (defined $mtime && $mtime <= time) {
		# Set the mtime, this lets the build process get the right
		# creation time on record for the new page.
		utime $mtime, $mtime,
			$IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
		# Store it in pagectime for expiry code to use also.
		$IkiWiki::pagectime{$guid->{page}}=$mtime
			unless exists $IkiWiki::pagectime{$guid->{page}};
	}
	else {
		# Dummy value for expiry code.
		$IkiWiki::pagectime{$guid->{page}}=time
			unless exists $IkiWiki::pagectime{$guid->{page}};
	}
}

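# Escape [ and ] so feed content can't form wikilinks or preprocessor
# directives; e.g. a literal [[link]] in a post body gets entity-escaped
# instead of being treated as markup.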
sub wikiescape ($) {
	# escape accidental wikilinks and preprocessor stuff
	return encode_entities(shift, '\[\]');
}

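# For example, given urlbase http://example.com/blog/index.rss, a
# relative href="/2024/post.html" in feed content should become
# href="http://example.com/2024/post.html".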
sub htmlabs ($$) {
	# Convert links in html from relative to absolute.
	# Note that this is a heuristic, which is not specified by the rss
	# spec and may not be right for all feeds. Also, see Debian
	# bug #381359.
	my $html=shift;
	my $urlbase=shift;

	my $ret="";
	my $p = HTML::Parser->new(api_version => 3);
	$p->handler(default => sub { $ret.=join("", @_) }, "text");
	$p->handler(start => sub {
		my ($tagname, $pos, $text) = @_;
		if (ref $HTML::Tagset::linkElements{$tagname}) {
			while (4 <= @$pos) {
				# use attribute sets from right to left
				# to avoid invalidating the offsets
				# when replacing the values
				my($k_offset, $k_len, $v_offset, $v_len) =
					splice(@$pos, -4);
				my $attrname = lc(substr($text, $k_offset, $k_len));
				next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
				next unless $v_offset; # 0 v_offset means no value
				my $v = substr($text, $v_offset, $v_len);
				$v =~ s/^([\'\"])(.*)\1$/$2/;
				my $new_v=IkiWiki::urlabs($v, $urlbase);
				$new_v =~ s/\"/&quot;/g; # since we quote with ""
				substr($text, $v_offset, $v_len) = qq("$new_v");
			}
		}
		$ret.=$text;
	}, "tagname, tokenpos, text");
	$p->parse($html);
	$p->eof;

	return $ret;
}

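# Filename for an aggregated page: "foo" maps to "foo._aggregated" when
# aggregateinternal is set (the default), or to "foo.html" (or whatever
# $config{htmlext} is) when aggregating to regular source pages.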
sub htmlfn ($) {
	return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
}

my $aggregatelock;

sub lockaggregate () {
	# Take an exclusive lock to prevent multiple concurrent aggregators.
	# Returns true if the lock was acquired.
	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
		error("cannot open $config{wikistatedir}/aggregatelock: $!");
	if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
		close($aggregatelock) || error("failed closing aggregatelock: $!");
		return 0;
	}
	return 1;
}

sub unlockaggregate () {
	return close($aggregatelock) if $aggregatelock;
	return;
}

1