author	Joey Hess <joey@kodama.kitenet.net>	2008-02-03 03:04:19 -0500
committer	Joey Hess <joey@kodama.kitenet.net>	2008-02-03 03:04:19 -0500
commit	1f6591f0a61415777a662d979c5c142c7f4ad7cd (patch)
tree	0f29bf6d8ab4ca3e4070c7731278ab81db094d9b /IkiWiki/Plugin/aggregate.pm
parent	72bbc415205fed81dfa028d3575873ea84003a8f (diff)
* aggregate: Revert the use of forking to avoid saving state; that was not
  the right approach.
Diffstat (limited to 'IkiWiki/Plugin/aggregate.pm')
-rw-r--r--  IkiWiki/Plugin/aggregate.pm  |  52
1 file changed, 20 insertions, 32 deletions
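
The following is a minimal, self-contained Perl sketch (not the plugin code
itself) of the control-flow change this commit makes: rather than forking a
child process so that state accumulated while aggregating never reaches the
parent's build, aggregation now runs in-process and the new clearstate()
resets the package-level %feeds/%guids state explicitly. The sub names mirror
the diff below; the bodies here are stub placeholders for illustration only.

#!/usr/bin/perl
use strict;
use warnings;

our (%feeds, %guids);
my $state_loaded = 0;

# Stubs standing in for the plugin's real subs of the same names.
sub loadstate  { $state_loaded = 1; %feeds = (example => { lastupdate => 0 }); }
sub aggregate  { $_->{lastupdate} = time foreach values %feeds; }
sub expire     { }    # would prune posts past expireage/expirecount here
sub savestate  { }    # would write wikistatedir/aggregate atomically here
sub clearstate { %feeds = (); %guids = (); $state_loaded = 0; }

# In-process flow adopted by the commit:
loadstate();
aggregate();
expire();
savestate();
clearstate();    # ensure the later page build starts from clean state

print "feeds left in memory after clearstate: ", scalar(keys %feeds), "\n";

In the real plugin the same sequence also calls IkiWiki::loadindex() before
aggregating and marks each feed's sourcepage for rebuild, as the hunks below
show; dropping the fork removes the waitpid()/$? error-propagation code that
the reverted approach needed.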
diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 736b0e0d5..0f50fab06 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -37,30 +37,14 @@ sub checkconfig () { #{{{
debug("wiki is locked by another process, not aggregating");
exit 1;
}
-
+
loadstate();
- my @feeds=needsaggregate();
- return unless @feeds;
-
- # Fork a child process to handle the aggregation.
- # The parent process will then handle building the
- # result. This avoids messy code to clear state
- # accumulated while aggregating.
- defined(my $pid = fork) or error("Can't fork: $!");
- if (! $pid) {
- IkiWiki::loadindex();
- aggregate(@feeds);
- expire();
- savestate();
- exit 0;
- }
- waitpid($pid,0);
- if ($?) {
- error "aggregation failed with code $?";
- }
- $IkiWiki::forcerebuild{$_->{sourcepage}}=1
- foreach @feeds;
-
+ IkiWiki::loadindex();
+ aggregate();
+ expire();
+ savestate();
+ clearstate();
+
IkiWiki::unlockwiki();
}
} #}}}
@@ -148,7 +132,7 @@ sub loadstate () { #{{{
return if $state_loaded;
$state_loaded=1;
if (-e "$config{wikistatedir}/aggregate") {
- open(IN, "$config{wikistatedir}/aggregate") ||
+ open(IN, "<", "$config{wikistatedir}/aggregate") ||
die "$config{wikistatedir}/aggregate: $!";
while (<IN>) {
$_=IkiWiki::possibly_foolish_untaint($_);
@@ -186,7 +170,7 @@ sub savestate () { #{{{
error($@) if $@;
my $newfile="$config{wikistatedir}/aggregate.new";
my $cleanup = sub { unlink($newfile) };
- open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
+ open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
foreach my $data (values %feeds, values %guids) {
if ($data->{remove}) {
if ($data->{name}) {
@@ -228,6 +212,12 @@ sub savestate () { #{{{
error("rename $newfile: $!", $cleanup);
} #}}}
+sub clearstate () { #{{{
+ %feeds=();
+ %guids=();
+ $state_loaded=0;
+} #}}}
+
sub expire () { #{{{
foreach my $feed (values %feeds) {
next unless $feed->{expireage} || $feed->{expirecount};
@@ -259,12 +249,7 @@ sub expire () { #{{{
}
} #}}}
-sub needsaggregate () { #{{{
- return values %feeds if $config{rebuild};
- return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
-} #}}}
-
-sub aggregate (@) { #{{{
+sub aggregate () { #{{{
eval q{use XML::Feed};
error($@) if $@;
eval q{use URI::Fetch};
@@ -272,12 +257,15 @@ sub aggregate (@) { #{{{
eval q{use HTML::Entities};
error($@) if $@;
- foreach my $feed (@_) {
+ foreach my $feed (values %feeds) {
+ next unless $config{rebuild} ||
+ time - $feed->{lastupdate} >= $feed->{updateinterval};
$feed->{lastupdate}=time;
$feed->{newposts}=0;
$feed->{message}=sprintf(gettext("processed ok at %s"),
displaytime($feed->{lastupdate}));
$feed->{error}=0;
+ $IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));