author    Joey Hess <joey@gnu.kitenet.net>  2010-04-05 17:02:10 -0400
committer Joey Hess <joey@gnu.kitenet.net>  2010-04-05 17:02:10 -0400
commit    a01028ae8156679a108a40c62aa6b4cc3e2b3ae7 (patch)
tree      b31782b60b4ba8e5324b4b8184a83dfab765c754
parent    09d4e9d6bb7a049b0721ffbf7dc6e64bee4404cc (diff)
txt: Add a special case for robots.txt.
-rw-r--r--  IkiWiki/Plugin/txt.pm  9
-rw-r--r--  debian/changelog       1
-rw-r--r--  doc/plugins/txt.mdwn   5
3 files changed, 14 insertions(+), 1 deletion(-)
diff --git a/IkiWiki/Plugin/txt.pm b/IkiWiki/Plugin/txt.pm
index 1ed9f0856..0d9a0b35b 100644
--- a/IkiWiki/Plugin/txt.pm
+++ b/IkiWiki/Plugin/txt.pm
@@ -39,7 +39,14 @@ sub filter (@) {
 	my %params = @_;
 	my $content = $params{content};
 
-	if (defined $pagesources{$params{page}} && $pagesources{$params{page}} =~ /\.txt$/) {
+	if (defined $pagesources{$params{page}} &&
+	    $pagesources{$params{page}} =~ /\.txt$/) {
+		if ($pagesources{$params{page}} eq 'robots.txt' &&
+		    $params{page} eq $params{destpage}) {
+			will_render($params{page}, 'robots.txt');
+			writefile('robots.txt', $config{destdir}, $content);
+		}
+
 		encode_entities($content, "<>&");
 		if ($findurl) {
 			my $finder = URI::Find->new(sub {
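The guard on `$params{page} eq $params{destpage}` matters because ikiwiki's
filter hook also runs when a page's content is inlined into another page;
without it, every page inlining robots.txt would rewrite the copy in the
destdir. Below is a minimal standalone sketch of that logic, with stand-ins
for ikiwiki's internals: %pagesources and $destdir are hypothetical local
versions of the real globals, writefile is a simplified stub for
IkiWiki::writefile, and will_render's bookkeeping is omitted.

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Stand-ins for ikiwiki's globals (assumed values for illustration).
    my %pagesources = (robots => 'robots.txt');
    my $destdir = '/tmp/ikiwiki-dest';

    # Simplified stub for IkiWiki::writefile.
    sub writefile {
        my ($file, $dir, $content) = @_;
        mkdir $dir unless -d $dir;
        open(my $fh, '>', "$dir/$file") or die "cannot write $dir/$file: $!";
        print $fh $content;
        close $fh;
    }

    sub filter_txt {
        my %params = @_;
        if (defined $pagesources{$params{page}} &&
            $pagesources{$params{page}} =~ /\.txt$/) {
            # Copy the raw file out only when robots.txt is being
            # rendered as its own page, not inlined into another page.
            if ($pagesources{$params{page}} eq 'robots.txt' &&
                $params{page} eq $params{destpage}) {
                writefile('robots.txt', $destdir, $params{content});
            }
        }
        return $params{content};
    }

    # Standalone render: writes /tmp/ikiwiki-dest/robots.txt.
    filter_txt(page => 'robots', destpage => 'robots',
        content => "User-agent: *\nDisallow: /private/\n");

    # Inlined into another page: the guard skips the write.
    filter_txt(page => 'robots', destpage => 'index',
        content => "User-agent: *\n");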
diff --git a/debian/changelog b/debian/changelog
index be89d0918..77d17f566 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -2,6 +2,7 @@ ikiwiki (3.20100404) UNRELEASED; urgency=low
 
   * bzr: Fix bzr log parsing to work with bzr 2.0. (liw)
   * comments: Fix missing entity encoding in title.
+  * txt: Add a special case for robots.txt.
 
  -- Joey Hess <joeyh@debian.org>  Sun, 04 Apr 2010 12:17:11 -0400
diff --git a/doc/plugins/txt.mdwn b/doc/plugins/txt.mdwn
index 420898d09..a3087c9e0 100644
--- a/doc/plugins/txt.mdwn
+++ b/doc/plugins/txt.mdwn
@@ -12,3 +12,8 @@ The only exceptions are that [[WikiLinks|ikiwiki/WikiLink]] and
 [[directives|ikiwiki/directive]] are still expanded by
 ikiwiki, and that, if the [[!cpan URI::Find]] perl module is installed, URLs
 in the txt file are converted to hyperlinks.
+
+----
+
+As a special case, a file `robots.txt` will be copied intact into the
+`destdir`, as well as being used to create a wiki page named "robots".
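For illustration, a hypothetical robots.txt that a wiki operator might commit
at the top of the wiki's source tree (any valid robots.txt works); with this
change it lands verbatim at the destdir root, where crawlers expect to fetch
it, while still being rendered as the wiki page "robots":

    User-agent: *
    Disallow: /recentchanges/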