diff --git a/IkiWiki/Plugin/txt.pm b/IkiWiki/Plugin/txt.pm
index 22c9ac903..0d9a0b35b 100644
--- a/IkiWiki/Plugin/txt.pm
+++ b/IkiWiki/Plugin/txt.pm
@@ -8,13 +8,14 @@ package IkiWiki::Plugin::txt;
 
 use warnings;
 use strict;
-use IkiWiki 2.00;
+use IkiWiki 3.00;
 use HTML::Entities;
 
 my $findurl=0;
 
 sub import {
-	hook(type => "filter", id => "txt", call => \&filter);
+	hook(type => "getsetup", id => "txt", call => \&getsetup);
+	hook(type => "filter", id => "txt", call => \&filter);
 	hook(type => "htmlize", id => "txt", call => \&htmlize);
 
 	eval q{use URI::Find};
@@ -23,14 +24,30 @@ sub import {
 	}
 }
 
+sub getsetup () {
+	return
+		plugin => {
+			safe => 1,
+			rebuild => 1, # format plugin
+			section => "format",
+		},
+}
+
 # We use filter to convert raw text to HTML
 # (htmlize is called after other plugins insert HTML)
 sub filter (@) {
 	my %params = @_;
 	my $content = $params{content};
 
-	if ($pagesources{$params{page}} =~ /\.txt$/) {
-		encode_entities($content);
+	if (defined $pagesources{$params{page}} &&
+	    $pagesources{$params{page}} =~ /\.txt$/) {
+		if ($pagesources{$params{page}} eq 'robots.txt' &&
+		    $params{page} eq $params{destpage}) {
+			will_render($params{page}, 'robots.txt');
+			writefile('robots.txt', $config{destdir}, $content);
+		}
+
+		encode_entities($content, "<>&");
 		if ($findurl) {
 			my $finder = URI::Find->new(sub {
 				my ($uri, $orig_uri) = @_;
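
After the patch the filter calls encode_entities() with an explicit character list instead of the bare form. As a point of reference, here is a small standalone Perl sketch (not part of the patch; the sample string and variable names are invented) contrasting HTML::Entities' default behaviour with the restricted "<>&" form used above:

#!/usr/bin/perl
use strict;
use warnings;
use HTML::Entities;

# Invented sample, standing in for the raw content of a .txt page.
my $content = q{Ben & Jerry's <motto> is "quality"};

# Bare call (old behaviour): escapes the module's default unsafe set,
# which is broader than just <, > and &.
my $default = encode_entities($content);

# Two-argument call, as in the patch: only the characters listed in the
# second argument ("<", ">" and "&") are turned into entities.
my $restricted = encode_entities($content, "<>&");

print "$default\n$restricted\n";

Assigning the result works on a copy; the plugin's void-context call instead modifies $content in place, so only <, > and & in the page text end up entity-encoded while everything else passes through unchanged.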