X-Git-Url: https://sipb.mit.edu/gitweb.cgi/ikiwiki.git/blobdiff_plain/bb93fccf0690344aa77f9538a508959a6de09847..86a83a2a69df86879c80759e0df6c517b49e1c57:/IkiWiki.pm

diff --git a/IkiWiki.pm b/IkiWiki.pm
index d93ff7374..a0a61ac64 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -14,19 +14,19 @@ use open qw{:utf8 :std};
 use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
	%pagestate %wikistate %renderedfiles %oldrenderedfiles
	%pagesources %destsources %depends %hooks %forcerebuild
-	$gettext_obj %loaded_plugins};
+	%loaded_plugins};
 
 use Exporter q{import};
 our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
-	bestlink htmllink readfile writefile pagetype srcfile pagename
-	displaytime will_render gettext urlto targetpage
-	add_underlay pagetitle titlepage linkpage newpagefile
-	inject
+	pagespec_match_list bestlink htmllink readfile writefile
+	pagetype srcfile pagename displaytime will_render gettext urlto
+	targetpage add_underlay pagetitle titlepage linkpage
+	newpagefile inject add_link
	%config %links %pagestate %wikistate %renderedfiles
	%pagesources %destsources);
-our $VERSION = 2.00; # plugin interface version, next is ikiwiki version
+our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
 our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-my $installdir=''; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
+our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
 
 # Optimisation.
 use Memoize;
@@ -100,7 +100,7 @@ sub getsetup () {
		type => "string",
		default => '',
		example => "/var/www/wiki/ikiwiki.cgi",
-		description => "cgi wrapper to generate",
+		description => "filename of cgi wrapper to generate",
		safe => 0, # file
		rebuild => 0,
	},
@@ -157,6 +157,13 @@ sub getsetup () {
		safe => 0, # path
		rebuild => 0,
	},
+	underlaydirbase => {
+		type => "internal",
+		default => "$installdir/share/ikiwiki",
+		description => "parent directory containing additional underlays",
+		safe => 0,
+		rebuild => 0,
+	},
	wrappers => {
		type => "internal",
		default => [],
@@ -174,7 +181,7 @@ sub getsetup () {
	verbose => {
		type => "boolean",
		example => 1,
-		description => "display verbose messages when building?",
+		description => "display verbose messages?",
		safe => 1,
		rebuild => 0,
	},
@@ -194,7 +201,7 @@ sub getsetup () {
	},
	prefix_directives => {
		type => "boolean",
-		default => 0,
+		default => 1,
		description => "use '!'-prefixed preprocessor directives?",
		safe => 0, # changing requires manual transition
		rebuild => 1,
	},
@@ -321,7 +328,7 @@ sub getsetup () {
		default => [qr/(^|\/)\.\.(\/|$)/, qr/^\./, qr/\/\./,
			qr/\.x?html?$/, qr/\.ikiwiki-new$/,
			qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//,
-			qr/(^|\/)_MTN\//,
+			qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//,
			qr/\.dpkg-tmp$/],
		description => "regexps of source files to ignore",
		safe => 0,
@@ -452,7 +459,7 @@ sub checkconfig () {
	if (defined $config{locale}) {
		if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
			$ENV{LANG}=$config{locale};
-			$gettext_obj=undef;
+			define_gettext();
		}
	}
 
@@ -533,7 +540,7 @@ sub loadplugins () {
 
	run_hooks(getopt => sub { shift->() });
	if (grep /^-/, @ARGV) {
-		print STDERR "Unknown option: $_\n"
+		print STDERR "Unknown option (or missing parameter): $_\n"
			foreach grep /^-/, @ARGV;
		usage();
	}
@@ -627,27 +634,34 @@ sub dirname ($) {
	return $file;
 }
 
-sub pagetype ($) {
+sub isinternal ($) {
	my $page=shift;
+	return exists $pagesources{$page} &&
+		$pagesources{$page} =~ /\._([^.]+)$/;
+}
+
+sub pagetype ($) {
+	my $file=shift;
 
-	if ($page =~ /\.([^.]+)$/) {
+	if ($file =~ /\.([^.]+)$/) {
		return $1 if exists $hooks{htmlize}{$1};
	}
+	my $base=basename($file);
+	if (exists $hooks{htmlize}{$base} &&
+	    $hooks{htmlize}{$base}{noextension}) {
+		return $base;
+	}
	return;
 }
 
-sub isinternal ($) {
-	my $page=shift;
-	return exists $pagesources{$page} &&
-		$pagesources{$page} =~ /\._([^.]+)$/;
-}
-
 sub pagename ($) {
	my $file=shift;
 
	my $type=pagetype($file);
	my $page=$file;
-	$page=~s/\Q.$type\E*$// if defined $type && !$hooks{htmlize}{$type}{keepextension};
+	$page=~s/\Q.$type\E*$//
+		if defined $type && !$hooks{htmlize}{$type}{keepextension}
+			&& !$hooks{htmlize}{$type}{noextension};
	if ($config{indexpages} && $page=~/(.*)\/index$/) {
		$page=$1;
	}
@@ -708,7 +722,7 @@ sub add_underlay ($) {
	my $dir=shift;
 
	if ($dir !~ /^\//) {
-		$dir="$config{underlaydir}/../$dir";
+		$dir="$config{underlaydirbase}/$dir";
	}
 
	if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
@@ -949,16 +963,16 @@ sub formattime ($;$) {
 sub beautify_urlpath ($) {
	my $url=shift;
 
-	if ($config{usedirs}) {
-		$url =~ s!/index.$config{htmlext}$!/!;
-	}
-
	# Ensure url is not an empty link, and if necessary,
	# add ./ to avoid colon confusion.
-	if ($url !~ /^\// && $url !~ /^\.\.\//) {
+	if ($url !~ /^\// && $url !~ /^\.\.?\//) {
		$url="./$url";
	}
 
+	if ($config{usedirs}) {
+		$url =~ s!/index.$config{htmlext}$!/!;
+	}
+
	return $url;
 }
 
@@ -1239,7 +1253,7 @@ sub preprocess ($$$;$$) {
				|
				"[^"]+"		# single-quoted value
				|
-				[^\s\]]+	# unquoted value
+				[^"\s\]]+	# unquoted value
				)
				\s*		# whitespace or end
						# of directive
@@ -1262,7 +1276,7 @@ sub preprocess ($$$;$$) {
			|
			"[^"]+"		# single-quoted value
			|
-			[^\s\]]+	# unquoted value
+			[^"\s\]]+	# unquoted value
			)
			\s*		# whitespace or end
					# of directive
@@ -1293,6 +1307,70 @@ sub indexlink () {
	return "<a href=\"$config{url}\">$config{wikiname}</a>";
 }
 
+sub check_canedit ($$$;$) {
+	my $page=shift;
+	my $q=shift;
+	my $session=shift;
+	my $nonfatal=shift;
+
+	my $canedit;
+	run_hooks(canedit => sub {
+		return if defined $canedit;
+		my $ret=shift->($page, $q, $session);
+		if (defined $ret) {
+			if ($ret eq "") {
+				$canedit=1;
+			}
+			elsif (ref $ret eq 'CODE') {
+				$ret->() unless $nonfatal;
+				$canedit=0;
+			}
+			elsif (defined $ret) {
+				error($ret) unless $nonfatal;
+				$canedit=0;
+			}
+		}
+	});
+	return defined $canedit ? $canedit : 1;
+}
+
+sub check_content (@) {
+	my %params=@_;
+
+	return 1 if ! exists $hooks{checkcontent}; # optimisation
+
+	if (exists $pagesources{$params{page}}) {
+		my @diff;
+		my %old=map { $_ => 1 }
+			split("\n", readfile(srcfile($pagesources{$params{page}})));
+		foreach my $line (split("\n", $params{content})) {
+			push @diff, $line if ! exists $old{$_};
+		}
+		$params{content}=join("\n", @diff);
+	}
+
+	my $ok;
+	run_hooks(checkcontent => sub {
+		return if defined $ok;
+		my $ret=shift->(%params);
+		if (defined $ret) {
+			if ($ret eq "") {
+				$ok=1;
+			}
+			elsif (ref $ret eq 'CODE') {
+				$ret->() unless $params{nonfatal};
+				$ok=0;
+			}
+			elsif (defined $ret) {
+				error($ret) unless $params{nonfatal};
+				$ok=0;
+			}
+		}
+
+	});
+	return defined $ok ? $ok : 1;
+}
+
 my $wikilock;
 
 sub lockwiki () {
@@ -1536,15 +1614,19 @@ sub run_hooks ($$) {
	my $sub=shift;
 
	if (exists $hooks{$type}) {
-		my @deferred;
+		my (@first, @middle, @last);
		foreach my $id (keys %{$hooks{$type}}) {
-			if ($hooks{$type}{$id}{last}) {
-				push @deferred, $id;
-				next;
+			if ($hooks{$type}{$id}{first}) {
+				push @first, $id;
+			}
+			elsif ($hooks{$type}{$id}{last}) {
+				push @last, $id;
+			}
+			else {
+				push @middle, $id;
			}
-			$sub->($hooks{$type}{$id}{call});
		}
-		foreach my $id (@deferred) {
+		foreach my $id (@first, @middle, @last) {
			$sub->($hooks{$type}{$id}{call});
		}
	}
@@ -1596,43 +1678,6 @@ sub rcs_receive () {
	$hooks{rcs}{rcs_receive}{call}->();
 }
 
-sub globlist_to_pagespec ($) {
-	my @globlist=split(' ', shift);
-
-	my (@spec, @skip);
-	foreach my $glob (@globlist) {
-		if ($glob=~/^!(.*)/) {
-			push @skip, $glob;
-		}
-		else {
-			push @spec, $glob;
-		}
-	}
-
-	my $spec=join(' or ', @spec);
-	if (@skip) {
-		my $skip=join(' and ', @skip);
-		if (length $spec) {
-			$spec="$skip and ($spec)";
-		}
-		else {
-			$spec=$skip;
-		}
-	}
-	return $spec;
-}
-
-sub is_globlist ($) {
-	my $s=shift;
-	return ( $s =~ /[^\s]+\s+([^\s]+)/ && $1 ne "and" && $1 ne "or" );
-}
-
-sub safequote ($) {
-	my $s=shift;
-	$s=~s/[{}]//g;
-	return "q{$s}";
-}
-
 sub add_depends ($$) {
	my $page=shift;
	my $pagespec=shift;
@@ -1659,33 +1704,41 @@ sub file_pruned ($$) {
	return $file =~ m/$regexp/ && $file ne $base;
 }
 
-sub gettext {
-	# Only use gettext in the rare cases it's needed.
+sub define_gettext () {
+	# If translation is needed, redefine the gettext function to do it.
+	# Otherwise, it becomes a quick no-op.
+	no warnings 'redefine';
	if ((exists $ENV{LANG} && length $ENV{LANG}) ||
	    (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
	    (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
-		if (! $gettext_obj) {
-			$gettext_obj=eval q{
+		*gettext=sub {
+			my $gettext_obj=eval q{
				use Locale::gettext q{textdomain};
				Locale::gettext->domain('ikiwiki')
			};
-			if ($@) {
-				print STDERR "$@";
-				$gettext_obj=undef;
+
+			if ($gettext_obj) {
+				$gettext_obj->get(shift);
+			}
+			else {
				return shift;
			}
-		}
-		return $gettext_obj->get(shift);
+		};
	}
	else {
-		return shift;
+		*gettext=sub { return shift };
	}
 }
 
+sub gettext {
+	define_gettext();
+	gettext(@_);
+}
+
 sub yesno ($) {
	my $val=shift;
 
-	return (defined $val && lc($val) eq gettext("yes"));
+	return (defined $val && (lc($val) eq gettext("yes") || lc($val) eq "yes" || $val eq "1"));
 }
 
 sub inject {
@@ -1713,33 +1766,28 @@ sub inject {
	use warnings;
 }
 
+sub add_link ($$) {
+	my $page=shift;
+	my $link=shift;
+
+	push @{$links{$page}}, $link
+		unless grep { $_ eq $link } @{$links{$page}};
+}
+
 sub pagespec_merge ($$) {
	my $a=shift;
	my $b=shift;
 
	return $a if $a eq $b;
-
-	# Support for old-style GlobLists.
-	if (is_globlist($a)) {
-		$a=globlist_to_pagespec($a);
-	}
-	if (is_globlist($b)) {
-		$b=globlist_to_pagespec($b);
-	}
-
	return "($a) or ($b)";
 }
 
 sub pagespec_translate ($) {
	my $spec=shift;
 
-	# Support for old-style GlobLists.
-	if (is_globlist($spec)) {
-		$spec=globlist_to_pagespec($spec);
-	}
-
	# Convert spec to perl code.
	my $code="";
+	my @data;
	while ($spec=~m{
		\s*		# ignore whitespace
		(		# 1: match a single word
@@ -1767,19 +1815,22 @@ sub pagespec_translate ($) {
		}
		elsif ($word =~ /^(\w+)\((.*)\)$/) {
			if (exists $IkiWiki::PageSpec::{"match_$1"}) {
-				$code.="IkiWiki::PageSpec::match_$1(\$page, ".safequote($2).", \@_)";
+				push @data, $2;
+				$code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
			}
			else {
-				$code.=' 0';
+				push @data, qq{unknown function in pagespec "$word"};
+				$code.="IkiWiki::ErrorReason->new(\$data[$#data])";
			}
		}
		else {
-			$code.=" IkiWiki::PageSpec::match_glob(\$page, ".safequote($word).", \@_)";
+			push @data, $word;
+			$code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
		}
	}
 
	if (! length $code) {
-		$code=0;
+		$code="IkiWiki::FailReason->new('empty pagespec')";
	}
 
	no warnings;
@@ -1797,10 +1848,35 @@ sub pagespec_match ($$;@) {
	}
 
	my $sub=pagespec_translate($spec);
-	return IkiWiki::FailReason->new("syntax error in pagespec \"$spec\"") if $@;
+	return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
+		if $@ || ! defined $sub;
	return $sub->($page, @params);
 }
 
+sub pagespec_match_list ($$;@) {
+	my $pages=shift;
+	my $spec=shift;
+	my @params=@_;
+
+	my $sub=pagespec_translate($spec);
+	error "syntax error in pagespec \"$spec\""
+		if $@ || ! defined $sub;
+
+	my @ret;
+	my $r;
+	foreach my $page (@$pages) {
+		$r=$sub->($page, @params);
+		push @ret, $page if $r;
+	}
+
+	if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
+		error(sprintf(gettext("cannot match pages: %s"), $r));
+	}
+	else {
+		return @ret;
+	}
+}
+
 sub pagespec_valid ($) {
	my $spec=shift;
 
@@ -1830,6 +1906,10 @@ sub new {
	return bless \$value, $class;
 }
 
+package IkiWiki::ErrorReason;
+
+our @ISA = 'IkiWiki::FailReason';
+
 package IkiWiki::SuccessReason;
 
 use overload (
@@ -1847,19 +1927,25 @@ sub new {
 
 package IkiWiki::PageSpec;
 
+sub derel ($$) {
+	my $path=shift;
+	my $from=shift;
+
+	if ($path =~ m!^\./!) {
+		$from=~s#/?[^/]+$## if defined $from;
+		$path=~s#^\./##;
+		$path="$from/$path" if length $from;
+	}
+
+	return $path;
+}
+
 sub match_glob ($$;@) {
	my $page=shift;
	my $glob=shift;
	my %params=@_;
 
-	my $from=exists $params{location} ? $params{location} : '';
-
-	# relative matching
-	if ($glob =~ m!^\./!) {
-		$from=~s#/?[^/]+$##;
-		$glob=~s#^\./##;
-		$glob="$from/$glob" if length $from;
-	}
+	$glob=derel($glob, $params{location});
 
	my $regexp=IkiWiki::glob2re($glob);
	if ($page=~/^$regexp$/i) {
@@ -1884,15 +1970,9 @@ sub match_link ($$;@) {
	my $link=lc(shift);
	my %params=@_;
 
+	$link=derel($link, $params{location});
	my $from=exists $params{location} ? $params{location} : '';
 
-	# relative matching
-	if ($link =~ m!^\.! && defined $from) {
-		$from=~s#/?[^/]+$##;
-		$link=~s#^\./##;
-		$link="$from/$link" if length $from;
-	}
-
	my $links = $IkiWiki::links{$page};
	return IkiWiki::FailReason->new("$page has no links") unless $links && @{$links};
	my $bestlink = IkiWiki::bestlink($from, $link);
@@ -1920,6 +2000,9 @@ sub match_backlink ($$;@) {
 sub match_created_before ($$;@) {
	my $page=shift;
	my $testpage=shift;
+	my %params=@_;
+
+	$testpage=derel($testpage, $params{location});
 
	if (exists $IkiWiki::pagectime{$testpage}) {
		if ($IkiWiki::pagectime{$page} < $IkiWiki::pagectime{$testpage}) {
@@ -1937,6 +2020,9 @@ sub match_created_after ($$;@) {
	my $page=shift;
	my $testpage=shift;
+	my %params=@_;
+
+	$testpage=derel($testpage, $params{location});
 
	if (exists $IkiWiki::pagectime{$testpage}) {
		if ($IkiWiki::pagectime{$page} > $IkiWiki::pagectime{$testpage}) {
@@ -1984,7 +2070,7 @@ sub match_user ($$;@) {
	my %params=@_;
 
	if (! exists $params{user}) {
-		return IkiWiki::FailReason->new("no user specified");
+		return IkiWiki::ErrorReason->new("no user specified");
	}
 
	if (defined $params{user} && lc $params{user} eq lc $user) {
@@ -2004,7 +2090,7 @@ sub match_admin ($$;@) {
	my %params=@_;
 
	if (! exists $params{user}) {
-		return IkiWiki::FailReason->new("no user specified");
+		return IkiWiki::ErrorReason->new("no user specified");
	}
 
	if (defined $params{user} && IkiWiki::is_admin($params{user})) {
@@ -2024,7 +2110,7 @@ sub match_ip ($$;@) {
	my %params=@_;
 
	if (! exists $params{ip}) {
-		return IkiWiki::FailReason->new("no IP specified");
+		return IkiWiki::ErrorReason->new("no IP specified");
	}
 
	if (defined $params{ip} && lc $params{ip} eq lc $ip) {
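
For orientation, a minimal sketch of how a plugin might use the interface added above (add_link, pagespec_match_list, and first/last hook ordering) against the 3.00 interface version declared in this file; the plugin name, the pagespec, and the "sandbox" link target are illustrative only.

# Hypothetical plugin, for illustration only.
package IkiWiki::Plugin::example;

use warnings;
use strict;
use IkiWiki 3.00;

sub import {
	# first/last control the order in which run_hooks() calls hooks.
	hook(type => "scan", id => "example", call => \&scan, last => 1);
}

sub scan {
	my %params=@_;
	# add_link avoids pushing duplicate entries onto %links directly.
	add_link($params{page}, "sandbox");
}

sub matching_pages {
	# pagespec_match_list filters a list of pages against a pagespec and
	# calls error() if the pagespec itself is bad (IkiWiki::ErrorReason).
	my @pages=keys %pagesources;
	return pagespec_match_list(\@pages, "blog/* and !*/Discussion");
}

1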