X-Git-Url: https://sipb.mit.edu/gitweb.cgi/ikiwiki.git/blobdiff_plain/23a4ee6d15dbd9b8e8c6588a829dd30a26a8de32..2ba8bd386142e7f3c0fb03c86eb90eb3885aabd2:/IkiWiki.pm diff --git a/IkiWiki.pm b/IkiWiki.pm index 061a1c6db..d667e7e10 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm @@ -13,8 +13,8 @@ use open qw{:utf8 :std}; use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase %pagestate %wikistate %renderedfiles %oldrenderedfiles - %pagesources %destsources %depends %hooks %forcerebuild - $gettext_obj %loaded_plugins}; + %pagesources %destsources %depends %depends_simple %hooks + %forcerebuild %loaded_plugins}; use Exporter q{import}; our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match @@ -32,7 +32,7 @@ our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE use Memoize; memoize("abs2rel"); memoize("pagespec_translate"); -memoize("file_pruned"); +memoize("template_file"); sub getsetup () { wikiname => { @@ -149,6 +149,13 @@ sub getsetup () { safe => 0, # path rebuild => 1, }, + templatedirs => { + type => "internal", + default => [], + description => "additional directories containing template files", + safe => 0, + rebuild => 0, + }, underlaydir => { type => "string", default => "$installdir/share/ikiwiki/basewiki", @@ -220,6 +227,13 @@ sub getsetup () { safe => 1, rebuild => 1, }, + discussionpage => { + type => "string", + default => gettext("Discussion"), + description => "name of Discussion pages", + safe => 1, + rebuild => 1, + }, sslcookie => { type => "boolean", default => 0, @@ -329,7 +343,7 @@ sub getsetup () { qr/\.x?html?$/, qr/\.ikiwiki-new$/, qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//, qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//, - qr/\.dpkg-tmp$/], + qr/(^|\/)CVS\//, qr/\.dpkg-tmp$/], description => "regexps of source files to ignore", safe => 0, rebuild => 1, @@ -349,7 +363,7 @@ sub getsetup () { }, web_commit_regexp => { type => "internal", - default => qr/^web commit (by (.*?(?=: |$))|from (\d+\.\d+\.\d+\.\d+)):?(.*)/, + default => qr/^web commit (by (.*?(?=: |$))|from ([0-9a-fA-F:.]+[0-9a-fA-F])):?(.*)/, description => "regexp to parse web commits from logs", safe => 0, rebuild => 0, @@ -459,7 +473,7 @@ sub checkconfig () { if (defined $config{locale}) { if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) { $ENV{LANG}=$config{locale}; - $gettext_obj=undef; + define_gettext(); } } @@ -529,12 +543,12 @@ sub loadplugins () { } if ($config{rcs}) { - if (exists $IkiWiki::hooks{rcs}) { + if (exists $hooks{rcs}) { error(gettext("cannot use multiple rcs plugins")); } loadplugin($config{rcs}); } - if (! exists $IkiWiki::hooks{rcs}) { + if (! exists $hooks{rcs}) { loadplugin("norcs"); } @@ -654,9 +668,15 @@ sub pagetype ($) { return; } +my %pagename_cache; + sub pagename ($) { my $file=shift; + if (exists $pagename_cache{$file}) { + return $pagename_cache{$file}; + } + my $type=pagetype($file); my $page=$file; $page=~s/\Q.$type\E*$// @@ -665,6 +685,8 @@ sub pagename ($) { if ($config{indexpages} && $page=~/(.*)\/index$/) { $page=$1; } + + $pagename_cache{$file} = $page; return $page; } @@ -1063,6 +1085,41 @@ sub htmllink ($$$;@) { return "$linktext"; } +sub openiduser ($) { + my $user=shift; + + if ($user =~ m!^https?://! 
&& + eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) { + my $display; + + if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) { + # this works in at least 2.x + $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user); + } + else { + # this only works in 1.x + my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user); + $display=$oid->display; + } + + # Convert "user.somehost.com" to "user [somehost.com]" + # (also "user.somehost.co.uk") + if ($display !~ /\[/) { + $display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/; + } + # Convert "http://somehost.com/user" to "user [somehost.com]". + # (also "https://somehost.com/user/") + if ($display !~ /\[/) { + $display=~s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/; + } + $display=~s!^https?://!!; # make sure this is removed + eval q{use CGI 'escapeHTML'}; + error($@) if $@; + return escapeHTML($display); + } + return; +} + sub userlink ($) { my $user=shift; @@ -1213,9 +1270,10 @@ sub preprocess ($$$;$$) { ); }; if ($@) { - chomp $@; + my $error=$@; + chomp $error; $ret="[[!$command ". - gettext("Error").": $@"."]]"; + gettext("Error").": $error"."]]"; } } else { @@ -1253,7 +1311,7 @@ sub preprocess ($$$;$$) { | "[^"]+" # single-quoted value | - [^\s\]]+ # unquoted value + [^"\s\]]+ # unquoted value ) \s* # whitespace or end # of directive @@ -1276,7 +1334,7 @@ sub preprocess ($$$;$$) { | "[^"]+" # single-quoted value | - [^\s\]]+ # unquoted value + [^"\s\]]+ # unquoted value ) \s* # whitespace or end # of directive @@ -1346,7 +1404,7 @@ sub check_content (@) { foreach my $line (split("\n", $params{content})) { push @diff, $line if ! exists $old{$_}; } - $params{content}=join("\n", @diff); + $params{diff}=join("\n", @diff); } my $ok; @@ -1424,7 +1482,8 @@ sub loadindex () { %oldrenderedfiles=%pagectime=(); if (! $config{rebuild}) { %pagesources=%pagemtime=%oldlinks=%links=%depends= - %destsources=%renderedfiles=%pagecase=%pagestate=(); + %destsources=%renderedfiles=%pagecase=%pagestate= + %depends_simple=(); } my $in; if (! open ($in, "<", "$config{wikistatedir}/indexdb")) { @@ -1464,8 +1523,18 @@ sub loadindex () { $links{$page}=$d->{links}; $oldlinks{$page}=[@{$d->{links}}]; } - if (exists $d->{depends}) { - $depends{$page}=$d->{depends}; + if (exists $d->{depends_simple}) { + $depends_simple{$page}={ + map { $_ => 1 } @{$d->{depends_simple}} + }; + } + if (exists $d->{dependslist}) { + $depends{$page}={ + map { $_ => 1 } @{$d->{dependslist}} + }; + } + elsif (exists $d->{depends}) { + $depends{$page}={$d->{depends} => 1}; } if (exists $d->{state}) { $pagestate{$page}=$d->{state}; @@ -1511,7 +1580,11 @@ sub saveindex () { }; if (exists $depends{$page}) { - $index{page}{$src}{depends} = $depends{$page}; + $index{page}{$src}{dependslist} = [ keys %{$depends{$page}} ]; + } + + if (exists $depends_simple{$page}) { + $index{page}{$src}{depends_simple} = [ keys %{$depends_simple{$page}} ]; } if (exists $pagestate{$page}) { @@ -1543,7 +1616,8 @@ sub saveindex () { sub template_file ($) { my $template=shift; - foreach my $dir ($config{templatedir}, "$installdir/share/ikiwiki/templates") { + foreach my $dir ($config{templatedir}, @{$config{templatedirs}}, + "$installdir/share/ikiwiki/templates") { return "$dir/$template" if -e "$dir/$template"; } return; @@ -1681,52 +1755,65 @@ sub rcs_receive () { sub add_depends ($$) { my $page=shift; my $pagespec=shift; - - return unless pagespec_valid($pagespec); - if (! 
exists $depends{$page}) { - $depends{$page}=$pagespec; - } - else { - $depends{$page}=pagespec_merge($depends{$page}, $pagespec); + if ($pagespec =~ /$config{wiki_file_regexp}/ && + $pagespec !~ /[\s*?()!]/) { + # a simple dependency, which can be matched by string eq + $depends_simple{$page}{lc $pagespec} = 1; + return 1; } + return unless pagespec_valid($pagespec); + + $depends{$page}{$pagespec} = 1; return 1; } -sub file_pruned ($$) { - require File::Spec; - my $file=File::Spec->canonpath(shift); - my $base=File::Spec->canonpath(shift); - $file =~ s#^\Q$base\E/+##; +sub file_pruned ($;$) { + my $file=shift; + if (@_) { + require File::Spec; + $file=File::Spec->canonpath($file); + my $base=File::Spec->canonpath(shift); + return if $file eq $base; + $file =~ s#^\Q$base\E/+##; + } my $regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')'; - return $file =~ m/$regexp/ && $file ne $base; + return $file =~ m/$regexp/; } -sub gettext { - # Only use gettext in the rare cases it's needed. +sub define_gettext () { + # If translation is needed, redefine the gettext function to do it. + # Otherwise, it becomes a quick no-op. + no warnings 'redefine'; if ((exists $ENV{LANG} && length $ENV{LANG}) || (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) || (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) { - if (! $gettext_obj) { - $gettext_obj=eval q{ + *gettext=sub { + my $gettext_obj=eval q{ use Locale::gettext q{textdomain}; Locale::gettext->domain('ikiwiki') }; - if ($@) { - print STDERR "$@"; - $gettext_obj=undef; + + if ($gettext_obj) { + $gettext_obj->get(shift); + } + else { return shift; } - } - return $gettext_obj->get(shift); + }; } else { - return shift; + *gettext=sub { return shift }; } } +sub gettext { + define_gettext(); + gettext(@_); +} + sub yesno ($) { my $val=shift; @@ -1766,14 +1853,6 @@ sub add_link ($$) { unless grep { $_ eq $link } @{$links{$page}}; } -sub pagespec_merge ($$) { - my $a=shift; - my $b=shift; - - return $a if $a eq $b; - return "($a) or ($b)"; -} - sub pagespec_translate ($) { my $spec=shift; @@ -1794,7 +1873,7 @@ sub pagespec_translate ($) { [^\s()]+ # any other text ) \s* # ignore whitespace - }igx) { + }gx) { my $word=$1; if (lc $word eq 'and') { $code.=' &&'; @@ -1976,10 +2055,10 @@ sub match_link ($$;@) { else { return IkiWiki::SuccessReason->new("$page links to page $p matching $link") if match_glob($p, $link, %params); - $p=~s/^\///; + my ($p_rel)=$p=~/^\/?(.*)/; $link=~s/^\///; - return IkiWiki::SuccessReason->new("$page links to page $p matching $link") - if match_glob($p, $link, %params); + return IkiWiki::SuccessReason->new("$page links to page $p_rel matching $link") + if match_glob($p_rel, $link, %params); } } return IkiWiki::FailReason->new("$page does not link to $link"); @@ -2005,7 +2084,7 @@ sub match_created_before ($$;@) { } } else { - return IkiWiki::FailReason->new("$testpage has no ctime"); + return IkiWiki::ErrorReason->new("$testpage does not exist"); } } @@ -2025,7 +2104,7 @@ sub match_created_after ($$;@) { } } else { - return IkiWiki::FailReason->new("$testpage has no ctime"); + return IkiWiki::ErrorReason->new("$testpage does not exist"); } }
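
The two sketches below are not part of the patch; they are minimal standalone illustrations of two of the reworked routines, written against the structures shown in the hunks above.

First, the dependency split introduced in add_depends(): a bare page name now lands in %depends_simple (cheap, matched later by string equality), while anything with pagespec syntax stays in %depends. The wiki_file_regexp value and the page names are illustrative stand-ins, and the pagespec_valid() check from the real function is omitted to keep the sketch self-contained.

```perl
#!/usr/bin/perl
use strict;
use warnings;

# Illustrative stand-in for the wiki_file_regexp setting.
my %config = (wiki_file_regexp => qr/(^[-[:alnum:]_.:\/+]+$)/);
my (%depends, %depends_simple);

sub add_depends {
	my ($page, $pagespec) = @_;

	if ($pagespec =~ /$config{wiki_file_regexp}/ &&
	    $pagespec !~ /[\s*?()!]/) {
		# A simple dependency: a bare page name, matched by string eq.
		$depends_simple{$page}{lc $pagespec} = 1;
		return 1;
	}

	# The real function calls pagespec_valid($pagespec) here before storing.
	$depends{$page}{$pagespec} = 1;
	return 1;
}

add_depends("index", "sidebar");                   # goes to %depends_simple
add_depends("index", "blog/* and !*/Discussion");  # stays a full pagespec

print "simple: ", join(", ", keys %{$depends_simple{"index"}}), "\n";
print "full:   ", join(", ", keys %{$depends{"index"}}), "\n";
```

Both hashes now map a page to a hash of dependencies rather than to a single merged pagespec string, which is why loadindex()/saveindex() above gain the dependslist and depends_simple index fields and why pagespec_merge() could be dropped.

Second, the gettext rework: define_gettext() installs the appropriate gettext implementation into the symbol table on first use, replacing the old cached $gettext_obj. A rough sketch of the redefinition trick, with Locale::gettext replaced by an illustrative lookup table so it runs anywhere:

```perl
#!/usr/bin/perl
use strict;
use warnings;

my %translations = (Discussion => "Diskussion");	# stand-in for Locale::gettext

sub define_gettext () {
	# Install either a translating closure or a cheap pass-through,
	# so later calls skip the environment checks entirely.
	no warnings 'redefine';
	if ((exists $ENV{LANG} && length $ENV{LANG}) ||
	    (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
	    (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
		*gettext=sub {
			exists $translations{$_[0]} ? $translations{$_[0]} : $_[0];
		};
	}
	else {
		*gettext=sub { return shift };
	}
}

sub gettext {
	define_gettext();
	gettext(@_);	# dispatches to the version just installed
}

print gettext("Discussion"), "\n";
```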