X-Git-Url: https://sipb.mit.edu/gitweb.cgi/ikiwiki.git/blobdiff_plain/5e7b2dea84a35163b599b88efc02cd7ef3e0ad46..0bfc364a7df124509855b8ed0b1b33ab5bc9ebbb:/IkiWiki.pm

diff --git a/IkiWiki.pm b/IkiWiki.pm
index fd7e23524..74d452c50 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -7,24 +7,24 @@ use strict;
 use Encode;
 use HTML::Entities;
 use URI::Escape q{uri_escape_utf8};
-use POSIX;
+use POSIX ();
 use Storable;
 use open qw{:utf8 :std};
 
 use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
 	%pagestate %wikistate %renderedfiles %oldrenderedfiles
 	%pagesources %destsources %depends %depends_simple %hooks
-	%forcerebuild %loaded_plugins};
+	%forcerebuild %loaded_plugins %typedlinks %oldtypedlinks};
 
 use Exporter q{import};
 our @EXPORT = qw(hook debug error template htmlpage deptype
 	add_depends pagespec_match pagespec_match_list bestlink
 	htmllink readfile writefile pagetype srcfile pagename
-	displaytime will_render gettext urlto targetpage
+	displaytime will_render gettext ngettext urlto targetpage
 	add_underlay pagetitle titlepage linkpage newpagefile
 	inject add_link
 	%config %links %pagestate %wikistate %renderedfiles
-	%pagesources %destsources);
+	%pagesources %destsources %typedlinks);
 our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
 our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
 our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
@@ -37,6 +37,7 @@ our $DEPEND_LINKS=4;
 
 # Optimisation.
 use Memoize;
 memoize("abs2rel");
+memoize("sortspec_translate");
 memoize("pagespec_translate");
 memoize("template_file");
@@ -334,11 +335,20 @@ sub getsetup () {
 		safe => 0, # paranoia
 		rebuild => 0,
 	},
+	include => {
+		type => "string",
+		default => undef,
+		example => '^\.htaccess$',
+		description => "regexp of normally excluded files to include",
+		advanced => 1,
+		safe => 0, # regexp
+		rebuild => 1,
+	},
 	exclude => {
 		type => "string",
 		default => undef,
-		example => '\.wav$',
-		description => "regexp of source files to ignore",
+		example => '^(*\.private|Makefile)$',
+		description => "regexp of files that should be skipped",
 		advanced => 1,
 		safe => 0, # regexp
 		rebuild => 1,
@@ -409,6 +419,13 @@ sub getsetup () {
 		safe => 0,
 		rebuild => 0,
 	},
+	clean => {
+		type => "internal",
+		default => 0,
+		description => "running in clean mode",
+		safe => 0,
+		rebuild => 0,
+	},
 	refresh => {
 		type => "internal",
 		default => 0,
@@ -451,6 +468,13 @@ sub getsetup () {
 		safe => 0,
 		rebuild => 0,
 	},
+	setuptype => {
+		type => "internal",
+		default => "Standard",
+		description => "perl class to use to dump setup file",
+		safe => 0,
+		rebuild => 0,
+	},
 	allow_symlinks_before_srcdir => {
 		type => "boolean",
 		default => 0,
@@ -881,7 +905,7 @@ sub bestlink ($$) {
 		$l.="/" if length $l;
 		$l.=$link;
 
-		if (exists $links{$l}) {
+		if (exists $pagesources{$l}) {
 			return $l;
 		}
 		elsif (exists $pagecase{lc $l}) {
@@ -891,7 +915,7 @@ sub bestlink ($$) {
 
 	if (length $config{userdir}) {
 		my $l = "$config{userdir}/".lc($link);
-		if (exists $links{$l}) {
+		if (exists $pagesources{$l}) {
 			return $l;
 		}
 		elsif (exists $pagecase{lc $l}) {
@@ -941,7 +965,12 @@ sub linkpage ($) {
 sub cgiurl (@) {
 	my %params=@_;
 
-	return $config{cgiurl}."?".
+	my $cgiurl=$config{cgiurl};
+	if (exists $params{cgiurl}) {
+		$cgiurl=$params{cgiurl};
+		delete $params{cgiurl};
+	}
+	return $cgiurl."?".
join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params); } @@ -1081,16 +1110,20 @@ sub htmllink ($$$;@) { } my @attrs; - if (defined $opts{rel}) { - push @attrs, ' rel="'.$opts{rel}.'"'; - } - if (defined $opts{class}) { - push @attrs, ' class="'.$opts{class}.'"'; + foreach my $attr (qw{rel class title}) { + if (defined $opts{$attr}) { + push @attrs, " $attr=\"$opts{$attr}\""; + } } return "$linktext"; } +sub userpage ($) { + my $user=shift; + return length $config{userdir} ? "$config{userdir}/$user" : $user; +} + sub openiduser ($) { my $user=shift; @@ -1099,11 +1132,10 @@ sub openiduser ($) { my $display; if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) { - # this works in at least 2.x $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user); } else { - # this only works in 1.x + # backcompat with old version my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user); $display=$oid->display; } @@ -1116,7 +1148,7 @@ sub openiduser ($) { # Convert "http://somehost.com/user" to "user [somehost.com]". # (also "https://somehost.com/user/") if ($display !~ /\[/) { - $display=~s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/; + $display=~s/^https?:\/\/(.+)\/([^\/#?]+)\/?(?:[#?].*)?$/$2 [$1]/; } $display=~s!^https?://!!; # make sure this is removed eval q{use CGI 'escapeHTML'}; @@ -1126,23 +1158,6 @@ sub openiduser ($) { return; } -sub userlink ($) { - my $user=shift; - - my $oiduser=eval { openiduser($user) }; - if (defined $oiduser) { - return "$oiduser"; - } - else { - eval q{use CGI 'escapeHTML'}; - error($@) if $@; - - return htmllink("", "", escapeHTML( - length $config{userdir} ? $config{userdir}."/".$user : $user - ), noimageinline => 1); - } -} - sub htmlize ($$$$) { my $page=shift; my $destpage=shift; @@ -1150,7 +1165,7 @@ sub htmlize ($$$$) { my $content=shift; my $oneline = $content !~ /\n/; - + if (exists $hooks{htmlize}{$type}) { $content=$hooks{htmlize}{$type}{call}->( page => $page, @@ -1171,10 +1186,9 @@ sub htmlize ($$$$) { if ($oneline) { # hack to get rid of enclosing junk added by markdown - # and other htmlizers + # and other htmlizers/sanitizers $content=~s/^

//i; - $content=~s/<\/p>$//i; - chomp $content; + $content=~s/<\/p>\n*$//i; } return $content; @@ -1229,7 +1243,7 @@ sub preprocess ($$$;$$) { (?: """(.*?)""" # 2: triple-quoted value | - "([^"]+)" # 3: single-quoted value + "([^"]*?)" # 3: single-quoted value | (\S+) # 4: unquoted value ) @@ -1315,7 +1329,7 @@ sub preprocess ($$$;$$) { (?: """.*?""" # triple-quoted value | - "[^"]+" # single-quoted value + "[^"]*?" # single-quoted value | [^"\s\]]+ # unquoted value ) @@ -1338,7 +1352,7 @@ sub preprocess ($$$;$$) { (?: """.*?""" # triple-quoted value | - "[^"]+" # single-quoted value + "[^"]*?" # single-quoted value | [^"\s\]]+ # unquoted value ) @@ -1408,7 +1422,7 @@ sub check_content (@) { my %old=map { $_ => 1 } split("\n", readfile(srcfile($pagesources{$params{page}}))); foreach my $line (split("\n", $params{content})) { - push @diff, $line if ! exists $old{$_}; + push @diff, $line if ! exists $old{$line}; } $params{diff}=join("\n", @diff); } @@ -1489,7 +1503,7 @@ sub loadindex () { if (! $config{rebuild}) { %pagesources=%pagemtime=%oldlinks=%links=%depends= %destsources=%renderedfiles=%pagecase=%pagestate= - %depends_simple=(); + %depends_simple=%typedlinks=%oldtypedlinks=(); } my $in; if (! open ($in, "<", "$config{wikistatedir}/indexdb")) { @@ -1555,6 +1569,14 @@ sub loadindex () { if (exists $d->{state}) { $pagestate{$page}=$d->{state}; } + if (exists $d->{typedlinks}) { + $typedlinks{$page}=$d->{typedlinks}; + + while (my ($type, $links) = each %{$typedlinks{$page}}) { + next unless %$links; + $oldtypedlinks{$page}{$type} = {%$links}; + } + } } $oldrenderedfiles{$page}=[@{$d->{dest}}]; } @@ -1603,6 +1625,10 @@ sub saveindex () { $index{page}{$src}{depends_simple} = $depends_simple{$page}; } + if (exists $typedlinks{$page} && %{$typedlinks{$page}}) { + $index{page}{$src}{typedlinks} = $typedlinks{$page}; + } + if (exists $pagestate{$page}) { foreach my $id (@hookids) { foreach my $key (keys %{$pagestate{$page}{$id}}) { @@ -1780,19 +1806,17 @@ sub add_depends ($$;$) { return 1; } - # Analyse the pagespec, and match it against all pages - # to get a list of influences, and add explicit dependencies - # for those. - #my $sub=pagespec_translate($pagespec); - #return if $@; - #foreach my $p (keys %pagesources) { - # my $r=$sub->($p, location => $page ); - # my %i=$r->influences; - # foreach my $i (keys %i) { - # $depends_simple{$page}{lc $i} |= $i{$i}; - # } - #} - print STDERR "warning: use of add_depends; influences not tracked\n"; + # Add explicit dependencies for influences. + my $sub=pagespec_translate($pagespec); + return unless defined $sub; + foreach my $p (keys %pagesources) { + my $r=$sub->($p, location => $page); + my $i=$r->influences; + foreach my $k (keys %$i) { + $depends_simple{$page}{lc $k} |= $i->{$k}; + } + last if $r->influences_static; + } $depends{$page}{$pagespec} |= $deptype; return 1; @@ -1814,6 +1838,7 @@ sub deptype (@) { return $deptype; } +my $file_prune_regexp; sub file_pruned ($;$) { my $file=shift; if (@_) { @@ -1824,34 +1849,52 @@ sub file_pruned ($;$) { $file =~ s#^\Q$base\E/+##; } - my $regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')'; - return $file =~ m/$regexp/; + if (defined $config{include} && length $config{include}) { + return 0 if $file =~ m/$config{include}/; + } + + if (! 
defined $file_prune_regexp) { + $file_prune_regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')'; + $file_prune_regexp=qr/$file_prune_regexp/; + } + return $file =~ m/$file_prune_regexp/; } sub define_gettext () { # If translation is needed, redefine the gettext function to do it. # Otherwise, it becomes a quick no-op. - no warnings 'redefine'; + my $gettext_obj; + my $getobj; if ((exists $ENV{LANG} && length $ENV{LANG}) || (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) || (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) { - *gettext=sub { - my $gettext_obj=eval q{ + $getobj=sub { + $gettext_obj=eval q{ use Locale::gettext q{textdomain}; Locale::gettext->domain('ikiwiki') }; - - if ($gettext_obj) { - $gettext_obj->get(shift); - } - else { - return shift; - } }; } - else { - *gettext=sub { return shift }; - } + + no warnings 'redefine'; + *gettext=sub { + $getobj->() if $getobj; + if ($gettext_obj) { + $gettext_obj->get(shift); + } + else { + return shift; + } + }; + *ngettext=sub { + $getobj->() if $getobj; + if ($gettext_obj) { + $gettext_obj->nget(@_); + } + else { + return ($_[2] == 1 ? $_[0] : $_[1]) + } + }; } sub gettext { @@ -1859,6 +1902,11 @@ sub gettext { gettext(@_); } +sub ngettext { + define_gettext(); + ngettext(@_); +} + sub yesno ($) { my $val=shift; @@ -1890,12 +1938,82 @@ sub inject { use warnings; } -sub add_link ($$) { +sub add_link ($$;$) { my $page=shift; my $link=shift; + my $type=shift; push @{$links{$page}}, $link unless grep { $_ eq $link } @{$links{$page}}; + + if (defined $type) { + $typedlinks{$page}{$type}{$link} = 1; + } +} + +sub sortspec_translate ($$) { + my $spec = shift; + my $reverse = shift; + + my $code = ""; + my @data; + while ($spec =~ m{ + \s* + (-?) # group 1: perhaps negated + \s* + ( # group 2: a word + \w+\([^\)]*\) # command(params) + | + [^\s]+ # or anything else + ) + \s* + }gx) { + my $negated = $1; + my $word = $2; + my $params = undef; + + if ($word =~ m/^(\w+)\((.*)\)$/) { + # command with parameters + $params = $2; + $word = $1; + } + elsif ($word !~ m/^\w+$/) { + error(sprintf(gettext("invalid sort type %s"), $word)); + } + + if (length $code) { + $code .= " || "; + } + + if ($negated) { + $code .= "-"; + } + + if (exists $IkiWiki::SortSpec::{"cmp_$word"}) { + if (defined $params) { + push @data, $params; + $code .= "IkiWiki::SortSpec::cmp_$word(\$data[$#data])"; + } + else { + $code .= "IkiWiki::SortSpec::cmp_$word(undef)"; + } + } + else { + error(sprintf(gettext("unknown sort type %s"), $word)); + } + } + + if (! length $code) { + # undefined sorting method... sort arbitrarily + return sub { 0 }; + } + + if ($reverse) { + $code="-($code)"; + } + + no warnings; + return eval 'sub { '.$code.' }'; } sub pagespec_translate ($) { @@ -1965,7 +2083,7 @@ sub pagespec_match ($$;@) { my $sub=pagespec_translate($spec); return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"") - if $@ || ! defined $sub; + if ! defined $sub; return $sub->($page, @params); } @@ -1983,89 +2101,71 @@ sub pagespec_match_list ($$;@) { my $sub=pagespec_translate($pagespec); error "syntax error in pagespec \"$pagespec\"" - if $@ || ! defined $sub; + if ! defined $sub; my @candidates; - if (exists $params{limit}) { - @candidates=grep { $params{limit}->($_) } keys %pagesources; + if (exists $params{list}) { + @candidates=exists $params{filter} + ? grep { ! $params{filter}->($_) } @{$params{list}} + : @{$params{list}}; } else { - @candidates=keys %pagesources; + @candidates=exists $params{filter} + ? grep { ! 
$params{filter}->($_) } keys %pagesources + : keys %pagesources; } - - if (defined $params{sort}) { - my $f; - if ($params{sort} eq 'title') { - $f=sub { pagetitle(basename($a)) cmp pagetitle(basename($b)) }; - } - elsif ($params{sort} eq 'title_natural') { - eval q{use Sort::Naturally}; - if ($@) { - error(gettext("Sort::Naturally needed for title_natural sort")); - } - $f=sub { Sort::Naturally::ncmp(pagetitle(basename($a)), pagetitle(basename($b))) }; - } - elsif ($params{sort} eq 'mtime') { - $f=sub { $pagemtime{$b} <=> $pagemtime{$a} }; - } - elsif ($params{sort} eq 'age') { - $f=sub { $pagectime{$b} <=> $pagectime{$a} }; - } - else { - error sprintf(gettext("unknown sort type %s"), $params{sort}); - } - @candidates = sort { &$f } @candidates; + + my $num=$params{num}; + my $sort=$params{sort}; + my $reverse=$params{reverse}; + # when only the top matches will be returned, it's efficient to + # sort before matching to pagespec, + if (defined $num && defined $sort) { + @candidates=IkiWiki::SortSpec::sort_pages( + $sort, $reverse, @candidates); } - - @candidates=reverse(@candidates) if $params{reverse}; + + $depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT); + + # clear params, remainder is passed to pagespec + delete @params{qw{num deptype reverse sort filter list}}; my @matches; my $firstfail; my $count=0; + my $accum=IkiWiki::SuccessReason->new(); foreach my $p (@candidates) { - my $r=$sub->($p, location => $page); + my $r=$sub->($p, %params, location => $page); + error(sprintf(gettext("cannot match pages: %s"), $r)) + if $r->isa("IkiWiki::ErrorReason"); + $accum |= $r; if ($r) { - push @matches, [$p, $r]; - last if defined $params{num} && ++$count == $params{num}; - } - elsif (! defined $firstfail) { - $firstfail=$r; + push @matches, $p; + last if defined $num && ++$count == $num; } } - - $depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT); - my @ret; - if (@matches) { - # Add all influences from successful matches. - foreach my $m (@matches) { - push @ret, $m->[0]; - my %i=$m->[1]->influences; - foreach my $i (keys %i) { - $depends_simple{$page}{lc $i} |= $i{$i}; - } - } + # Add simple dependencies for accumulated influences. + my $i=$accum->influences; + foreach my $k (keys %$i) { + $depends_simple{$page}{lc $k} |= $i->{$k}; } - elsif (defined $firstfail) { - # Add influences from one failure. (Which one should not - # matter; all should have the same influences.) - my %i=$firstfail->influences; - foreach my $i (keys %i) { - $depends_simple{$page}{lc $i} |= $i{$i}; - } - error(sprintf(gettext("cannot match pages: %s"), $firstfail)) - if $firstfail->isa("IkiWiki::ErrorReason"); + # when all matches will be returned, it's efficient to + # sort after matching + if (! defined $num && defined $sort) { + return IkiWiki::SortSpec::sort_pages( + $sort, $reverse, @matches); + } + else { + return @matches; } - - return @ret; } sub pagespec_valid ($) { my $spec=shift; - my $sub=pagespec_translate($spec); - return ! $@; + return defined pagespec_translate($spec); } sub glob2re ($) { @@ -2081,7 +2181,7 @@ use overload ( '""' => sub { $_[0][0] }, '0+' => sub { 0 }, '!' => sub { bless $_[0], 'IkiWiki::SuccessReason'}, - '&' => sub { $_[0]->merge_influences($_[1]); $_[0] }, + '&' => sub { $_[0]->merge_influences($_[1], 1); $_[0] }, '|' => sub { $_[1]->merge_influences($_[0]); $_[1] }, fallback => 1, ); @@ -2094,7 +2194,7 @@ use overload ( '""' => sub { $_[0][0] }, '0+' => sub { 1 }, '!' 
=> sub { bless $_[0], 'IkiWiki::FailReason'}, - '&' => sub { $_[1]->merge_influences($_[0]); $_[1] }, + '&' => sub { $_[1]->merge_influences($_[0], 1); $_[1] }, '|' => sub { $_[0]->merge_influences($_[1]); $_[0] }, fallback => 1, ); @@ -2107,19 +2207,30 @@ sub new { sub influences { my $this=shift; - if (! @_) { - return %{$this->[1]}; - } - else { - $this->[1]={@_}; - } + $this->[1]={@_} if @_; + my %i=%{$this->[1]}; + delete $i{""}; + return \%i; +} + +sub influences_static { + return ! $_[0][1]->{""}; } sub merge_influences { my $this=shift; my $other=shift; - foreach my $influence (keys %{$other->[1]}) { - $this->[1]{$influence} |= $other->[1]{$influence}; + my $anded=shift; + + if (! $anded || (($this || %{$this->[1]}) && + ($other || %{$other->[1]}))) { + foreach my $influence (keys %{$other->[1]}) { + $this->[1]{$influence} |= $other->[1]{$influence}; + } + } + else { + # influence blocker + $this->[1]={}; } } @@ -2174,26 +2285,34 @@ sub match_link ($$;@) { $link=derel($link, $params{location}); my $from=exists $params{location} ? $params{location} : ''; + my $linktype=$params{linktype}; + my $qualifier=''; + if (defined $linktype) { + $qualifier=" with type $linktype"; + } my $links = $IkiWiki::links{$page}; - return IkiWiki::FailReason->new("$page has no links") + return IkiWiki::FailReason->new("$page has no links", $page => $IkiWiki::DEPEND_LINKS, "" => 1) unless $links && @{$links}; my $bestlink = IkiWiki::bestlink($from, $link); foreach my $p (@{$links}) { if (length $bestlink) { - return IkiWiki::SuccessReason->new("$page links to $link", $page => $IkiWiki::DEPEND_LINKS) - if $bestlink eq IkiWiki::bestlink($page, $p); + if ((!defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p}) && $bestlink eq IkiWiki::bestlink($page, $p)) { + return IkiWiki::SuccessReason->new("$page links to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1) + } } else { - return IkiWiki::SuccessReason->new("$page links to page $p matching $link", $page => $IkiWiki::DEPEND_LINKS) - if match_glob($p, $link, %params); + if ((!defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p}) && match_glob($p, $link, %params)) { + return IkiWiki::SuccessReason->new("$page links to page $p$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1) + } my ($p_rel)=$p=~/^\/?(.*)/; $link=~s/^\///; - return IkiWiki::SuccessReason->new("$page links to page $p_rel matching $link", $page => $IkiWiki::DEPEND_LINKS) - if match_glob($p_rel, $link, %params); + if ((!defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p_rel}) && match_glob($p_rel, $link, %params)) { + return IkiWiki::SuccessReason->new("$page links to page $p_rel$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1) + } } } - return IkiWiki::FailReason->new("$page does not link to $link"); + return IkiWiki::FailReason->new("$page does not link to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1); } sub match_backlink ($$;@) { @@ -2274,11 +2393,13 @@ sub match_user ($$;@) { my $user=shift; my %params=@_; + my $regexp=IkiWiki::glob2re($user); + if (! exists $params{user}) { return IkiWiki::ErrorReason->new("no user specified"); } - if (defined $params{user} && lc $params{user} eq lc $user) { + if (defined $params{user} && $params{user}=~/^$regexp$/i) { return IkiWiki::SuccessReason->new("user is $user"); } elsif (! 
defined $params{user}) { @@ -2326,4 +2447,22 @@ sub match_ip ($$;@) { } } +package IkiWiki::SortSpec; + +# This is in the SortSpec namespace so that the $a and $b that sort() uses +# are easily available in this namespace, for cmp functions to use them. +sub sort_pages { + my $f=IkiWiki::sortspec_translate(shift, shift); + sort $f @_ +} + +sub cmp_title { + IkiWiki::pagetitle(IkiWiki::basename($a)) + cmp + IkiWiki::pagetitle(IkiWiki::basename($b)) +} + +sub cmp_mtime { $IkiWiki::pagemtime{$b} <=> $IkiWiki::pagemtime{$a} } +sub cmp_age { $IkiWiki::pagectime{$b} <=> $IkiWiki::pagectime{$a} } + 1
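
Two short usage sketches follow; they illustrate the interfaces added above and are not part of the diff. First, typed links: add_link() gains an optional third argument that is recorded in %typedlinks (and persisted through the new typedlinks field in the indexdb), and match_link() grows a linktype parameter. A minimal plugin sketch, with a hypothetical plugin name ("linkdemo") and hypothetical page names; only the hook()/add_link() calls themselves are the real API:

#!/usr/bin/perl
# Sketch of a plugin recording a typed link via the new API.
package IkiWiki::Plugin::linkdemo;

use warnings;
use strict;
use IkiWiki 3.00;

sub import {
	hook(type => "scan", id => "linkdemo", call => \&scan);
}

sub scan (@) {
	my %params=@_;
	# The optional third argument stores the link under
	# $typedlinks{$page}{$type}{$link} as well as in the untyped
	# %links, per the add_link ($$;$) prototype above.
	add_link($params{page}, "tags/example", "tag");
}

1

A pagespec matcher can then pass linktype => "tag" through match_link()'s %params to match only links of that type.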
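Second, sortspecs: sortspec_translate() resolves each whitespace-separated word of a sortspec to a cmp_* function looked up in the IkiWiki::SortSpec stash (the exists $IkiWiki::SortSpec::{"cmp_$word"} test above), so defining such a function adds a new sort order. cmp_length below is hypothetical, as are the page names; sort, num, reverse and deptype are real pagespec_match_list() parameters from this diff:

#!/usr/bin/perl
use warnings;
use strict;

# Hypothetical sort method: compare page names by length. It must be
# compiled in IkiWiki::SortSpec, because sort_pages() calls sort() in
# that package, so the $a and $b it sets are that package's variables.
package IkiWiki::SortSpec;
sub cmp_length { length $a <=> length $b }

package main;
use IkiWiki 3.00;

# "-length title" exercises the multi-word sortspec syntax: the
# leading "-" negates cmp_length (longest name first), and ties fall
# through (via ||) to the built-in cmp_title. With num set,
# pagespec_match_list() sorts the candidates before matching them.
my @pages=pagespec_match_list("index", "blog/*",
	sort => "-length title",
	num => 10,
	deptype => deptype("presence"));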