X-Git-Url: https://sipb.mit.edu/gitweb.cgi/ikiwiki.git/blobdiff_plain/4002d7c1a4657e769b036c6e76106991ec5c3897..1030d23a2ca1be2680a6fac8366f880a1d1760c6:/IkiWiki.pm

diff --git a/IkiWiki.pm b/IkiWiki.pm
index 9c386e154..022bfe3bd 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -7,7 +7,7 @@ use strict;
 use Encode;
 use HTML::Entities;
 use URI::Escape q{uri_escape_utf8};
-use POSIX;
+use POSIX ();
 use Storable;
 use open qw{:utf8 :std};
 
@@ -17,11 +17,12 @@ use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
 	    %forcerebuild %loaded_plugins};
 
 use Exporter q{import};
-our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
-	pagespec_match_list bestlink htmllink readfile writefile
-	pagetype srcfile pagename displaytime will_render gettext urlto
-	targetpage add_underlay pagetitle titlepage linkpage
-	newpagefile inject add_link
+our @EXPORT = qw(hook debug error template htmlpage deptype
+	add_depends pagespec_match pagespec_match_list bestlink
+	htmllink readfile writefile pagetype srcfile pagename
+	displaytime will_render gettext ngettext urlto targetpage
+	add_underlay pagetitle titlepage linkpage newpagefile
+	inject add_link
 	%config %links %pagestate %wikistate %renderedfiles
 	%pagesources %destsources);
 our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
@@ -37,7 +38,6 @@ our $DEPEND_LINKS=4;
 use Memoize;
 memoize("abs2rel");
 memoize("pagespec_translate");
-memoize("file_pruned");
 memoize("template_file");
 
 sub getsetup () {
@@ -334,11 +334,20 @@ sub getsetup () {
 		safe => 0, # paranoia
 		rebuild => 0,
 	},
+	include => {
+		type => "string",
+		default => undef,
+		example => '^\.htaccess$',
+		description => "regexp of normally excluded files to include",
+		advanced => 1,
+		safe => 0, # regexp
+		rebuild => 1,
+	},
 	exclude => {
 		type => "string",
 		default => undef,
-		example => '\.wav$',
-		description => "regexp of source files to ignore",
+		example => '^(*\.private|Makefile)$',
+		description => "regexp of files that should be skipped",
 		advanced => 1,
 		safe => 0, # regexp
 		rebuild => 1,
@@ -409,6 +418,13 @@
 		safe => 0,
 		rebuild => 0,
 	},
+	clean => {
+		type => "internal",
+		default => 0,
+		description => "running in clean mode",
+		safe => 0,
+		rebuild => 0,
+	},
 	refresh => {
 		type => "internal",
 		default => 0,
@@ -451,6 +467,13 @@
 		safe => 0,
 		rebuild => 0,
 	},
+	setuptype => {
+		type => "internal",
+		default => "Standard",
+		description => "perl class to use to dump setup file",
+		safe => 0,
+		rebuild => 0,
+	},
 	allow_symlinks_before_srcdir => {
 		type => "boolean",
 		default => 0,
@@ -881,7 +904,7 @@ sub bestlink ($$) {
 		$l.="/" if length $l;
 		$l.=$link;
 
-		if (exists $links{$l}) {
+		if (exists $pagesources{$l}) {
 			return $l;
 		}
 		elsif (exists $pagecase{lc $l}) {
@@ -891,7 +914,7 @@
 
 	if (length $config{userdir}) {
 		my $l = "$config{userdir}/".lc($link);
-		if (exists $links{$l}) {
+		if (exists $pagesources{$l}) {
 			return $l;
 		}
 		elsif (exists $pagecase{lc $l}) {
@@ -941,7 +964,12 @@ sub linkpage ($) {
 sub cgiurl (@) {
 	my %params=@_;
 
-	return $config{cgiurl}."?".
+	my $cgiurl=$config{cgiurl};
+	if (exists $params{cgiurl}) {
+		$cgiurl=$params{cgiurl};
+		delete $params{cgiurl};
+	}
+	return $cgiurl."?".
join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params); } @@ -1081,16 +1109,20 @@ sub htmllink ($$$;@) { } my @attrs; - if (defined $opts{rel}) { - push @attrs, ' rel="'.$opts{rel}.'"'; - } - if (defined $opts{class}) { - push @attrs, ' class="'.$opts{class}.'"'; + foreach my $attr (qw{rel class title}) { + if (defined $opts{$attr}) { + push @attrs, " $attr=\"$opts{$attr}\""; + } } return "$linktext"; } +sub userpage ($) { + my $user=shift; + return length $config{userdir} ? "$config{userdir}/$user" : $user; +} + sub openiduser ($) { my $user=shift; @@ -1099,11 +1131,10 @@ sub openiduser ($) { my $display; if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) { - # this works in at least 2.x $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user); } else { - # this only works in 1.x + # backcompat with old version my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user); $display=$oid->display; } @@ -1116,7 +1147,7 @@ sub openiduser ($) { # Convert "http://somehost.com/user" to "user [somehost.com]". # (also "https://somehost.com/user/") if ($display !~ /\[/) { - $display=~s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/; + $display=~s/^https?:\/\/(.+)\/([^\/#?]+)\/?(?:[#?].*)?$/$2 [$1]/; } $display=~s!^https?://!!; # make sure this is removed eval q{use CGI 'escapeHTML'}; @@ -1126,23 +1157,6 @@ sub openiduser ($) { return; } -sub userlink ($) { - my $user=shift; - - my $oiduser=eval { openiduser($user) }; - if (defined $oiduser) { - return "$oiduser"; - } - else { - eval q{use CGI 'escapeHTML'}; - error($@) if $@; - - return htmllink("", "", escapeHTML( - length $config{userdir} ? $config{userdir}."/".$user : $user - ), noimageinline => 1); - } -} - sub htmlize ($$$$) { my $page=shift; my $destpage=shift; @@ -1229,7 +1243,7 @@ sub preprocess ($$$;$$) { (?: """(.*?)""" # 2: triple-quoted value | - "([^"]+)" # 3: single-quoted value + "([^"]*?)" # 3: single-quoted value | (\S+) # 4: unquoted value ) @@ -1315,7 +1329,7 @@ sub preprocess ($$$;$$) { (?: """.*?""" # triple-quoted value | - "[^"]+" # single-quoted value + "[^"]*?" # single-quoted value | [^"\s\]]+ # unquoted value ) @@ -1338,7 +1352,7 @@ sub preprocess ($$$;$$) { (?: """.*?""" # triple-quoted value | - "[^"]+" # single-quoted value + "[^"]*?" # single-quoted value | [^"\s\]]+ # unquoted value ) @@ -1408,7 +1422,7 @@ sub check_content (@) { my %old=map { $_ => 1 } split("\n", readfile(srcfile($pagesources{$params{page}}))); foreach my $line (split("\n", $params{content})) { - push @diff, $line if ! exists $old{$_}; + push @diff, $line if ! exists $old{$line}; } $params{diff}=join("\n", @diff); } @@ -1768,95 +1782,107 @@ sub rcs_receive () { $hooks{rcs}{rcs_receive}{call}->(); } -sub add_depends ($$;@) { +sub add_depends ($$;$) { my $page=shift; my $pagespec=shift; + my $deptype=shift || $DEPEND_CONTENT; # Is the pagespec a simple page name? - my $simple=$pagespec =~ /$config{wiki_file_regexp}/ && - $pagespec !~ /[\s*?()!]/; - - my $deptype=0; - if (@_) { - my %params=@_; - - if ($params{presence}) { - # Is the pagespec limited to terms that will continue - # to match pages as long as those pages exist? 
-			my $presence_limited=1;
-			while ($presence_limited && $pagespec=~m/(\w+)\([^\)]*\)/g) {
-				$presence_limited = $1 =~ /^(glob|internal|creation_month|creation_day|creation_year|created_before|created_after)$/;
-			}
-			if ($presence_limited) {
-				$deptype=$deptype | $DEPEND_PRESENCE;
-			}
-			else {
-				$deptype=$deptype | $DEPEND_CONTENT;
-			}
-		}
-		if ($params{links}) {
-			# Is the pagespec limited to terms that will continue
-			# to match pages as long as those pages exist and
-			# link to the same places?
-			my $links_limited=1;
-			while ($links_limited && $pagespec=~m/(\w+)\([^\)]*\)/g) {
-				$links_limited = $1 =~ /^(glob|internal|creation_month|creation_day|creation_year|created_before|created_after|backlink)$/;
-			}
-			if ($links_limited) {
-				$deptype=$deptype | $DEPEND_LINKS;
-			}
-			else {
-				$deptype=$deptype | $DEPEND_CONTENT;
-			}
-		}
-	}
-	$deptype=$DEPEND_CONTENT unless $deptype;
-
-	if ($simple) {
+	if ($pagespec =~ /$config{wiki_file_regexp}/ &&
+	    $pagespec !~ /[\s*?()!]/) {
 		$depends_simple{$page}{lc $pagespec} |= $deptype;
 		return 1;
 	}
 
-	return unless pagespec_valid($pagespec);
+	# Add explicit dependencies for influences.
+	my $sub=pagespec_translate($pagespec);
+	return if $@;
+	foreach my $p (keys %pagesources) {
+		my $r=$sub->($p, location => $page);
+		my $i=$r->influences;
+		foreach my $k (keys %$i) {
+			$depends_simple{$page}{lc $k} |= $i->{$k};
+		}
+		last if $r->influences_static;
+	}
 
 	$depends{$page}{$pagespec} |= $deptype;
 	return 1;
 }
 
-sub file_pruned ($$) {
-	require File::Spec;
-	my $file=File::Spec->canonpath(shift);
-	my $base=File::Spec->canonpath(shift);
-	$file =~ s#^\Q$base\E/+##;
+sub deptype (@) {
+	my $deptype=0;
+	foreach my $type (@_) {
+		if ($type eq 'presence') {
+			$deptype |= $DEPEND_PRESENCE;
+		}
+		elsif ($type eq 'links') {
+			$deptype |= $DEPEND_LINKS;
+		}
+		elsif ($type eq 'content') {
+			$deptype |= $DEPEND_CONTENT;
+		}
+	}
+	return $deptype;
+}
+
+my $file_prune_regexp;
+sub file_pruned ($;$) {
+	my $file=shift;
+	if (@_) {
+		require File::Spec;
+		$file=File::Spec->canonpath($file);
+		my $base=File::Spec->canonpath(shift);
+		return if $file eq $base;
+		$file =~ s#^\Q$base\E/+##;
+	}
+
+	if (defined $config{include} && length $config{include}) {
+		return 0 if $file =~ m/$config{include}/;
+	}
 
-	my $regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')';
-	return $file =~ m/$regexp/ && $file ne $base;
+	if (! defined $file_prune_regexp) {
+		$file_prune_regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')';
+		$file_prune_regexp=qr/$file_prune_regexp/;
+	}
+	return $file =~ m/$file_prune_regexp/;
 }
 
 sub define_gettext () {
 	# If translation is needed, redefine the gettext function to do it.
 	# Otherwise, it becomes a quick no-op.
-	no warnings 'redefine';
+	my $gettext_obj;
+	my $getobj;
 	if ((exists $ENV{LANG} && length $ENV{LANG}) ||
 	    (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
 	    (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
-		*gettext=sub {
-			my $gettext_obj=eval q{
+		$getobj=sub {
+			$gettext_obj=eval q{
 				use Locale::gettext q{textdomain};
 				Locale::gettext->domain('ikiwiki')
 			};
-
-			if ($gettext_obj) {
-				$gettext_obj->get(shift);
-			}
-			else {
-				return shift;
-			}
 		};
 	}
-	else {
-		*gettext=sub { return shift };
-	}
+
+	no warnings 'redefine';
+	*gettext=sub {
+		$getobj->() if $getobj;
+		if ($gettext_obj) {
+			$gettext_obj->get(shift);
+		}
+		else {
+			return shift;
+		}
+	};
+	*ngettext=sub {
+		$getobj->() if $getobj;
+		if ($gettext_obj) {
+			$gettext_obj->nget(@_);
+		}
+		else {
+			return ($_[2] == 1 ? $_[0] : $_[1])
+		}
+	};
 }
 
 sub gettext {
@@ -1864,6 +1890,11 @@
 	gettext(@_);
 }
 
+sub ngettext {
+	define_gettext();
+	ngettext(@_);
+}
+
 sub yesno ($) {
 	my $val=shift;
 
@@ -1975,27 +2006,87 @@ sub pagespec_match ($$;@) {
 }
 
 sub pagespec_match_list ($$;@) {
-	my $pages=shift;
-	my $spec=shift;
-	my @params=@_;
+	my $page=shift;
+	my $pagespec=shift;
+	my %params=@_;
 
-	my $sub=pagespec_translate($spec);
-	error "syntax error in pagespec \"$spec\""
-		if $@ || ! defined $sub;
-
-	my @ret;
-	my $r;
-	foreach my $page (@$pages) {
-		$r=$sub->($page, @params);
-		push @ret, $page if $r;
+	# Backwards compatability with old calling convention.
+	if (ref $page) {
+		print STDERR "warning: a plugin (".caller().") is using pagespec_match_list in an obsolete way, and needs to be updated\n";
+		$params{list}=$page;
+		$page=$params{location}; # ugh!
 	}
 
-	if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
-		error(sprintf(gettext("cannot match pages: %s"), $r));
+	my $sub=pagespec_translate($pagespec);
+	error "syntax error in pagespec \"$pagespec\""
+		if $@ || ! defined $sub;
+
+	my @candidates;
+	if (exists $params{list}) {
+		@candidates=exists $params{filter}
+			? grep { ! $params{filter}->($_) } @{$params{list}}
+			: @{$params{list}};
 	}
 	else {
-		return @ret;
+		@candidates=exists $params{filter}
+			? grep { ! $params{filter}->($_) } keys %pagesources
+			: keys %pagesources;
+	}
+
+	if (defined $params{sort}) {
+		my $f;
+		if ($params{sort} eq 'title') {
+			$f=sub { pagetitle(basename($a)) cmp pagetitle(basename($b)) };
+		}
+		elsif ($params{sort} eq 'title_natural') {
+			eval q{use Sort::Naturally};
+			if ($@) {
+				error(gettext("Sort::Naturally needed for title_natural sort"));
+			}
+			$f=sub { Sort::Naturally::ncmp(pagetitle(basename($a)), pagetitle(basename($b))) };
+		}
+		elsif ($params{sort} eq 'mtime') {
+			$f=sub { $pagemtime{$b} <=> $pagemtime{$a} };
+		}
+		elsif ($params{sort} eq 'age') {
+			$f=sub { $pagectime{$b} <=> $pagectime{$a} };
+		}
+		else {
+			error sprintf(gettext("unknown sort type %s"), $params{sort});
+		}
+		@candidates = sort { &$f } @candidates;
 	}
+
+	@candidates=reverse(@candidates) if $params{reverse};
+
+	$depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT);
+
+	# clear params, remainder is passed to pagespec
+	my $num=$params{num};
+	delete @params{qw{num deptype reverse sort filter list}};
+
+	my @matches;
+	my $firstfail;
+	my $count=0;
+	my $accum=IkiWiki::SuccessReason->new();
+	foreach my $p (@candidates) {
+		my $r=$sub->($p, %params, location => $page);
+		error(sprintf(gettext("cannot match pages: %s"), $r))
+			if $r->isa("IkiWiki::ErrorReason");
+		$accum |= $r;
+		if ($r) {
+			push @matches, $p;
+			last if defined $num && ++$count == $num;
+		}
+	}
+
+	# Add simple dependencies for accumulated influences.
+	my $i=$accum->influences;
+	foreach my $k (keys %$i) {
+		$depends_simple{$page}{lc $k} |= $i->{$k};
+	}
+
+	return @matches;
 }
 
 sub pagespec_valid ($) {
@@ -2018,8 +2109,8 @@ use overload (
 	'""'	=> sub { $_[0][0] },
 	'0+'	=> sub { 0 },
 	'!'	=> sub { bless $_[0], 'IkiWiki::SuccessReason'},
-	'&'	=> sub { $_[0][1]={%{$_[0][1]}, %{$_[1][1]}}; $_[0] },
-	'|'	=> sub { $_[1][1]={%{$_[0][1]}, %{$_[1][1]}}; $_[1] },
+	'&'	=> sub { $_[0]->merge_influences($_[1], 1); $_[0] },
+	'|'	=> sub { $_[1]->merge_influences($_[0]); $_[1] },
 	fallback => 1,
 );
 
@@ -2031,19 +2122,44 @@ use overload (
 	'""'	=> sub { $_[0][0] },
 	'0+'	=> sub { 1 },
 	'!'	=> sub { bless $_[0], 'IkiWiki::FailReason'},
-	'&'	=> sub { $_[1][1]={%{$_[0][1]}, %{$_[1][1]}}; $_[1] },
-	'|'	=> sub { $_[0][1]={%{$_[0][1]}, %{$_[1][1]}}; $_[0] },
+	'&'	=> sub { $_[1]->merge_influences($_[0], 1); $_[1] },
+	'|'	=> sub { $_[0]->merge_influences($_[1]); $_[0] },
 	fallback => 1,
 );
 
 sub new {
	my $class = shift;
 	my $value = shift;
-	return bless [$value, {map { $_ => 1 } @_}], $class;
+	return bless [$value, {@_}], $class;
 }
 
 sub influences {
-	return keys %{$_[0][1]};
+	my $this=shift;
+	$this->[1]={@_} if @_;
+	my %i=%{$this->[1]};
+	delete $i{""};
+	return \%i;
+}
+
+sub influences_static {
+	return ! $_[0][1]->{""};
+}
+
+sub merge_influences {
+	my $this=shift;
+	my $other=shift;
+	my $anded=shift;
+
+	if (! $anded || (($this || %{$this->[1]}) &&
+	                 ($other || %{$other->[1]}))) {
+		foreach my $influence (keys %{$other->[1]}) {
+			$this->[1]{$influence} |= $other->[1]{$influence};
+		}
+	}
+	else {
+		# influence blocker
+		$this->[1]={};
+	}
 }
 
 package IkiWiki::ErrorReason;
@@ -2099,27 +2215,30 @@ sub match_link ($$;@) {
 	my $from=exists $params{location} ? $params{location} : '';
 
 	my $links = $IkiWiki::links{$page};
-	return IkiWiki::FailReason->new("$page has no links", $link) unless $links && @{$links};
+	return IkiWiki::FailReason->new("$page has no links", "" => 1)
+		unless $links && @{$links};
 	my $bestlink = IkiWiki::bestlink($from, $link);
 	foreach my $p (@{$links}) {
 		if (length $bestlink) {
-			return IkiWiki::SuccessReason->new("$page links to $link", $page)
+			return IkiWiki::SuccessReason->new("$page links to $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
 				if $bestlink eq IkiWiki::bestlink($page, $p);
 		}
 		else {
-			return IkiWiki::SuccessReason->new("$page links to page $p matching $link", $page)
+			return IkiWiki::SuccessReason->new("$page links to page $p matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
 				if match_glob($p, $link, %params);
 			my ($p_rel)=$p=~/^\/?(.*)/;
 			$link=~s/^\///;
-			return IkiWiki::SuccessReason->new("$page links to page $p_rel matching $link", $page)
+			return IkiWiki::SuccessReason->new("$page links to page $p_rel matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1)
 				if match_glob($p_rel, $link, %params);
 		}
 	}
-	return IkiWiki::FailReason->new("$page does not link to $link", $page);
+	return IkiWiki::FailReason->new("$page does not link to $link", "" => 1);
 }
 
 sub match_backlink ($$;@) {
-	return match_link($_[1], $_[0], @_);
+	my $ret=match_link($_[1], $_[0], @_);
+	$ret->influences($_[1] => $IkiWiki::DEPEND_LINKS);
+	return $ret;
 }
 
 sub match_created_before ($$;@) {
@@ -2131,14 +2250,14 @@
 
 	if (exists $IkiWiki::pagectime{$testpage}) {
 		if ($IkiWiki::pagectime{$page} < $IkiWiki::pagectime{$testpage}) {
-			return IkiWiki::SuccessReason->new("$page created before $testpage", $testpage);
+			return IkiWiki::SuccessReason->new("$page created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
 		}
 		else {
-			return IkiWiki::FailReason->new("$page not created before $testpage", $testpage);
+			return IkiWiki::FailReason->new("$page not created before $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
 		}
 	}
 	else {
-		return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage);
+		return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
 	}
 }
 
@@ -2151,14 +2270,14 @@
 
 	if (exists $IkiWiki::pagectime{$testpage}) {
 		if ($IkiWiki::pagectime{$page} > $IkiWiki::pagectime{$testpage}) {
-			return IkiWiki::SuccessReason->new("$page created after $testpage", $testpage);
+			return IkiWiki::SuccessReason->new("$page created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
 		}
 		else {
-			return IkiWiki::FailReason->new("$page not created after $testpage", $testpage);
+			return IkiWiki::FailReason->new("$page not created after $testpage", $testpage => $IkiWiki::DEPEND_PRESENCE);
 		}
 	}
 	else {
-		return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage);
+		return IkiWiki::ErrorReason->new("$testpage does not exist", $testpage => $IkiWiki::DEPEND_PRESENCE);
 	}
 }
 
@@ -2194,11 +2313,13 @@ sub match_user ($$;@) {
 	my $user=shift;
 	my %params=@_;
 
+	my $regexp=IkiWiki::glob2re($user);
+
 	if (! exists $params{user}) {
 		return IkiWiki::ErrorReason->new("no user specified");
 	}
 
-	if (defined $params{user} && lc $params{user} eq lc $user) {
+	if (defined $params{user} && $params{user}=~/^$regexp$/i) {
 		return IkiWiki::SuccessReason->new("user is $user");
 	}
 	elsif (! defined $params{user}) {