From: Amitai Schlair
Date: Fri, 25 Jan 2013 13:47:17 +0000 (-0500)
Subject: Merge branch 'master' into cvs
X-Git-Url: https://sipb.mit.edu/gitweb.cgi/ikiwiki.git/commitdiff_plain/64370885cca3a37ee1f4a9e96673aca7ba5daae4?hp=9faa0f3c6560be7b5e3aea0f8ca12e04a8c85a32

Merge branch 'master' into cvs
---

diff --git a/.gitignore b/.gitignore
index fe1c3d441..f8991a63d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,7 @@ ikiwiki.out
 ikiwiki-transition.out
 ikiwiki-calendar.out
 pm_to_blib
+/MYMETA.yml
 *.man
 /po/cover_db
 po/po2wiki_stamp
diff --git a/Bundle/IkiWiki/Extras.pm b/Bundle/IkiWiki/Extras.pm
index d01d52e5b..0a7cd3ae3 100644
--- a/Bundle/IkiWiki/Extras.pm
+++ b/Bundle/IkiWiki/Extras.pm
@@ -35,6 +35,7 @@ HTML::Tree
 Sort::Naturally
 Gravatar::URL
 Net::INET6Glue
+XML::Writer

 =head1 AUTHOR

diff --git a/IkiWiki.pm b/IkiWiki.pm
index 2a83777e6..52da3c112 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -118,6 +118,22 @@ sub getsetup () {
 		safe => 0,
 		rebuild => 0,
 	},
+	cgi_overload_delay => {
+		type => "string",
+		default => '',
+		example => "10",
+		description => "number of seconds to delay CGI requests when overloaded",
+		safe => 1,
+		rebuild => 0,
+	},
+	cgi_overload_message => {
+		type => "string",
+		default => '',
+		example => "Please wait",
+		description => "message to display when overloaded (may contain html)",
+		safe => 1,
+		rebuild => 0,
+	},
 	rcs => {
 		type => "string",
 		default => '',
@@ -1092,6 +1108,11 @@ sub cgiurl (@) {
 		join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params);
 }

+sub cgiurl_abs (@) {
+	eval q{use URI};
+	URI->new_abs(cgiurl(@_), $config{cgiurl});
+}
+
 sub baseurl (;$) {
 	my $page=shift;

diff --git a/IkiWiki/CGI.pm b/IkiWiki/CGI.pm
index 62383b6fd..5baa6c179 100644
--- a/IkiWiki/CGI.pm
+++ b/IkiWiki/CGI.pm
@@ -131,7 +131,7 @@ sub needsignin ($$) {
 	if (! defined $session->param("name") ||
 	    ! userinfo_get($session->param("name"), "regdate")) {
-		$session->param(postsignin => $ENV{QUERY_STRING});
+		$session->param(postsignin => $q->query_string);
 		cgi_signin($q, $session);
 		cgi_savesession($session);
 		exit;
diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 5e22609c9..89da5c453 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -113,8 +113,7 @@ sub launchaggregation () {
 	my @feeds=needsaggregate();
 	return unless @feeds;
 	if (! lockaggregate()) {
-		debug("an aggregation process is already running");
-		return;
+		error("an aggregation process is already running");
 	}
 	# force a later rebuild of source pages
 	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
@@ -201,7 +200,7 @@ sub migrate_to_internal {
 		if (-e $oldoutput) {
 			require IkiWiki::Render;
 			debug("removing output file $oldoutput");
-			IkiWiki::prune($oldoutput);
+			IkiWiki::prune($oldoutput, $config{destdir});
 		}
 	}
diff --git a/IkiWiki/Plugin/amazon_s3.pm b/IkiWiki/Plugin/amazon_s3.pm
index cfd8cd347..a9da6bf12 100644
--- a/IkiWiki/Plugin/amazon_s3.pm
+++ b/IkiWiki/Plugin/amazon_s3.pm
@@ -232,8 +232,9 @@ sub writefile ($$$;$$) {
 }

 # This is a wrapper around the real prune.
-sub prune ($) {
+sub prune ($;$) {
 	my $file=shift;
+	my $up_to=shift;

 	my @keys=IkiWiki::Plugin::amazon_s3::file2keys($file);
@@ -250,7 +251,7 @@ sub prune ($) {
 		}
 	}

-	return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file);
+	return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file, $up_to);
 }

 1
diff --git a/IkiWiki/Plugin/attachment.pm b/IkiWiki/Plugin/attachment.pm
index 5a180cd5c..aea70429d 100644
--- a/IkiWiki/Plugin/attachment.pm
+++ b/IkiWiki/Plugin/attachment.pm
@@ -148,7 +148,7 @@ sub formbuilder (@) {
 			$f=Encode::decode_utf8($f);
 			$f=~s/^$page\///;
 			if (IkiWiki::isinlinableimage($f) &&
-			    UNIVERSAL::can("IkiWiki::Plugin::img", "import")) {
+			    IkiWiki::Plugin::img->can("import")) {
 				$add.='[[!img '.$f.' align="right" size="" alt=""]]';
 			}
 			else {
@@ -286,7 +286,7 @@ sub attachments_save {
 	}
 	return unless @attachments;
 	require IkiWiki::Render;
-	IkiWiki::prune($dir);
+	IkiWiki::prune($dir, $config{wikistatedir}."/attachments");

 	# Check the attachments in and trigger a wiki refresh.
 	if ($config{rcs}) {
diff --git a/IkiWiki/Plugin/bzr.pm b/IkiWiki/Plugin/bzr.pm
index 3bc4ea8dd..72552abcc 100644
--- a/IkiWiki/Plugin/bzr.pm
+++ b/IkiWiki/Plugin/bzr.pm
@@ -5,6 +5,7 @@ use warnings;
 use strict;
 use IkiWiki;
 use Encode;
+use URI::Escape q{uri_escape_utf8};
 use open qw{:utf8 :std};

 sub import {
@@ -242,8 +243,10 @@ sub rcs_recentchanges ($) {
 			# Skip source name in renames
 			$filename =~ s/^.* => //;

+			my $efilename = uri_escape_utf8($filename);
+
 			my $diffurl = defined $config{'diffurl'} ? $config{'diffurl'} : "";
-			$diffurl =~ s/\[\[file\]\]/$filename/go;
+			$diffurl =~ s/\[\[file\]\]/$efilename/go;
 			$diffurl =~ s/\[\[file-id\]\]/$fileid/go;
 			$diffurl =~ s/\[\[r2\]\]/$info->{revno}/go;
diff --git a/IkiWiki/Plugin/calendar.pm b/IkiWiki/Plugin/calendar.pm
index fc497b3c7..d443198f6 100644
--- a/IkiWiki/Plugin/calendar.pm
+++ b/IkiWiki/Plugin/calendar.pm
@@ -13,7 +13,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
require 5.002; package IkiWiki::Plugin::calendar; diff --git a/IkiWiki/Plugin/comments.pm b/IkiWiki/Plugin/comments.pm index 91a482ed6..c00bf5275 100644 --- a/IkiWiki/Plugin/comments.pm +++ b/IkiWiki/Plugin/comments.pm @@ -301,7 +301,8 @@ sub editcomment ($$) { my @buttons = (POST_COMMENT, PREVIEW, CANCEL); my $form = CGI::FormBuilder->new( - fields => [qw{do sid page subject editcontent type author url}], + fields => [qw{do sid page subject editcontent type author + email url subscribe anonsubscribe}], charset => 'utf-8', method => 'POST', required => [qw{editcontent}], @@ -346,18 +347,35 @@ sub editcomment ($$) { $form->field(name => "type", value => $type, force => 1, type => 'select', options => \@page_types); - $form->tmpl_param(username => $session->param('name')); + my $username=$session->param('name'); + $form->tmpl_param(username => $username); + + $form->field(name => "subscribe", type => 'hidden'); + $form->field(name => "anonsubscribe", type => 'hidden'); + if (IkiWiki::Plugin::notifyemail->can("subscribe")) { + if (defined $username) { + $form->field(name => "subscribe", type => "checkbox", + options => [gettext("email replies to me")]); + } + elsif (IkiWiki::Plugin::passwordauth->can("anonuser")) { + $form->field(name => "anonsubscribe", type => "checkbox", + options => [gettext("email replies to me")]); + } + } if ($config{comments_allowauthor} and ! defined $session->param('name')) { $form->tmpl_param(allowauthor => 1); $form->field(name => 'author', type => 'text', size => '40'); + $form->field(name => 'email', type => 'text', size => '40'); $form->field(name => 'url', type => 'text', size => '40'); } else { $form->tmpl_param(allowauthor => 0); $form->field(name => 'author', type => 'hidden', value => '', force => 1); + $form->field(name => 'email', type => 'hidden', value => '', + force => 1); $form->field(name => 'url', type => 'hidden', value => '', force => 1); } @@ -425,10 +443,7 @@ sub editcomment ($$) { $content .= " nickname=\"$nickname\"\n"; } elsif (defined $session->remote_addr()) { - my $ip = $session->remote_addr(); - if ($ip =~ m/^([.0-9]+)$/) { - $content .= " ip=\"$1\"\n"; - } + $content .= " ip=\"".$session->remote_addr()."\"\n"; } if ($config{comments_allowauthor}) { @@ -490,6 +505,20 @@ sub editcomment ($$) { if ($form->submitted eq POST_COMMENT && $form->validate) { IkiWiki::checksessionexpiry($cgi, $session); + + if (IkiWiki::Plugin::notifyemail->can("subscribe")) { + my $subspec="comment($page)"; + if (defined $username && + length $form->field("subscribe")) { + IkiWiki::Plugin::notifyemail::subscribe( + $username, $subspec); + } + elsif (length $form->field("email") && + length $form->field("anonsubscribe")) { + IkiWiki::Plugin::notifyemail::anonsubscribe( + $form->field("email"), $subspec); + } + } $postcomment=1; my $ok=IkiWiki::check_content(content => $form->field('editcontent'), @@ -575,7 +604,8 @@ sub editcomment ($$) { sub getavatar ($) { my $user=shift; - + return undef unless defined $user; + my $avatar; eval q{use Libravatar::URL}; if (! $@) { @@ -632,9 +662,11 @@ sub commentmoderation ($$) { my $page=IkiWiki::dirname($f); my $file="$config{srcdir}/$f"; + my $filedir=$config{srcdir}; if (! 
-e $file) { # old location $file="$config{wikistatedir}/comments_pending/".$f; + $filedir="$config{wikistatedir}/comments_pending"; } if ($action eq 'Accept') { @@ -649,7 +681,7 @@ sub commentmoderation ($$) { } require IkiWiki::Render; - IkiWiki::prune($file); + IkiWiki::prune($file, $filedir); } } diff --git a/IkiWiki/Plugin/conditional.pm b/IkiWiki/Plugin/conditional.pm index 026078b3c..0a3d7fb4c 100644 --- a/IkiWiki/Plugin/conditional.pm +++ b/IkiWiki/Plugin/conditional.pm @@ -4,7 +4,6 @@ package IkiWiki::Plugin::conditional; use warnings; use strict; use IkiWiki 3.00; -use UNIVERSAL; sub import { hook(type => "getsetup", id => "conditional", call => \&getsetup); diff --git a/IkiWiki/Plugin/cvs.pm b/IkiWiki/Plugin/cvs.pm index 42812ddef..759ea1c23 100644 --- a/IkiWiki/Plugin/cvs.pm +++ b/IkiWiki/Plugin/cvs.pm @@ -33,6 +33,7 @@ use warnings; use strict; use IkiWiki; +use URI::Escape q{uri_escape_utf8}; use File::chdir; @@ -313,7 +314,8 @@ sub rcs_recentchanges ($) { $oldrev =~ s/INITIAL/0/; $newrev =~ s/\(DEAD\)//; my $diffurl = defined $config{diffurl} ? $config{diffurl} : ""; - $diffurl=~s/\[\[file\]\]/$page/g; + my $epage = uri_escape_utf8($page); + $diffurl=~s/\[\[file\]\]/$epage/g; $diffurl=~s/\[\[r1\]\]/$oldrev/g; $diffurl=~s/\[\[r2\]\]/$newrev/g; unshift @pages, { diff --git a/IkiWiki/Plugin/darcs.pm b/IkiWiki/Plugin/darcs.pm index 1313041e7..646f65df1 100644 --- a/IkiWiki/Plugin/darcs.pm +++ b/IkiWiki/Plugin/darcs.pm @@ -3,6 +3,7 @@ package IkiWiki::Plugin::darcs; use warnings; use strict; +use URI::Escape q{uri_escape_utf8}; use IkiWiki; sub import { @@ -336,7 +337,8 @@ sub rcs_recentchanges ($) { foreach my $f (@files) { my $d = defined $config{'diffurl'} ? $config{'diffurl'} : ""; - $d =~ s/\[\[file\]\]/$f/go; + my $ef = uri_escape_utf8($f); + $d =~ s/\[\[file\]\]/$ef/go; $d =~ s/\[\[hash\]\]/$hash/go; push @pg, { diff --git a/IkiWiki/Plugin/editpage.pm b/IkiWiki/Plugin/editpage.pm index 54051c58c..d15607990 100644 --- a/IkiWiki/Plugin/editpage.pm +++ b/IkiWiki/Plugin/editpage.pm @@ -39,7 +39,7 @@ sub refresh () { } if ($delete) { debug(sprintf(gettext("removing old preview %s"), $file)); - IkiWiki::prune("$config{destdir}/$file"); + IkiWiki::prune("$config{destdir}/$file", $config{destdir}); } } elsif (defined $mtime) { @@ -64,7 +64,8 @@ sub cgi_editpage ($$) { decode_cgi_utf8($q); - my @fields=qw(do rcsinfo subpage from page type editcontent editmessage); + my @fields=qw(do rcsinfo subpage from page type editcontent + editmessage subscribe); my @buttons=("Save Page", "Preview", "Cancel"); eval q{use CGI::FormBuilder}; error($@) if $@; @@ -157,6 +158,17 @@ sub cgi_editpage ($$) { noimageinline => 1, linktext => "FormattingHelp")); + my $cansubscribe=IkiWiki::Plugin::notifyemail->can("subscribe") + && IkiWiki::Plugin::comments->can("import") + && defined $session->param('name'); + if ($cansubscribe) { + $form->field(name => "subscribe", type => "checkbox", + options => [gettext("email comments to me")]); + } + else { + $form->field(name => "subscribe", type => 'hidden'); + } + my $previewing=0; if ($form->submitted eq "Cancel") { if ($form->field("do") eq "create" && defined $from) { @@ -448,6 +460,12 @@ sub cgi_editpage ($$) { # caches and get the most recent version of the page. 
redirect($q, $baseurl."?updated"); } + + if ($cansubscribe && length $form->field("subscribe")) { + my $subspec="comment($page)"; + IkiWiki::Plugin::notifyemail::subscribe( + $session->param('name'), $subspec); + } } exit; diff --git a/IkiWiki/Plugin/edittemplate.pm b/IkiWiki/Plugin/edittemplate.pm index 061242fd8..c7f1e4fa7 100644 --- a/IkiWiki/Plugin/edittemplate.pm +++ b/IkiWiki/Plugin/edittemplate.pm @@ -132,7 +132,7 @@ sub filltemplate ($$) { if ($@) { # Indicate that the earlier preprocessor directive set # up a template that doesn't work. - return "[[!pagetemplate ".gettext("failed to process template:")." $@]]"; + return "[[!edittemplate ".gettext("failed to process template:")." $@]]"; } $template->param(name => $page); diff --git a/IkiWiki/Plugin/filecheck.pm b/IkiWiki/Plugin/filecheck.pm index 4f4e67489..cdea5c706 100644 --- a/IkiWiki/Plugin/filecheck.pm +++ b/IkiWiki/Plugin/filecheck.pm @@ -48,7 +48,6 @@ sub getsetup () { plugin => { safe => 1, rebuild => undef, - section => "misc", }, } @@ -140,7 +139,7 @@ sub match_mimetype ($$;@) { my $mimeinfo_ok=! $@; my $mimetype; if ($mimeinfo_ok) { - my $mimetype=File::MimeInfo::Magic::magic($file); + $mimetype=File::MimeInfo::Magic::magic($file); } # Fall back to using file, which has a more complete diff --git a/IkiWiki/Plugin/git.pm b/IkiWiki/Plugin/git.pm index 3dd910cd5..3879abeae 100644 --- a/IkiWiki/Plugin/git.pm +++ b/IkiWiki/Plugin/git.pm @@ -5,6 +5,7 @@ use warnings; use strict; use IkiWiki; use Encode; +use URI::Escape q{uri_escape_utf8}; use open qw{:utf8 :std}; my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate Git sha1sums @@ -340,8 +341,8 @@ sub parse_diff_tree ($) { my $dt_ref = shift; # End of stream? - return if !defined @{ $dt_ref } || - !defined @{ $dt_ref }[0] || !length @{ $dt_ref }[0]; + return if ! @{ $dt_ref } || + !defined $dt_ref->[0] || !length $dt_ref->[0]; my %ci; # Header line. @@ -468,13 +469,10 @@ sub git_sha1 (;$) { # Ignore error since a non-existing file might be given. my ($sha1) = run_or_non('git', 'rev-list', '--max-count=1', 'HEAD', '--', $file); - if ($sha1) { + if (defined $sha1) { ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now } - else { - debug("Empty sha1sum for '$file'."); - } - return defined $sha1 ? $sha1 : q{}; + return defined $sha1 ? $sha1 : ''; } sub rcs_update () { @@ -617,9 +615,10 @@ sub rcs_recentchanges ($) { my @pages; foreach my $detail (@{ $ci->{'details'} }) { my $file = $detail->{'file'}; + my $efile = uri_escape_utf8($file); my $diffurl = defined $config{'diffurl'} ? 
$config{'diffurl'} : ""; - $diffurl =~ s/\[\[file\]\]/$file/go; + $diffurl =~ s/\[\[file\]\]/$efile/go; $diffurl =~ s/\[\[sha1_parent\]\]/$ci->{'parent'}/go; $diffurl =~ s/\[\[sha1_from\]\]/$detail->{'sha1_from'}/go; $diffurl =~ s/\[\[sha1_to\]\]/$detail->{'sha1_to'}/go; diff --git a/IkiWiki/Plugin/graphviz.pm b/IkiWiki/Plugin/graphviz.pm index b9f997e04..d4018edaa 100644 --- a/IkiWiki/Plugin/graphviz.pm +++ b/IkiWiki/Plugin/graphviz.pm @@ -132,6 +132,7 @@ sub graph (@) { }, "text"); $p->parse($src); $p->eof; + $s=~s/\[ href= \]//g; # handle self-links $params{src}=$s; } else { diff --git a/IkiWiki/Plugin/htmlscrubber.pm b/IkiWiki/Plugin/htmlscrubber.pm index a58a27d52..36c012c73 100644 --- a/IkiWiki/Plugin/htmlscrubber.pm +++ b/IkiWiki/Plugin/htmlscrubber.pm @@ -29,6 +29,7 @@ sub import { "irc", "ircs", "lastfm", "ldaps", "magnet", "mms", "msnim", "notes", "rsync", "secondlife", "skype", "ssh", "sftp", "smb", "sms", "snews", "webcal", "ymsgr", + "bitcoin", "git", "svn", "bzr", "darcs", "hg" ); # data is a special case. Allow a few data:image/ types, # but disallow data:text/javascript and everything else. diff --git a/IkiWiki/Plugin/httpauth.pm b/IkiWiki/Plugin/httpauth.pm index cb488449d..76d574b2a 100644 --- a/IkiWiki/Plugin/httpauth.pm +++ b/IkiWiki/Plugin/httpauth.pm @@ -7,6 +7,7 @@ use strict; use IkiWiki 3.00; sub import { + hook(type => "checkconfig", id => "httpauth", call => \&checkconfig); hook(type => "getsetup", id => "httpauth", call => \&getsetup); hook(type => "auth", id => "httpauth", call => \&auth); hook(type => "formbuilder_setup", id => "httpauth", @@ -37,6 +38,19 @@ sub getsetup () { rebuild => 0, }, } + +sub checkconfig () { + if ($config{cgi} && defined $config{cgiauthurl} && + keys %{$IkiWiki::hooks{auth}} < 2) { + # There are no other auth hooks registered, so avoid + # the normal signin form, and jump right to httpauth. + require IkiWiki::CGI; + inject(name => "IkiWiki::cgi_signin", call => sub ($$) { + my $cgi=shift; + redir_cgiauthurl($cgi, $cgi->query_string()); + }); + } +} sub redir_cgiauthurl ($;@) { my $cgi=shift; diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm index 159cc5def..8eb033951 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm @@ -19,14 +19,14 @@ sub import { hook(type => "checkconfig", id => "inline", call => \&checkconfig); hook(type => "sessioncgi", id => "inline", call => \&sessioncgi); hook(type => "preprocess", id => "inline", - call => \&IkiWiki::preprocess_inline); + call => \&IkiWiki::preprocess_inline, scan => 1); hook(type => "pagetemplate", id => "inline", call => \&IkiWiki::pagetemplate_inline); hook(type => "format", id => "inline", call => \&format, first => 1); # Hook to change to do pinging since it's called late. # This ensures each page only pings once and prevents slow # pings interrupting page builds. - hook(type => "change", id => "inline", call => \&IkiWiki::pingurl); + hook(type => "rendered", id => "inline", call => \&IkiWiki::pingurl); } sub getopt () { @@ -155,6 +155,23 @@ sub preprocess_inline (@) { if (! exists $params{pages} && ! exists $params{pagenames}) { error gettext("missing pages parameter"); } + + if (! 
defined wantarray) { + # Running in scan mode: only do the essentials + + if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) { + # default to sorting age, the same as inline itself, + # but let the params override that + IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params); + } + + return; + } + + if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) { + scalar IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params); + } + my $raw=yesno($params{raw}); my $archive=yesno($params{archive}); my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss}; @@ -194,8 +211,7 @@ sub preprocess_inline (@) { } } - @list = map { bestlink($params{page}, $_) } - split ' ', $params{pagenames}; + @list = split ' ', $params{pagenames}; if (yesno($params{reverse})) { @list=reverse(@list); @@ -204,6 +220,8 @@ sub preprocess_inline (@) { foreach my $p (@list) { add_depends($params{page}, $p, deptype($quick ? "presence" : "content")); } + + @list = grep { exists $pagesources{$_} } @list; } else { my $num=0; @@ -677,7 +695,6 @@ sub genfeed ($$$$$@) { guid => $guid, feeddate => date_3339($lasttime), feedurl => $feedurl, - version => $IkiWiki::version, ); run_hooks(pagetemplate => sub { shift->(page => $page, destpage => $page, diff --git a/IkiWiki/Plugin/link.pm b/IkiWiki/Plugin/link.pm index ef01f1107..1ba28eafd 100644 --- a/IkiWiki/Plugin/link.pm +++ b/IkiWiki/Plugin/link.pm @@ -144,9 +144,9 @@ sub renamepage (@) { my $old=$params{oldpage}; my $new=$params{newpage}; - $params{content} =~ s{(?{files}) { my $diffurl = defined $config{diffurl} ? $config{'diffurl'} : ""; - $diffurl =~ s/\[\[file\]\]/$file/go; + my $efile = uri_escape_utf8($file); + $diffurl =~ s/\[\[file\]\]/$efile/go; $diffurl =~ s/\[\[r2\]\]/$info->{changeset}/go; push @pages, { diff --git a/IkiWiki/Plugin/meta.pm b/IkiWiki/Plugin/meta.pm index 220fff9dc..421f1dc86 100644 --- a/IkiWiki/Plugin/meta.pm +++ b/IkiWiki/Plugin/meta.pm @@ -275,17 +275,23 @@ sub preprocess (@) { push @{$metaheaders{$page}}, ''; } - elsif ($key eq 'description') { - push @{$metaheaders{$page}}, ''; } elsif ($key eq 'name') { - push @{$metaheaders{$page}}, scrub('', $page, $destpage); } + elsif ($key eq 'keywords') { + # Make sure the keyword string is safe: only allow alphanumeric + # characters, space and comma and strip the rest. + $value =~ s/[^[:alnum:], ]+//g; + push @{$metaheaders{$page}}, ''; + } else { push @{$metaheaders{$page}}, scrub(' 1, + rebuild => 1, + }, +} + +sub checkconfig () { + if (! defined $config{mirrorlist_use_cgi}) { + $config{mirrorlist_use_cgi}=0; + } } sub pagetemplate (@) { @@ -46,7 +59,9 @@ sub mirrorlist ($) { join(", ", map { qq{{$_}."?do=goto&page=$page" : + $config{mirrorlist}->{$_}."/".urlto($page, "") ). qq{">$_} } keys %{$config{mirrorlist}} ). 
diff --git a/IkiWiki/Plugin/monotone.pm b/IkiWiki/Plugin/monotone.pm index 1d89e3f6b..105627814 100644 --- a/IkiWiki/Plugin/monotone.pm +++ b/IkiWiki/Plugin/monotone.pm @@ -7,6 +7,7 @@ use IkiWiki; use Monotone; use Date::Parse qw(str2time); use Date::Format qw(time2str); +use URI::Escape q{uri_escape_utf8}; my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate sha1sums my $mtn_version = undef; @@ -593,7 +594,8 @@ sub rcs_recentchanges ($) { my $diffurl=$config{diffurl}; $diffurl=~s/\[\[r1\]\]/$parent/g; $diffurl=~s/\[\[r2\]\]/$rev/g; - $diffurl=~s/\[\[file\]\]/$file/g; + my $efile = uri_escape_utf8($file); + $diffurl=~s/\[\[file\]\]/$efile/g; push @pages, { page => pagename($file), diffurl => $diffurl, diff --git a/IkiWiki/Plugin/notifyemail.pm b/IkiWiki/Plugin/notifyemail.pm new file mode 100644 index 000000000..2c1775f2e --- /dev/null +++ b/IkiWiki/Plugin/notifyemail.pm @@ -0,0 +1,168 @@ +#!/usr/bin/perl +package IkiWiki::Plugin::notifyemail; + +use warnings; +use strict; +use IkiWiki 3.00; + +sub import { + hook(type => "formbuilder", id => "notifyemail", call => \&formbuilder); + hook(type => "getsetup", id => "notifyemail", call => \&getsetup); + hook(type => "changes", id => "notifyemail", call => \¬ify); +} + +sub getsetup () { + return + plugin => { + safe => 1, + rebuild => 0, + }, +} + +sub formbuilder (@) { + my %params=@_; + my $form=$params{form}; + return unless $form->title eq "preferences"; + my $session=$params{session}; + my $username=$session->param("name"); + $form->field(name => "subscriptions", size => 50, + fieldset => "preferences", + comment => "(".htmllink("", "", "ikiwiki/PageSpec", noimageinline => 1).")"); + if (! $form->submitted) { + $form->field(name => "subscriptions", force => 1, + value => getsubscriptions($username)); + } + elsif ($form->submitted eq "Save Preferences" && $form->validate && + defined $form->field("subscriptions")) { + setsubscriptions($username, $form->field('subscriptions')); + } +} + +sub getsubscriptions ($) { + my $user=shift; + eval q{use IkiWiki::UserInfo}; + error $@ if $@; + IkiWiki::userinfo_get($user, "subscriptions"); +} + +sub setsubscriptions ($$) { + my $user=shift; + my $subscriptions=shift; + eval q{use IkiWiki::UserInfo}; + error $@ if $@; + IkiWiki::userinfo_set($user, "subscriptions", $subscriptions); +} + +# Called by other plugins to subscribe the user to a pagespec. +sub subscribe ($$) { + my $user=shift; + my $addpagespec=shift; + my $pagespec=getsubscriptions($user); + setsubscriptions($user, + length $pagespec ? $pagespec." or ".$addpagespec : $addpagespec); +} + +# Called by other plugins to subscribe an email to a pagespec. +sub anonsubscribe ($$) { + my $email=shift; + my $addpagespec=shift; + if (IkiWiki::Plugin::passwordauth->can("anonuser")) { + my $user=IkiWiki::Plugin::passwordauth::anonuser($email); + if (! defined $user) { + error(gettext("Cannot subscribe your email address without logging in.")); + } + subscribe($user, $addpagespec); + } +} + +sub notify (@) { + my @files=@_; + return unless @files; + + eval q{use Mail::Sendmail}; + error $@ if $@; + eval q{use IkiWiki::UserInfo}; + error $@ if $@; + eval q{use URI}; + error($@) if $@; + + # Daemonize, in case the mail sending takes a while. 
+ defined(my $pid = fork) or error("Can't fork: $!"); + return if $pid; # parent + chdir '/'; + open STDIN, '/dev/null'; + open STDOUT, '>/dev/null'; + POSIX::setsid() or error("Can't start a new session: $!"); + open STDERR, '>&STDOUT' or error("Can't dup stdout: $!"); + + # Don't need to keep a lock on the wiki as a daemon. + IkiWiki::unlockwiki(); + + my $userinfo=IkiWiki::userinfo_retrieve(); + exit 0 unless defined $userinfo; + + foreach my $user (keys %$userinfo) { + my $pagespec=$userinfo->{$user}->{"subscriptions"}; + next unless defined $pagespec && length $pagespec; + my $email=$userinfo->{$user}->{email}; + next unless defined $email && length $email; + + foreach my $file (@files) { + my $page=pagename($file); + next unless pagespec_match($page, $pagespec); + my $content=""; + my $showcontent=defined pagetype($file); + if ($showcontent) { + $content=eval { readfile(srcfile($file)) }; + $showcontent=0 if $@; + } + my $url; + if (! IkiWiki::isinternal($page)) { + $url=urlto($page, undef, 1); + } + elsif (defined $pagestate{$page}{meta}{permalink}) { + # need to use permalink for an internal page + $url=URI->new_abs($pagestate{$page}{meta}{permalink}, $config{url}); + } + else { + $url=$config{url}; # crummy fallback url + } + my $pagedesc=$page; + if (defined $pagestate{$page}{meta}{title} && + length $pagestate{$page}{meta}{title}) { + $pagedesc=qq{"$pagestate{$page}{meta}{title}"}; + } + my $subject=gettext("change notification:")." ".$pagedesc; + if (pagetype($file) eq '_comment') { + $subject=gettext("comment notification:")." ".$pagedesc; + } + my $prefsurl=IkiWiki::cgiurl_abs(do => 'prefs'); + if (IkiWiki::Plugin::passwordauth->can("anonusertoken")) { + my $token=IkiWiki::Plugin::passwordauth::anonusertoken($userinfo->{$user}); + $prefsurl=IkiWiki::cgiurl_abs( + do => 'tokenauth', + name => $user, + token => $token, + ) if defined $token; + } + my $template=template("notifyemail.tmpl"); + $template->param( + wikiname => $config{wikiname}, + url => $url, + prefsurl => $prefsurl, + showcontent => $showcontent, + content => $content, + ); + sendmail( + To => $email, + From => "$config{wikiname} <$config{adminemail}>", + Subject => $subject, + Message => $template->output, + ); + } + } + + exit 0; # daemon child +} + +1 diff --git a/IkiWiki/Plugin/opendiscussion.pm b/IkiWiki/Plugin/opendiscussion.pm index 2805f60ef..808d3cd2b 100644 --- a/IkiWiki/Plugin/opendiscussion.pm +++ b/IkiWiki/Plugin/opendiscussion.pm @@ -25,7 +25,7 @@ sub canedit ($$) { my $cgi=shift; my $session=shift; - return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i; + return "" if $config{discussion} && $page=~/(\/|^)\Q$config{discussionpage}\E$/i; return "" if pagespec_match($page, "postcomment(*)"); return undef; } diff --git a/IkiWiki/Plugin/openid.pm b/IkiWiki/Plugin/openid.pm index b6642619a..40a956849 100644 --- a/IkiWiki/Plugin/openid.pm +++ b/IkiWiki/Plugin/openid.pm @@ -100,9 +100,10 @@ sub formbuilder_setup (@) { IkiWiki::openiduser($session->param("name"))) { $form->field(name => "openid_identifier", disabled => 1, label => htmllink("", "", "ikiwiki/OpenID", noimageinline => 1), - value => $session->param("name"), - size => length($session->param("name")), force => 1, - fieldset => "login"); + value => "", + size => 1, force => 1, + fieldset => "login", + comment => $session->param("name")); $form->field(name => "email", type => "hidden"); } } diff --git a/IkiWiki/Plugin/osm.pm b/IkiWiki/Plugin/osm.pm new file mode 100644 index 000000000..a7baa5f2b --- /dev/null +++ b/IkiWiki/Plugin/osm.pm @@ 
-0,0 +1,594 @@ +#!/usr/bin/perl +# Copyright 2011 Blars Blarson +# Released under GPL version 2 + +package IkiWiki::Plugin::osm; +use utf8; +use strict; +use warnings; +use IkiWiki 3.0; + +sub import { + add_underlay("osm"); + hook(type => "getsetup", id => "osm", call => \&getsetup); + hook(type => "format", id => "osm", call => \&format); + hook(type => "preprocess", id => "osm", call => \&preprocess); + hook(type => "preprocess", id => "waypoint", call => \&process_waypoint); + hook(type => "savestate", id => "waypoint", call => \&savestate); + hook(type => "cgi", id => "osm", call => \&cgi); +} + +sub getsetup () { + return + plugin => { + safe => 1, + rebuild => 1, + section => "special-purpose", + }, + osm_default_zoom => { + type => "integer", + example => "15", + description => "the default zoom when you click on the map link", + safe => 1, + rebuild => 1, + }, + osm_default_icon => { + type => "string", + example => "ikiwiki/images/osm.png", + description => "the icon shown on links and on the main map", + safe => 0, + rebuild => 1, + }, + osm_alt => { + type => "string", + example => "", + description => "the alt tag of links, defaults to empty", + safe => 0, + rebuild => 1, + }, + osm_format => { + type => "string", + example => "KML", + description => "the output format for waypoints, can be KML, GeoJSON or CSV (one or many, comma-separated)", + safe => 1, + rebuild => 1, + }, + osm_tag_default_icon => { + type => "string", + example => "icon.png", + description => "the icon attached to a tag, displayed on the map for tagged pages", + safe => 0, + rebuild => 1, + }, + osm_openlayers_url => { + type => "string", + example => "http://www.openlayers.org/api/OpenLayers.js", + description => "Url for the OpenLayers.js file", + safe => 0, + rebuild => 1, + }, + osm_layers => { + type => "string", + example => { 'OSM', 'GoogleSatellite' }, + description => "Layers to use in the map. Can be either the 'OSM' string or a type option for Google maps (GoogleNormal, GoogleSatellite, GoogleHybrid or GooglePhysical). 
It can also be an arbitrary URL in a syntax acceptable for OpenLayers.Layer.OSM.url parameter.", + safe => 0, + rebuild => 1, + }, + osm_google_apikey => { + type => "string", + example => "", + description => "Google maps API key, Google layer not used if missing, see https://code.google.com/apis/console/ to get an API key", + safe => 1, + rebuild => 1, + }, +} + +sub register_rendered_files { + my $map = shift; + my $page = shift; + my $dest = shift; + + if ($page eq $dest) { + my %formats = get_formats(); + if ($formats{'GeoJSON'}) { + will_render($page, "$map/pois.json"); + } + if ($formats{'CSV'}) { + will_render($page, "$map/pois.txt"); + } + if ($formats{'KML'}) { + will_render($page, "$map/pois.kml"); + } + } +} + +sub preprocess { + my %params=@_; + my $page = $params{page}; + my $dest = $params{destpage}; + my $loc = $params{loc}; # sanitized below + my $lat = $params{lat}; # sanitized below + my $lon = $params{lon}; # sanitized below + my $href = $params{href}; + + my ($width, $height, $float); + $height = scrub($params{'height'} || "300px", $page, $dest); # sanitized here + $width = scrub($params{'width'} || "500px", $page, $dest); # sanitized here + $float = (defined($params{'right'}) && 'right') || (defined($params{'left'}) && 'left'); # sanitized here + + my $zoom = scrub($params{'zoom'} // $config{'osm_default_zoom'} // 15, $page, $dest); # sanitized below + my $map; + $map = $params{'map'} || 'map'; + + $map = scrub($map, $page, $dest); # sanitized here + my $name = scrub($params{'name'} || $map, $page, $dest); + + if (defined($lon) || defined($lat) || defined($loc)) { + ($lon, $lat) = scrub_lonlat($loc, $lon, $lat); + } + + if ($zoom !~ /^\d\d?$/ || $zoom < 2 || $zoom > 18) { + error("Bad zoom"); + } + + if (! defined $href || ! length $href) { + $href=IkiWiki::cgiurl( + do => "osm", + map => $map, + ); + } + + register_rendered_files($map, $page, $dest); + + $pagestate{$page}{'osm'}{$map}{'displays'}{$name} = { + height => $height, + width => $width, + float => $float, + zoom => $zoom, + fullscreen => 0, + editable => defined($params{'editable'}), + lat => $lat, + lon => $lon, + href => $href, + google_apikey => $config{'osm_google_apikey'}, + }; + return "
"; +} + +sub process_waypoint { + my %params=@_; + my $loc = $params{'loc'}; # sanitized below + my $lat = $params{'lat'}; # sanitized below + my $lon = $params{'lon'}; # sanitized below + my $page = $params{'page'}; # not sanitized? + my $dest = $params{'destpage'}; # not sanitized? + my $hidden = defined($params{'hidden'}); # sanitized here + my ($p) = $page =~ /(?:^|\/)([^\/]+)\/?$/; # shorter page name + my $name = scrub($params{'name'} || $p, $page, $dest); # sanitized here + my $desc = scrub($params{'desc'} || '', $page, $dest); # sanitized here + my $zoom = scrub($params{'zoom'} // $config{'osm_default_zoom'} // 15, $page, $dest); # sanitized below + my $icon = $config{'osm_default_icon'} || "ikiwiki/images/osm.png"; # sanitized: we trust $config + my $map = scrub($params{'map'} || 'map', $page, $dest); # sanitized here + my $alt = $config{'osm_alt'} ? "alt=\"$config{'osm_alt'}\"" : ''; # sanitized: we trust $config + if ($zoom !~ /^\d\d?$/ || $zoom < 2 || $zoom > 18) { + error("Bad zoom"); + } + + ($lon, $lat) = scrub_lonlat($loc, $lon, $lat); + if (!defined($lat) || !defined($lon)) { + error("Must specify lat and lon"); + } + + my $tag = $params{'tag'}; + foreach my $t (keys %{$typedlinks{$page}{'tag'}}) { + if ($icon = get_tag_icon($t)) { + $tag = $t; + last; + } + $t =~ s!/$config{'tagbase'}/!!; + if ($icon = get_tag_icon($t)) { + $tag = $t; + last; + } + } + $icon = urlto($icon, $dest, 1); + $tag = '' unless $tag; + register_rendered_files($map, $page, $dest); + $pagestate{$page}{'osm'}{$map}{'waypoints'}{$name} = { + page => $page, + desc => $desc, + icon => $icon, + tag => $tag, + lat => $lat, + lon => $lon, + # How to link back to the page from the map, not to be + # confused with the URL of the map itself sent to the + # embeded map below. Note: used in generated KML etc file, + # so must be absolute. + href => urlto($page), + }; + + my $mapurl = IkiWiki::cgiurl( + do => "osm", + map => $map, + lat => $lat, + lon => $lon, + zoom => $zoom, + ); + my $output = ''; + if (defined($params{'embed'})) { + $output .= preprocess(%params, + href => $mapurl, + ); + } + if (!$hidden) { + $output .= ""; + } + return $output; +} + +# get the icon from the given tag +sub get_tag_icon($) { + my $tag = shift; + # look for an icon attached to the tag + my $attached = $tag . '/' . 
$config{'osm_tag_default_icon'}; + if (srcfile($attached)) { + return $attached; + } + else { + return undef; + } +} + +sub scrub_lonlat($$$) { + my ($loc, $lon, $lat) = @_; + if ($loc) { + if ($loc =~ /^\s*(\-?\d+(?:\.\d*°?|(?:°?|\s)\s*\d+(?:\.\d*\'?|(?:\'|\s)\s*\d+(?:\.\d*)?\"?|\'?)°?)[NS]?)\s*\,?\;?\s*(\-?\d+(?:\.\d*°?|(?:°?|\s)\s*\d+(?:\.\d*\'?|(?:\'|\s)\s*\d+(?:\.\d*)?\"?|\'?)°?)[EW]?)\s*$/) { + $lat = $1; + $lon = $2; + } + else { + error("Bad loc"); + } + } + if (defined($lat)) { + if ($lat =~ /^(\-?)(\d+)(?:(\.\d*)°?|(?:°|\s)\s*(\d+)(?:(\.\d*)\'?|(?:\'|\s)\s*(\d+(?:\.\d*)?\"?)|\'?)|°?)\s*([NS])?\s*$/) { + $lat = $2 + ($3//0) + ((($4//0) + (($5//0) + (($6//0)/60.)))/60.); + if (($1 eq '-') || (($7//'') eq 'S')) { + $lat = - $lat; + } + } + else { + error("Bad lat"); + } + } + if (defined($lon)) { + if ($lon =~ /^(\-?)(\d+)(?:(\.\d*)°?|(?:°|\s)\s*(\d+)(?:(\.\d*)\'?|(?:\'|\s)\s*(\d+(?:\.\d*)?\"?)|\'?)|°?)\s*([EW])?$/) { + $lon = $2 + ($3//0) + ((($4//0) + (($5//0) + (($6//0)/60.)))/60.); + if (($1 eq '-') || (($7//'') eq 'W')) { + $lon = - $lon; + } + } + else { + error("Bad lon"); + } + } + if ($lat < -90 || $lat > 90 || $lon < -180 || $lon > 180) { + error("Location out of range"); + } + return ($lon, $lat); +} + +sub savestate { + my %waypoints = (); + my %linestrings = (); + + foreach my $page (keys %pagestate) { + if (exists $pagestate{$page}{'osm'}) { + foreach my $map (keys %{$pagestate{$page}{'osm'}}) { + foreach my $name (keys %{$pagestate{$page}{'osm'}{$map}{'waypoints'}}) { + debug("found waypoint $name"); + $waypoints{$map}{$name} = $pagestate{$page}{'osm'}{$map}{'waypoints'}{$name}; + } + } + } + } + + foreach my $page (keys %pagestate) { + if (exists $pagestate{$page}{'osm'}) { + foreach my $map (keys %{$pagestate{$page}{'osm'}}) { + # examine the links on this page + foreach my $name (keys %{$pagestate{$page}{'osm'}{$map}{'waypoints'}}) { + if (exists $links{$page}) { + foreach my $otherpage (@{$links{$page}}) { + if (exists $waypoints{$map}{$otherpage}) { + push(@{$linestrings{$map}}, [ + [ $waypoints{$map}{$name}{'lon'}, $waypoints{$map}{$name}{'lat'} ], + [ $waypoints{$map}{$otherpage}{'lon'}, $waypoints{$map}{$otherpage}{'lat'} ] + ]); + } + } + } + } + } + # clear the state, it will be regenerated on the next parse + # the idea here is to clear up removed waypoints... + $pagestate{$page}{'osm'} = (); + } + } + + my %formats = get_formats(); + if ($formats{'GeoJSON'}) { + writejson(\%waypoints, \%linestrings); + } + if ($formats{'CSV'}) { + writecsvs(\%waypoints, \%linestrings); + } + if ($formats{'KML'}) { + writekml(\%waypoints, \%linestrings); + } +} + +sub writejson($;$) { + my %waypoints = %{$_[0]}; + my %linestrings = %{$_[1]}; + eval q{use JSON}; + error $@ if $@; + foreach my $map (keys %waypoints) { + my %geojson = ( "type" => "FeatureCollection", "features" => []); + foreach my $name (keys %{$waypoints{$map}}) { + my %marker = ( "type" => "Feature", + "geometry" => { "type" => "Point", "coordinates" => [ $waypoints{$map}{$name}{'lon'}, $waypoints{$map}{$name}{'lat'} ] }, + "properties" => $waypoints{$map}{$name} ); + push @{$geojson{'features'}}, \%marker; + } + foreach my $linestring (@{$linestrings{$map}}) { + my %json = ( "type" => "Feature", + "geometry" => { "type" => "LineString", "coordinates" => $linestring }); + push @{$geojson{'features'}}, \%json; + } + writefile("pois.json", $config{destdir} . 
"/$map", to_json(\%geojson)); + } +} + +sub writekml($;$) { + my %waypoints = %{$_[0]}; + my %linestrings = %{$_[1]}; + eval q{use XML::Writer}; + error $@ if $@; + foreach my $map (keys %waypoints) { + my $output; + my $writer = XML::Writer->new( OUTPUT => \$output, + DATA_MODE => 1, DATA_INDENT => ' ', ENCODING => 'UTF-8'); + $writer->xmlDecl(); + $writer->startTag("kml", "xmlns" => "http://www.opengis.net/kml/2.2"); + $writer->startTag("Document"); + + # first pass: get the icons + my %tags_map = (); # keep track of tags seen + foreach my $name (keys %{$waypoints{$map}}) { + my %options = %{$waypoints{$map}{$name}}; + if (!$tags_map{$options{tag}}) { + debug("found new style " . $options{tag}); + $tags_map{$options{tag}} = (); + $writer->startTag("Style", id => $options{tag}); + $writer->startTag("IconStyle"); + $writer->startTag("Icon"); + $writer->startTag("href"); + $writer->characters($options{icon}); + $writer->endTag(); + $writer->endTag(); + $writer->endTag(); + $writer->endTag(); + } + $tags_map{$options{tag}}{$name} = \%options; + } + + foreach my $name (keys %{$waypoints{$map}}) { + my %options = %{$waypoints{$map}{$name}}; + $writer->startTag("Placemark"); + $writer->startTag("name"); + $writer->characters($name); + $writer->endTag(); + $writer->startTag("styleUrl"); + $writer->characters('#' . $options{tag}); + $writer->endTag(); + #$writer->emptyTag('atom:link', href => $options{href}); + # to make it easier for us as the atom:link parameter is + # hard to access from javascript + $writer->startTag('href'); + $writer->characters($options{href}); + $writer->endTag(); + $writer->startTag("description"); + $writer->characters($options{desc}); + $writer->endTag(); + $writer->startTag("Point"); + $writer->startTag("coordinates"); + $writer->characters($options{lon} . "," . $options{lat}); + $writer->endTag(); + $writer->endTag(); + $writer->endTag(); + } + + my $i = 0; + foreach my $linestring (@{$linestrings{$map}}) { + $writer->startTag("Placemark"); + $writer->startTag("name"); + $writer->characters("linestring " . $i++); + $writer->endTag(); + $writer->startTag("LineString"); + $writer->startTag("coordinates"); + my $str = ''; + foreach my $coord (@{$linestring}) { + $str .= join(',', @{$coord}) . " \n"; + } + $writer->characters($str); + $writer->endTag(); + $writer->endTag(); + $writer->endTag(); + } + $writer->endTag(); + $writer->endTag(); + $writer->end(); + + writefile("pois.kml", $config{destdir} . "/$map", $output); + } +} + +sub writecsvs($;$) { + my %waypoints = %{$_[0]}; + foreach my $map (keys %waypoints) { + my $poisf = "lat\tlon\ttitle\tdescription\ticon\ticonSize\ticonOffset\n"; + foreach my $name (keys %{$waypoints{$map}}) { + my %options = %{$waypoints{$map}{$name}}; + my $line = + $options{'lat'} . "\t" . + $options{'lon'} . "\t" . + $name . "\t" . + $options{'desc'} . '
' . $name . "\t" . + $options{'icon'} . "\n"; + $poisf .= $line; + } + writefile("pois.txt", $config{destdir} . "/$map", $poisf); + } +} + +# pipe some data through the HTML scrubber +# +# code taken from the meta.pm plugin +sub scrub($$$) { + if (IkiWiki::Plugin::htmlscrubber->can("sanitize")) { + return IkiWiki::Plugin::htmlscrubber::sanitize( + content => shift, page => shift, destpage => shift); + } + else { + return shift; + } +} + +# taken from toggle.pm +sub format (@) { + my %params=@_; + + if ($params{content}=~m!]*id="mapdiv-[^"]*"[^>]*>!g) { + if (! ($params{content}=~s!!include_javascript($params{page}).""!em)) { + # no tag, probably in preview mode + $params{content}=$params{content} . include_javascript($params{page}); + } + } + return $params{content}; +} + +sub preferred_format() { + if (!defined($config{'osm_format'}) || !$config{'osm_format'}) { + $config{'osm_format'} = 'KML'; + } + my @spl = split(/, */, $config{'osm_format'}); + return shift @spl; +} + +sub get_formats() { + if (!defined($config{'osm_format'}) || !$config{'osm_format'}) { + $config{'osm_format'} = 'KML'; + } + map { $_ => 1 } split(/, */, $config{'osm_format'}); +} + +sub include_javascript ($) { + my $page=shift; + my $loader; + + if (exists $pagestate{$page}{'osm'}) { + foreach my $map (keys %{$pagestate{$page}{'osm'}}) { + foreach my $name (keys %{$pagestate{$page}{'osm'}{$map}{'displays'}}) { + $loader .= map_setup_code($map, $name, %{$pagestate{$page}{'osm'}{$map}{'displays'}{$name}}); + } + } + } + if ($loader) { + return embed_map_code($page) . ""; + } + else { + return ''; + } +} + +sub cgi($) { + my $cgi=shift; + + return unless defined $cgi->param('do') && + $cgi->param("do") eq "osm"; + + IkiWiki::loadindex(); + + IkiWiki::decode_cgi_utf8($cgi); + + my $map = $cgi->param('map'); + if (!defined $map || $map !~ /^[a-z]*$/) { + error("invalid map parameter"); + } + + print "Content-Type: text/html\r\n"; + print ("\r\n"); + print ""; + print "
"; + print embed_map_code(); + print ""; + print ""; + + exit 0; +} + +sub embed_map_code(;$) { + my $page=shift; + my $olurl = $config{osm_openlayers_url} || "http://www.openlayers.org/api/OpenLayers.js"; + my $code = ''."\n". + ''."\n"; + if ($config{'osm_google_apikey'}) { + $code .= ''; + } + return $code; +} + +sub map_setup_code($;@) { + my $map=shift; + my $name=shift; + my %options=@_; + + my $mapurl = $config{osm_map_url}; + + eval q{use JSON}; + error $@ if $@; + + $options{'format'} = preferred_format(); + + my %formats = get_formats(); + if ($formats{'GeoJSON'}) { + $options{'jsonurl'} = urlto($map."/pois.json"); + } + if ($formats{'CSV'}) { + $options{'csvurl'} = urlto($map."/pois.txt"); + } + if ($formats{'KML'}) { + $options{'kmlurl'} = urlto($map."/pois.kml"); + } + + if ($mapurl) { + $options{'mapurl'} = $mapurl; + } + $options{'layers'} = $config{osm_layers}; + + return "mapsetup('mapdiv-$name', " . to_json(\%options) . ");"; +} + +1; diff --git a/IkiWiki/Plugin/passwordauth.pm b/IkiWiki/Plugin/passwordauth.pm index 35ebd961f..0cf2a26ea 100644 --- a/IkiWiki/Plugin/passwordauth.pm +++ b/IkiWiki/Plugin/passwordauth.pm @@ -96,6 +96,72 @@ sub setpassword ($$;$) { else { IkiWiki::userinfo_set($user, $field, $password); } + + # Setting the password clears any passwordless login token. + if ($field ne 'passwordless') { + IkiWiki::userinfo_set($user, "passwordless", ""); + } +} + +# Generates a token that can be used to log the user in. +# This needs to be hard to guess. Generating a cgi session id will +# make it as hard to guess as any cgi session. +sub gentoken ($$;$) { + my $user=shift; + my $tokenfield=shift; + my $reversable=shift; + + eval q{use CGI::Session}; + error($@) if $@; + my $token = CGI::Session->new->id; + if (! $reversable) { + setpassword($user, $token, $tokenfield); + } + else { + IkiWiki::userinfo_set($user, $tokenfield, $token); + } + return $token; +} + +# An anonymous user has no normal password, only a passwordless login +# token. Given an email address, this sets up such a user for that email, +# unless one already exists, and returns the username. +sub anonuser ($) { + my $email=shift; + + # Want a username for this email that won't overlap with any other. + my $user=$email; + $user=~s/@/_/g; + + my $userinfo=IkiWiki::userinfo_retrieve(); + if (! exists $userinfo->{$user} || ! ref $userinfo->{$user}) { + if (IkiWiki::userinfo_setall($user, { + 'email' => $email, + 'regdate' => time})) { + gentoken($user, "passwordless", 1); + return $user; + } + else { + error(gettext("Error creating account.")); + } + } + elsif (defined anonusertoken($userinfo->{$user})) { + return $user; + } + else { + return undef; + } +} + +sub anonusertoken ($) { + my $userhash=shift; + if (exists $userhash->{passwordless} && + length $userhash->{passwordless}) { + return $userhash->{passwordless}; + } + else { + return undef; + } } sub formbuilder_setup (@) { @@ -277,20 +343,13 @@ sub formbuilder (@) { if (! length $email) { error(gettext("No email address, so cannot email password reset instructions.")); } - - # Store a token that can be used once - # to log the user in. This needs to be hard - # to guess. Generating a cgi session id will - # make it as hard to guess as any cgi session. 
- eval q{use CGI::Session}; - error($@) if $@; - my $token = CGI::Session->new->id; - setpassword($user_name, $token, "resettoken"); + + my $token=gentoken($user_name, "resettoken"); my $template=template("passwordmail.tmpl"); $template->param( user_name => $user_name, - passwordurl => IkiWiki::cgiurl( + passwordurl => IkiWiki::cgiurl_abs( 'do' => "reset", 'name' => $user_name, 'token' => $token, @@ -329,7 +388,7 @@ sub formbuilder (@) { elsif ($form->title eq "preferences") { if ($form->submitted eq "Save Preferences" && $form->validate) { my $user_name=$form->field('name'); - if ($form->field("password") && length $form->field("password")) { + if (defined $form->field("password") && length $form->field("password")) { setpassword($user_name, $form->field('password')); } } @@ -356,6 +415,22 @@ sub sessioncgi ($$) { IkiWiki::cgi_prefs($q, $session); exit; } + elsif ($q->param('do') eq 'tokenauth') { + my $name=$q->param("name"); + my $token=$q->param("token"); + + if (! defined $name || ! defined $token || + ! length $name || ! length $token) { + error(gettext("incorrect url")); + } + if (! checkpassword($name, $token, "passwordless")) { + error(gettext("access denied")); + } + + $session->param("name", $name); + IkiWiki::cgi_prefs($q, $session); + exit; + } elsif ($q->param("do") eq "register") { # After registration, need to go somewhere, so show prefs page. $session->param(postsignin => "do=prefs"); diff --git a/IkiWiki/Plugin/pinger.pm b/IkiWiki/Plugin/pinger.pm index ea4f3e0dc..588f7a42a 100644 --- a/IkiWiki/Plugin/pinger.pm +++ b/IkiWiki/Plugin/pinger.pm @@ -13,7 +13,7 @@ sub import { hook(type => "needsbuild", id => "pinger", call => \&needsbuild); hook(type => "preprocess", id => "ping", call => \&preprocess); hook(type => "delete", id => "pinger", call => \&ping); - hook(type => "change", id => "pinger", call => \&ping); + hook(type => "rendered", id => "pinger", call => \&ping); } sub getsetup () { diff --git a/IkiWiki/Plugin/po.pm b/IkiWiki/Plugin/po.pm index 6410a1c66..53e6af92f 100644 --- a/IkiWiki/Plugin/po.pm +++ b/IkiWiki/Plugin/po.pm @@ -23,7 +23,6 @@ use File::Copy; use File::Spec; use File::Temp; use Memoize; -use UNIVERSAL; my ($master_language_code, $master_language_name); my %translations; @@ -48,7 +47,7 @@ sub import { hook(type => "pagetemplate", id => "po", call => \&pagetemplate, last => 1); hook(type => "rename", id => "po", call => \&renamepages, first => 1); hook(type => "delete", id => "po", call => \&mydelete); - hook(type => "change", id => "po", call => \&change); + hook(type => "rendered", id => "po", call => \&rendered); hook(type => "checkcontent", id => "po", call => \&checkcontent); hook(type => "canremove", id => "po", call => \&canremove); hook(type => "canrename", id => "po", call => \&canrename); @@ -428,7 +427,7 @@ sub mydelete (@) { map { deletetranslations($_) } grep istranslatablefile($_), @deleted; } -sub change (@) { +sub rendered (@) { my @rendered=@_; my $updated_po_files=0; @@ -1103,7 +1102,7 @@ sub deletetranslations ($) { IkiWiki::rcs_remove($_); } else { - IkiWiki::prune("$config{srcdir}/$_"); + IkiWiki::prune("$config{srcdir}/$_", $config{srcdir}); } } @todelete; diff --git a/IkiWiki/Plugin/poll.pm b/IkiWiki/Plugin/poll.pm index 2773486a6..32756a571 100644 --- a/IkiWiki/Plugin/poll.pm +++ b/IkiWiki/Plugin/poll.pm @@ -23,11 +23,13 @@ sub getsetup () { my %pagenum; sub preprocess (@) { - my %params=(open => "yes", total => "yes", percent => "yes", @_); + my %params=(open => "yes", total => "yes", percent => "yes", + expandable => "no", 
@_); my $open=IkiWiki::yesno($params{open}); my $showtotal=IkiWiki::yesno($params{total}); my $showpercent=IkiWiki::yesno($params{percent}); + my $expandable=IkiWiki::yesno($params{expandable}); $pagenum{$params{page}}++; my %choices; @@ -74,6 +76,19 @@ sub preprocess (@) { $ret.="\n"; } } + + if ($expandable && $open && exists $config{cgiurl}) { + $ret.="

\n"; + $ret.="

\n"; + $ret.="\n"; + $ret.="\n"; + $ret.="\n"; + $ret.=gettext("Write in").": \n"; + $ret.="\n"; + $ret.="
\n"; + $ret.="

\n"; + } + if ($showtotal) { $ret.="".gettext("Total votes:")." $total\n"; } @@ -85,7 +100,7 @@ sub sessioncgi ($$) { my $session=shift; if (defined $cgi->param('do') && $cgi->param('do') eq "poll") { my $choice=decode_utf8($cgi->param('choice')); - if (! defined $choice) { + if (! defined $choice || not length $choice) { error("no choice specified"); } my $num=$cgi->param('num'); @@ -118,7 +133,14 @@ sub sessioncgi ($$) { my $params=shift; return "\\[[$prefix $params]]" if $escape; if (--$num == 0) { - $params=~s/(^|\s+)(\d+)\s+"?\Q$choice\E"?(\s+|$)/$1.($2+1)." \"$choice\"".$3/se; + if ($params=~s/(^|\s+)(\d+)\s+"?\Q$choice\E"?(\s+|$)/$1.($2+1)." \"$choice\"".$3/se) { + } + elsif ($params=~/expandable=(\w+)/ + & &IkiWiki::yesno($1)) { + $choice=~s/["\]\n\r]//g; + $params.=" 1 \"$choice\"" + if length $choice; + } if (defined $oldchoice) { $params=~s/(^|\s+)(\d+)\s+"?\Q$oldchoice\E"?(\s+|$)/$1.($2-1 >=0 ? $2-1 : 0)." \"$oldchoice\"".$3/se; } diff --git a/IkiWiki/Plugin/recentchanges.pm b/IkiWiki/Plugin/recentchanges.pm index 8ce9474be..4c1863255 100644 --- a/IkiWiki/Plugin/recentchanges.pm +++ b/IkiWiki/Plugin/recentchanges.pm @@ -165,6 +165,7 @@ sub store ($$$) { # Limit pages to first 10, and add links to the changed pages. my $is_excess = exists $change->{pages}[10]; delete @{$change->{pages}}[10 .. @{$change->{pages}}] if $is_excess; + my $has_diffurl=0; $change->{pages} = [ map { if (length $config{cgiurl}) { @@ -180,6 +181,9 @@ sub store ($$$) { else { $_->{link} = pagetitle($_->{page}); } + if (defined $_->{diffurl}) { + $has_diffurl=1; + } $_; } @{$change->{pages}} @@ -227,6 +231,8 @@ sub store ($$$) { wikiname => $config{wikiname}, ); + $template->param(has_diffurl => 1) if $has_diffurl; + $template->param(permalink => urlto($config{recentchangespage})."#change-".titlepage($change->{rev})) if exists $config{url}; diff --git a/IkiWiki/Plugin/recentchangesdiff.pm b/IkiWiki/Plugin/recentchangesdiff.pm index 418822793..eb358be67 100644 --- a/IkiWiki/Plugin/recentchangesdiff.pm +++ b/IkiWiki/Plugin/recentchangesdiff.pm @@ -9,10 +9,12 @@ use HTML::Entities; my $maxlines=200; sub import { + add_underlay("javascript"); hook(type => "getsetup", id => "recentchangesdiff", call => \&getsetup); hook(type => "pagetemplate", id => "recentchangesdiff", call => \&pagetemplate); + hook(type => "format", id => "recentchangesdiff.pm", call => \&format); } sub getsetup () { @@ -55,4 +57,24 @@ sub pagetemplate (@) { } } +sub format (@) { + my %params=@_; + + if (! ($params{content}=~s!^(]*>)!$1.include_javascript($params{page})!em)) { + # no tag, probably in preview mode + $params{content}=include_javascript(undef).$params{content}; + } + return $params{content}; +} + +# taken verbatim from toggle.pm +sub include_javascript ($) { + my $from=shift; + + return ''."\n". + ''; +} + 1 diff --git a/IkiWiki/Plugin/remove.pm b/IkiWiki/Plugin/remove.pm index 14ac01c9b..d48b28f95 100644 --- a/IkiWiki/Plugin/remove.pm +++ b/IkiWiki/Plugin/remove.pm @@ -22,6 +22,13 @@ sub getsetup () { }, } +sub allowed_dirs { + return grep { defined $_ } ( + $config{srcdir}, + $IkiWiki::Plugin::transient::transientdir, + ); +} + sub check_canremove ($$$) { my $page=shift; my $q=shift; @@ -33,12 +40,22 @@ sub check_canremove ($$$) { htmllink("", "", $page, noimageinline => 1))); } - # Must exist on disk, and be a regular file. + # Must exist in either the srcdir or a suitable underlay (e.g. + # transient underlay), and be a regular file. my $file=$pagesources{$page}; - if (! 
-e "$config{srcdir}/$file") { + my $dir; + + foreach my $srcdir (allowed_dirs()) { + if (-e "$srcdir/$file") { + $dir = $srcdir; + last; + } + } + + if (! defined $dir) { error(sprintf(gettext("%s is not in the srcdir, so it cannot be deleted"), $file)); } - elsif (-l "$config{srcdir}/$file" && ! -f _) { + elsif (-l "$dir/$file" && ! -f _) { error(sprintf(gettext("%s is not a file"), $file)); } @@ -46,7 +63,7 @@ sub check_canremove ($$$) { # This is sorta overkill, but better safe than sorry. if (! defined pagetype($pagesources{$page})) { if (IkiWiki::Plugin::attachment->can("check_canattach")) { - IkiWiki::Plugin::attachment::check_canattach($session, $page, "$config{srcdir}/$file"); + IkiWiki::Plugin::attachment::check_canattach($session, $page, "$dir/$file"); } else { error("removal of attachments is not allowed"); @@ -124,7 +141,7 @@ sub removal_confirm ($$@) { my $f=IkiWiki::Plugin::attachment::is_held_attachment($page); if (defined $f) { require IkiWiki::Render; - IkiWiki::prune($f); + IkiWiki::prune($f, "$config{wikistatedir}/attachments"); } } } @@ -223,21 +240,34 @@ sub sessioncgi ($$) { require IkiWiki::Render; if ($config{rcs}) { IkiWiki::disable_commit_hook(); - foreach my $file (@files) { - IkiWiki::rcs_remove($file); + } + my $rcs_removed = 1; + + foreach my $file (@files) { + foreach my $srcdir (allowed_dirs()) { + if (-e "$srcdir/$file") { + if ($srcdir eq $config{srcdir} && $config{rcs}) { + IkiWiki::rcs_remove($file); + $rcs_removed = 1; + } + else { + IkiWiki::prune("$srcdir/$file", $srcdir); + } + } } - IkiWiki::rcs_commit_staged( - message => gettext("removed"), - session => $session, - ); - IkiWiki::enable_commit_hook(); - IkiWiki::rcs_update(); } - else { - foreach my $file (@files) { - IkiWiki::prune("$config{srcdir}/$file"); + + if ($config{rcs}) { + if ($rcs_removed) { + IkiWiki::rcs_commit_staged( + message => gettext("removed"), + session => $session, + ); } + IkiWiki::enable_commit_hook(); + IkiWiki::rcs_update(); } + IkiWiki::refresh(); IkiWiki::saveindex(); diff --git a/IkiWiki/Plugin/rename.pm b/IkiWiki/Plugin/rename.pm index 8e32d41ae..8387a1e32 100644 --- a/IkiWiki/Plugin/rename.pm +++ b/IkiWiki/Plugin/rename.pm @@ -206,14 +206,22 @@ sub rename_start ($$$$) { exit 0; } -sub postrename ($;$$$) { +sub postrename ($$$;$$) { + my $cgi=shift; my $session=shift; my $src=shift; my $dest=shift; my $attachment=shift; - # Load saved form state and return to edit page. - my $postrename=CGI->new($session->param("postrename")); + # Load saved form state and return to edit page, using stored old + # cgi state. Or, if the rename was not started on the edit page, + # return to the renamed page. + my $postrename=$session->param("postrename"); + if (! defined $postrename) { + IkiWiki::redirect($cgi, urlto(defined $dest ? $dest : $src)); + exit; + } + my $oldcgi=CGI->new($postrename); $session->clear("postrename"); IkiWiki::cgi_savesession($session); @@ -222,21 +230,21 @@ sub postrename ($;$$$) { # They renamed the page they were editing. This requires # fixups to the edit form state. # Tweak the edit form to be editing the new page. - $postrename->param("page", $dest); + $oldcgi->param("page", $dest); } # Update edit form content to fix any links present # on it. - $postrename->param("editcontent", + $oldcgi->param("editcontent", renamepage_hook($dest, $src, $dest, - $postrename->param("editcontent"))); + $oldcgi->param("editcontent"))); # Get a new edit token; old was likely invalidated. 
- $postrename->param("rcsinfo", + $oldcgi->param("rcsinfo", IkiWiki::rcs_prepedit($pagesources{$dest})); } - IkiWiki::cgi_editpage($postrename, $session); + IkiWiki::cgi_editpage($oldcgi, $session); } sub formbuilder (@) { @@ -291,16 +299,16 @@ sub sessioncgi ($$) { my $session=shift; my ($form, $buttons)=rename_form($q, $session, Encode::decode_utf8($q->param("page"))); IkiWiki::decode_form_utf8($form); + my $src=$form->field("page"); if ($form->submitted eq 'Cancel') { - postrename($session); + postrename($q, $session, $src); } elsif ($form->submitted eq 'Rename' && $form->validate) { IkiWiki::checksessionexpiry($q, $session, $q->param('sid')); # These untaints are safe because of the checks # performed in check_canrename later. - my $src=$form->field("page"); my $srcfile=IkiWiki::possibly_foolish_untaint($pagesources{$src}) if exists $pagesources{$src}; my $dest=IkiWiki::possibly_foolish_untaint(titlepage($form->field("new_name"))); @@ -324,7 +332,7 @@ sub sessioncgi ($$) { IkiWiki::Plugin::attachment::is_held_attachment($src); if ($held) { rename($held, IkiWiki::Plugin::attachment::attachment_holding_location($dest)); - postrename($session, $src, $dest, $q->param("attachment")) + postrename($q, $session, $src, $dest, $q->param("attachment")) unless defined $srcfile; } @@ -430,7 +438,7 @@ sub sessioncgi ($$) { $renamesummary.=$template->output; } - postrename($session, $src, $dest, $q->param("attachment")); + postrename($q, $session, $src, $dest, $q->param("attachment")); } else { IkiWiki::showform($form, $buttons, $session, $q); diff --git a/IkiWiki/Plugin/rsync.pm b/IkiWiki/Plugin/rsync.pm index e38801e4a..1b85ea000 100644 --- a/IkiWiki/Plugin/rsync.pm +++ b/IkiWiki/Plugin/rsync.pm @@ -7,7 +7,7 @@ use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "rsync", call => \&getsetup); - hook(type => "change", id => "rsync", call => \&postrefresh); + hook(type => "rendered", id => "rsync", call => \&postrefresh); hook(type => "delete", id => "rsync", call => \&postrefresh); } diff --git a/IkiWiki/Plugin/shortcut.pm b/IkiWiki/Plugin/shortcut.pm index 0cedbe447..98df143ab 100644 --- a/IkiWiki/Plugin/shortcut.pm +++ b/IkiWiki/Plugin/shortcut.pm @@ -73,11 +73,21 @@ sub shortcut_expand ($$@) { add_depends($params{destpage}, "shortcuts"); my $text=join(" ", @params); - my $encoded_text=$text; - $encoded_text=~s/([^A-Za-z0-9])/sprintf("%%%02X", ord($1))/seg; - $url=~s{\%([sS])}{ - $1 eq 's' ? 
$encoded_text : $text + $url=~s{\%([sSW])}{ + if ($1 eq 's') { + my $t=$text; + $t=~s/([^A-Za-z0-9])/sprintf("%%%02X", ord($1))/seg; + $t; + } + elsif ($1 eq 'S') { + $text; + } + elsif ($1 eq 'W') { + my $t=Encode::encode_utf8($text); + $t=~s/([^A-Za-z0-9])/sprintf("%%%02X", ord($1))/seg; + $t; + } }eg; $text=~s/_/ /g; diff --git a/IkiWiki/Plugin/skeleton.pm.example b/IkiWiki/Plugin/skeleton.pm.example index 7974d5e53..f9caef40c 100644 --- a/IkiWiki/Plugin/skeleton.pm.example +++ b/IkiWiki/Plugin/skeleton.pm.example @@ -26,7 +26,8 @@ sub import { hook(type => "templatefile", id => "skeleton", call => \&templatefile); hook(type => "pageactions", id => "skeleton", call => \&pageactions); hook(type => "delete", id => "skeleton", call => \&delete); - hook(type => "change", id => "skeleton", call => \&change); + hook(type => "rendered", id => "skeleton", call => \&rendered); + hook(type => "changes", id => "skeleton", call => \&changes); hook(type => "cgi", id => "skeleton", call => \&cgi); hook(type => "auth", id => "skeleton", call => \&auth); hook(type => "sessioncgi", id => "skeleton", call => \&sessioncgi); @@ -53,7 +54,6 @@ sub getsetup () { plugin => { safe => 1, rebuild => undef, - section => "misc", }, skeleton => { type => "boolean", @@ -167,10 +167,16 @@ sub delete (@) { debug("skeleton plugin told that files were deleted: @files"); } -sub change (@) { +sub rendered (@) { my @files=@_; - debug("skeleton plugin told that changed files were rendered: @files"); + debug("skeleton plugin told that files were rendered: @files"); +} + +sub changes (@) { + my @files=@_; + + debug("skeleton plugin told that files were changed: @files"); } sub cgi ($) { diff --git a/IkiWiki/Plugin/svn.pm b/IkiWiki/Plugin/svn.pm index 8824a6ce0..fd11f2c63 100644 --- a/IkiWiki/Plugin/svn.pm +++ b/IkiWiki/Plugin/svn.pm @@ -5,6 +5,7 @@ use warnings; use strict; use IkiWiki; use POSIX qw(setlocale LC_CTYPE); +use URI::Escape q{uri_escape_utf8}; sub import { hook(type => "checkconfig", id => "svn", call => \&checkconfig); @@ -292,7 +293,8 @@ sub rcs_recentchanges ($) { } my $diffurl=defined $config{diffurl} ? $config{diffurl} : ""; - $diffurl=~s/\[\[file\]\]/$file/g; + my $efile = uri_escape_utf8($file); + $diffurl=~s/\[\[file\]\]/$efile/g; $diffurl=~s/\[\[r1\]\]/$rev - 1/eg; $diffurl=~s/\[\[r2\]\]/$rev/g; diff --git a/IkiWiki/Plugin/tla.pm b/IkiWiki/Plugin/tla.pm index da4385446..11be248e8 100644 --- a/IkiWiki/Plugin/tla.pm +++ b/IkiWiki/Plugin/tla.pm @@ -4,6 +4,7 @@ package IkiWiki::Plugin::tla; use warnings; use strict; use IkiWiki; +use URI::Escape q{uri_escape_utf8}; sub import { hook(type => "checkconfig", id => "tla", call => \&checkconfig); @@ -224,7 +225,8 @@ sub rcs_recentchanges ($) { foreach my $file (@paths) { my $diffurl=defined $config{diffurl} ? 
$config{diffurl} : ""; - $diffurl=~s/\[\[file\]\]/$file/g; + my $efile = uri_escape_utf8($file); + $diffurl=~s/\[\[file\]\]/$efile/g; $diffurl=~s/\[\[rev\]\]/$change/g; push @pages, { page => pagename($file), diff --git a/IkiWiki/Plugin/trail.pm b/IkiWiki/Plugin/trail.pm new file mode 100644 index 000000000..d5fb2b5d6 --- /dev/null +++ b/IkiWiki/Plugin/trail.pm @@ -0,0 +1,467 @@ +#!/usr/bin/perl +# Copyright © 2008-2011 Joey Hess +# Copyright © 2009-2012 Simon McVittie +# Licensed under the GNU GPL, version 2, or any later version published by the +# Free Software Foundation +package IkiWiki::Plugin::trail; + +use warnings; +use strict; +use IkiWiki 3.00; + +sub import { + hook(type => "getsetup", id => "trail", call => \&getsetup); + hook(type => "needsbuild", id => "trail", call => \&needsbuild); + hook(type => "preprocess", id => "trailoptions", call => \&preprocess_trailoptions, scan => 1); + hook(type => "preprocess", id => "trailitem", call => \&preprocess_trailitem, scan => 1); + hook(type => "preprocess", id => "trailitems", call => \&preprocess_trailitems, scan => 1); + hook(type => "preprocess", id => "traillink", call => \&preprocess_traillink, scan => 1); + hook(type => "pagetemplate", id => "trail", call => \&pagetemplate); + hook(type => "build_affected", id => "trail", call => \&build_affected); +} + +# Page state +# +# If a page $T is a trail, then it can have +# +# * $pagestate{$T}{trail}{contents} +# Reference to an array of lists each containing either: +# - [pagenames => "page1", "page2"] +# Those literal pages +# - [link => "link"] +# A link specification, pointing to the same page that [[link]] +# would select +# - [pagespec => "posts/*", "age", 0] +# A match by pagespec; the third array element is the sort order +# and the fourth is whether to reverse sorting +# +# * $pagestate{$T}{trail}{sort} +# A sorting order; if absent or undef, the trail is in the order given +# by the links that form it +# +# * $pagestate{$T}{trail}{circular} +# True if this trail is circular (i.e. going "next" from the last item is +# allowed, and takes you back to the first) +# +# * $pagestate{$T}{trail}{reverse} +# True if C is to be reversed. +# +# If a page $M is a member of a trail $T, then it has +# +# * $pagestate{$M}{trail}{item}{$T}[0] +# The page before this one in C<$T> at the last rebuild, or undef. +# +# * $pagestate{$M}{trail}{item}{$T}[1] +# The page after this one in C<$T> at the last refresh, or undef. + +sub getsetup () { + return + plugin => { + safe => 1, + rebuild => undef, + }, +} + +# Cache of pages' old titles, so we can tell whether they changed +my %old_trail_titles; + +sub needsbuild (@) { + my $needsbuild=shift; + + foreach my $page (keys %pagestate) { + if (exists $pagestate{$page}{trail}) { + if (exists $pagesources{$page} && + grep { $_ eq $pagesources{$page} } @$needsbuild) { + # Remember its title, so we can know whether + # it changed. + $old_trail_titles{$page} = title_of($page); + + # Remove state, it will be re-added + # if the preprocessor directive is still + # there during the rebuild. {item} is the + # only thing that's added for items, not + # trails, and it's harmless to delete that - + # the item is being rebuilt anyway. 
+ delete $pagestate{$page}{trail}; + } + } + } + + return $needsbuild; +} + +my $scanned = 0; + +sub preprocess_trailoptions (@) { + my %params = @_; + + if (exists $params{circular}) { + $pagestate{$params{page}}{trail}{circular} = + IkiWiki::yesno($params{circular}); + } + + if (exists $params{sort}) { + $pagestate{$params{page}}{trail}{sort} = $params{sort}; + } + + if (exists $params{reverse}) { + $pagestate{$params{page}}{trail}{reverse} = $params{reverse}; + } + + return ""; +} + +sub preprocess_trailitem (@) { + my $link = shift; + shift; + + # avoid collecting everything in the preprocess stage if we already + # did in the scan stage + if (defined wantarray) { + return "" if $scanned; + } + else { + $scanned = 1; + } + + my %params = @_; + my $trail = $params{page}; + + $link = linkpage($link); + + add_link($params{page}, $link, 'trail'); + push @{$pagestate{$params{page}}{trail}{contents}}, [link => $link]; + + return ""; +} + +sub preprocess_trailitems (@) { + my %params = @_; + + # avoid collecting everything in the preprocess stage if we already + # did in the scan stage + if (defined wantarray) { + return "" if $scanned; + } + else { + $scanned = 1; + } + + # trail members from a pagespec ought to be in some sort of order, + # and path is a nice obvious default + $params{sort} = 'path' unless exists $params{sort}; + $params{reverse} = 'no' unless exists $params{reverse}; + + if (exists $params{pages}) { + push @{$pagestate{$params{page}}{trail}{contents}}, + ["pagespec" => $params{pages}, $params{sort}, + IkiWiki::yesno($params{reverse})]; + } + + if (exists $params{pagenames}) { + push @{$pagestate{$params{page}}{trail}{contents}}, + [pagenames => (split ' ', $params{pagenames})]; + } + + return ""; +} + +sub preprocess_traillink (@) { + my $link = shift; + shift; + + my %params = @_; + my $trail = $params{page}; + + $link =~ qr{ + (?: + ([^\|]+) # 1: link text + \| # followed by | + )? # optional + + (.+) # 2: page to link to + }x; + + my $linktext = $1; + $link = linkpage($2); + + add_link($params{page}, $link, 'trail'); + + # avoid collecting everything in the preprocess stage if we already + # did in the scan stage + my $already; + if (defined wantarray) { + $already = $scanned; + } + else { + $scanned = 1; + } + + push @{$pagestate{$params{page}}{trail}{contents}}, [link => $link] unless $already; + + if (defined $linktext) { + $linktext = pagetitle($linktext); + } + + if (exists $params{text}) { + $linktext = $params{text}; + } + + if (defined $linktext) { + return htmllink($trail, $params{destpage}, + $link, linktext => $linktext); + } + + return htmllink($trail, $params{destpage}, $link); +} + +# trail => [member1, member2] +my %trail_to_members; +# member => { trail => [prev, next] } +# e.g. if %trail_to_members = ( +# trail1 => ["member1", "member2"], +# trail2 => ["member0", "member1"], +# ) +# +# then $member_to_trails{member1} = { +# trail1 => [undef, "member2"], +# trail2 => ["member0", undef], +# } +my %member_to_trails; + +# member => 1 +my %rebuild_trail_members; + +sub trails_differ { + my ($old, $new) = @_; + + foreach my $trail (keys %$old) { + if (! 
exists $new->{$trail}) { + return 1; + } + + if (exists $old_trail_titles{$trail} && + title_of($trail) ne $old_trail_titles{$trail}) { + return 1; + } + + my ($old_p, $old_n) = @{$old->{$trail}}; + my ($new_p, $new_n) = @{$new->{$trail}}; + $old_p = "" unless defined $old_p; + $old_n = "" unless defined $old_n; + $new_p = "" unless defined $new_p; + $new_n = "" unless defined $new_n; + if ($old_p ne $new_p) { + return 1; + } + + if (exists $old_trail_titles{$old_p} && + title_of($old_p) ne $old_trail_titles{$old_p}) { + return 1; + } + + if ($old_n ne $new_n) { + return 1; + } + + if (exists $old_trail_titles{$old_n} && + title_of($old_n) ne $old_trail_titles{$old_n}) { + return 1; + } + } + + foreach my $trail (keys %$new) { + if (! exists $old->{$trail}) { + return 1; + } + } + + return 0; +} + +my $done_prerender = 0; + +sub prerender { + return if $done_prerender; + + %trail_to_members = (); + %member_to_trails = (); + + foreach my $trail (keys %pagestate) { + next unless exists $pagestate{$trail}{trail}{contents}; + + my $members = []; + my @contents = @{$pagestate{$trail}{trail}{contents}}; + + foreach my $c (@contents) { + if ($c->[0] eq 'pagespec') { + push @$members, pagespec_match_list($trail, + $c->[1], sort => $c->[2], + reverse => $c->[3]); + } + elsif ($c->[0] eq 'pagenames') { + my @pagenames = @$c; + shift @pagenames; + foreach my $page (@pagenames) { + if (exists $pagesources{$page}) { + push @$members, $page; + } + else { + # rebuild trail if it turns up + add_depends($trail, $page, deptype("presence")); + } + } + } + elsif ($c->[0] eq 'link') { + my $best = bestlink($trail, $c->[1]); + push @$members, $best if length $best; + } + } + + if (defined $pagestate{$trail}{trail}{sort}) { + # re-sort + @$members = pagespec_match_list($trail, 'internal(*)', + list => $members, + sort => $pagestate{$trail}{trail}{sort}); + } + + if (IkiWiki::yesno $pagestate{$trail}{trail}{reverse}) { + @$members = reverse @$members; + } + + # uniquify + my %seen; + my @tmp; + foreach my $member (@$members) { + push @tmp, $member unless $seen{$member}; + $seen{$member} = 1; + } + $members = [@tmp]; + + for (my $i = 0; $i <= $#$members; $i++) { + my $member = $members->[$i]; + my $prev; + $prev = $members->[$i - 1] if $i > 0; + my $next = $members->[$i + 1]; + + $member_to_trails{$member}{$trail} = [$prev, $next]; + } + + if ((scalar @$members) > 1 && $pagestate{$trail}{trail}{circular}) { + $member_to_trails{$members->[0]}{$trail}[0] = $members->[$#$members]; + $member_to_trails{$members->[$#$members]}{$trail}[1] = $members->[0]; + } + + $trail_to_members{$trail} = $members; + } + + foreach my $member (keys %pagestate) { + if (exists $pagestate{$member}{trail}{item} && + ! exists $member_to_trails{$member}) { + $rebuild_trail_members{$member} = 1; + delete $pagestate{$member}{trail}{item}; + } + } + + foreach my $member (keys %member_to_trails) { + if (! exists $pagestate{$member}{trail}{item}) { + $rebuild_trail_members{$member} = 1; + } + else { + if (trails_differ($pagestate{$member}{trail}{item}, + $member_to_trails{$member})) { + $rebuild_trail_members{$member} = 1; + } + } + + $pagestate{$member}{trail}{item} = $member_to_trails{$member}; + } + + $done_prerender = 1; +} + +sub build_affected { + my %affected; + + # In principle we might not have done this yet, although in practice + # at least the trail itself has probably changed, and its template + # almost certainly contains TRAILS or TRAILLOOP, triggering our + # prerender as a side-effect. 
+ prerender(); + + foreach my $member (keys %rebuild_trail_members) { + $affected{$member} = sprintf(gettext("building %s, its previous or next page has changed"), $member); + } + + return %affected; +} + +sub title_of ($) { + my $page = shift; + if (defined ($pagestate{$page}{meta}{title})) { + return $pagestate{$page}{meta}{title}; + } + return pagetitle(IkiWiki::basename($page)); +} + +my $recursive = 0; + +sub pagetemplate (@) { + my %params = @_; + my $page = $params{page}; + my $template = $params{template}; + + return unless length $page; + + if ($template->query(name => 'trails') && ! $recursive) { + prerender(); + + $recursive = 1; + my $inner = template("trails.tmpl", blind_cache => 1); + IkiWiki::run_hooks(pagetemplate => sub { + shift->(%params, template => $inner) + }); + $template->param(trails => $inner->output); + $recursive = 0; + } + + if ($template->query(name => 'trailloop')) { + prerender(); + + my @trails; + + # sort backlinks by page name to have a consistent order + foreach my $trail (sort keys %{$member_to_trails{$page}}) { + + my $members = $trail_to_members{$trail}; + my ($prev, $next) = @{$member_to_trails{$page}{$trail}}; + my ($prevurl, $nexturl, $prevtitle, $nexttitle); + + if (defined $prev) { + $prevurl = urlto($prev, $page); + $prevtitle = title_of($prev); + } + + if (defined $next) { + $nexturl = urlto($next, $page); + $nexttitle = title_of($next); + } + + push @trails, { + prevpage => $prev, + prevtitle => $prevtitle, + prevurl => $prevurl, + nextpage => $next, + nexttitle => $nexttitle, + nexturl => $nexturl, + trailpage => $trail, + trailtitle => title_of($trail), + trailurl => urlto($trail, $page), + }; + } + + $template->param(trailloop => \@trails); + } +} + +1; diff --git a/IkiWiki/Plugin/transient.pm b/IkiWiki/Plugin/transient.pm index c0ad5fc11..d4eb005ea 100644 --- a/IkiWiki/Plugin/transient.pm +++ b/IkiWiki/Plugin/transient.pm @@ -8,7 +8,7 @@ use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "transient", call => \&getsetup); hook(type => "checkconfig", id => "transient", call => \&checkconfig); - hook(type => "change", id => "transient", call => \&change); + hook(type => "rendered", id => "transient", call => \&rendered); } sub getsetup () { @@ -33,7 +33,7 @@ sub checkconfig () { } } -sub change (@) { +sub rendered (@) { foreach my $file (@_) { # If the corresponding file exists in the transient underlay # and isn't actually being used, we can get rid of it. @@ -43,7 +43,7 @@ sub change (@) { my $casualty = "$transientdir/$file"; if (srcfile($file) ne $casualty && -e $casualty) { debug(sprintf(gettext("removing transient version of %s"), $file)); - IkiWiki::prune($casualty); + IkiWiki::prune($casualty, $transientdir); } } } diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm index 05132a8a8..a90d202ee 100644 --- a/IkiWiki/Render.pm +++ b/IkiWiki/Render.pm @@ -262,12 +262,13 @@ sub render ($$) { } } -sub prune ($) { +sub prune ($;$) { my $file=shift; + my $up_to=shift; unlink($file); my $dir=dirname($file); - while (rmdir($dir)) { + while ((! defined $up_to || $dir =~ m{^\Q$up_to\E\/}) && rmdir($dir)) { $dir=dirname($dir); } } @@ -447,7 +448,7 @@ sub remove_del (@) { } foreach my $old (@{$oldrenderedfiles{$page}}) { - prune($config{destdir}."/".$old); + prune($config{destdir}."/".$old, $config{destdir}); } foreach my $source (keys %destsources) { @@ -537,7 +538,7 @@ sub remove_unrendered () { foreach my $file (@{$oldrenderedfiles{$page}}) { if (! 
grep { $_ eq $file } @{$renderedfiles{$page}}) { debug(sprintf(gettext("removing %s, no longer built by %s"), $file, $page)); - prune($config{destdir}."/".$file); + prune($config{destdir}."/".$file, $config{destdir}); } } } @@ -800,6 +801,14 @@ sub refresh () { derender_internal($file); } + run_hooks(build_affected => sub { + my %affected = shift->(); + while (my ($page, $message) = each %affected) { + next unless exists $pagesources{$page}; + render($pagesources{$page}, $message); + } + }); + my ($backlinkchanged, $linkchangers)=calculate_changed_links($changed, $del, $oldlink_targets); @@ -821,8 +830,13 @@ sub refresh () { run_hooks(delete => sub { shift->(@$del, @$internal_del) }); } if (%rendered) { - run_hooks(change => sub { shift->(keys %rendered) }); + run_hooks(rendered => sub { shift->(keys %rendered) }); + run_hooks(change => sub { shift->(keys %rendered) }); # back-compat } + my %all_changed = map { $_ => 1 } + @$new, @$changed, @$del, + @$internal_new, @$internal_changed, @$internal_del; + run_hooks(changes => sub { shift->(keys %all_changed) }); } sub clean_rendered { @@ -831,7 +845,7 @@ sub clean_rendered { remove_unrendered(); foreach my $page (keys %oldrenderedfiles) { foreach my $file (@{$oldrenderedfiles{$page}}) { - prune($config{destdir}."/".$file); + prune($config{destdir}."/".$file, $config{destdir}); } } } diff --git a/IkiWiki/Wrapper.pm b/IkiWiki/Wrapper.pm index c39aa2ef7..06be36dfc 100644 --- a/IkiWiki/Wrapper.pm +++ b/IkiWiki/Wrapper.pm @@ -93,12 +93,53 @@ EOF # memory, a pile up of processes could cause thrashing # otherwise. The fd of the lock is stored in # IKIWIKI_CGILOCK_FD so unlockwiki can close it. - $pre_exec=<<"EOF"; + # + # A lot of cgi wrapper processes can potentially build + # up and clog an otherwise unloaded web server. To + # partially avoid this, when a GET comes in and the lock + # is already held, rather than blocking a html page is + # constructed that retries. This is enabled by setting + # cgi_overload_delay. + if (defined $config{cgi_overload_delay} && + $config{cgi_overload_delay} =~/^[0-9]+/) { + my $i=int($config{cgi_overload_delay}); + $pre_exec.="#define CGI_OVERLOAD_DELAY $i\n" + if $i > 0; + my $msg=gettext("Please wait"); + $msg=~s/"/\\"/g; + $pre_exec.='#define CGI_PLEASE_WAIT_TITLE "'.$msg."\"\n"; + if (defined $config{cgi_overload_message} && length $config{cgi_overload_message}) { + $msg=$config{cgi_overload_message}; + $msg=~s/"/\\"/g; + } + $pre_exec.='#define CGI_PLEASE_WAIT_BODY "'.$msg."\"\n"; + } + $pre_exec.=<<"EOF"; lockfd=open("$config{wikistatedir}/cgilock", O_CREAT | O_RDWR, 0666); - if (lockfd != -1 && lockf(lockfd, F_LOCK, 0) == 0) { - char *fd_s=malloc(8); - sprintf(fd_s, "%i", lockfd); - setenv("IKIWIKI_CGILOCK_FD", fd_s, 1); + if (lockfd != -1) { +#ifdef CGI_OVERLOAD_DELAY + char *request_method = getenv("REQUEST_METHOD"); + if (request_method && strcmp(request_method, "GET") == 0) { + if (lockf(lockfd, F_TLOCK, 0) == 0) { + set_cgilock_fd(lockfd); + } + else { + printf("Content-Type: text/html\\nRefresh: %i; URL=%s\\n\\n%s
</title></head><body><p>%s</p></body></html>
", + CGI_OVERLOAD_DELAY, + getenv("REQUEST_URI"), + CGI_PLEASE_WAIT_TITLE, + CGI_PLEASE_WAIT_BODY); + exit(0); + } + } + else if (lockf(lockfd, F_LOCK, 0) == 0) { + set_cgilock_fd(lockfd); + } +#else + if (lockf(lockfd, F_LOCK, 0) == 0) { + set_cgilock_fd(lockfd); + } +#endif } EOF } @@ -140,6 +181,12 @@ void addenv(char *var, char *val) { newenviron[i++]=s; } +set_cgilock_fd (int lockfd) { + char *fd_s=malloc(8); + sprintf(fd_s, "%i", lockfd); + setenv("IKIWIKI_CGILOCK_FD", fd_s, 1); +} + int main (int argc, char **argv) { int lockfd=-1; char *s; @@ -214,7 +261,7 @@ $set_background_command EOF my @cc=exists $ENV{CC} ? possibly_foolish_untaint($ENV{CC}) : 'cc'; - push @cc, possibly_foolish_untaint($ENV{CFLAGS}) if exists $ENV{CFLAGS}; + push @cc, split(' ', possibly_foolish_untaint($ENV{CFLAGS})) if exists $ENV{CFLAGS}; if (system(@cc, "$wrapper.c", "-o", "$wrapper.new") != 0) { #translators: The parameter is a C filename. error(sprintf(gettext("failed to compile %s"), "$wrapper.c")); diff --git a/Makefile.PL b/Makefile.PL index 69ba5e5ef..ef29a950c 100755 --- a/Makefile.PL +++ b/Makefile.PL @@ -75,7 +75,7 @@ underlay_install: install -d $(DESTDIR)$(PREFIX)/share/ikiwiki for dir in `cd underlays && $(FIND) . -follow -type d`; do \ install -d $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir; \ - for file in `$(FIND) underlays/$$dir -follow -maxdepth 1 -type f -not -name \\*.full.js -not -name \\*.full.css`; do \ + for file in `$(FIND) underlays/$$dir -follow -maxdepth 1 -type f ! -name \\*.full.js ! -name \\*.full.css`; do \ cp -pRL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir 2>/dev/null || \ install -m 644 $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir; \ done; \ diff --git a/auto-blog.setup b/auto-blog.setup index 0eb83ded6..5617daf9e 100644 --- a/auto-blog.setup +++ b/auto-blog.setup @@ -36,7 +36,7 @@ IkiWiki::Setup::Automator->import( cgiurl => "http://$domain/~$ENV{USER}/$wikiname_short/ikiwiki.cgi", cgi_wrapper => "$ENV{HOME}/public_html/$wikiname_short/ikiwiki.cgi", adminemail => "$ENV{USER}\@$domain", - add_plugins => [qw{goodstuff websetup comments blogspam calendar sidebar}], + add_plugins => [qw{goodstuff websetup comments blogspam calendar sidebar trail}], disable_plugins => [qw{}], libdir => "$ENV{HOME}/.ikiwiki", rss => 1, diff --git a/debian/changelog b/debian/changelog index c8db76966..d1d132e1e 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,138 @@ +ikiwiki (3.20121213) UNRELEASED; urgency=low + + * htmlscrubber: Allow the bitcoin URI scheme. + * htmlscrubber: Allow the URI schemes of major VCS's. + * aggregate: When run with --aggregate, if an aggregation is already + running, don't go on and --refresh. + * trail: Avoid excess dependencies between pages in the trail + and the page defining the trail. Thanks, smcv. + * opendiscussion: Don't allow editing discussion pages if discussion pages + are disabled. (smcv) + * poll: Add expandable option to allow users to easily add new choices to + a poll. + * trail: Avoid massive slowdown caused by pagetemplate hook when displaying + dynamic cgi pages, which cannot use trail anyway. + + -- Joey Hess Sat, 22 Dec 2012 16:15:24 -0400 + +ikiwiki (3.20121212) unstable; urgency=low + + * filecheck: Fix bug that prevented File::MimeInfo::Magic from ever + being used. + * openid: Display openid in Preferences page as a comment, so it can be + selected in all browsers. 
+ + -- Joey Hess Tue, 11 Dec 2012 12:12:12 -0400 + +ikiwiki (3.20121017) unstable; urgency=low + + * recentchangesdiff: fix further breakage to the template from 3.20120725 + + -- Joey Hess Tue, 16 Oct 2012 20:49:27 -0400 + +ikiwiki (3.20121016) unstable; urgency=low + + * monochrome: New theme, contributed by Jon Dowland. + * rst: Ported to python 3, while still also being valid python 2. + Thanks, W. Trevor King + * Try to avoid a situation in which so many ikiwiki cgi wrapper programs + are running, all waiting on some long-running thing like a site rebuild, + that it prevents the web server from doing anything else. The current + approach only avoids this problem for GET requests; if multiple cgi's + run GETs on a site at the same time, one will display a "please wait" + page for a configurable number of seconds, which then redirects to retry. + To enable this protection, set cgi_overload_delay to the number of + seconds to wait. This is not enabled by default. + * Add back a 1em margin between archivepage divs. + * recentchangesdiff: Correct broken template that resulted in duplicate + diff icons being displayed, and bloated the recentchanges page with + inline diffs when the configuration should have not allowed them. + + -- Joey Hess Tue, 16 Oct 2012 15:14:19 -0400 + +ikiwiki (3.20120725) unstable; urgency=low + + * recentchangesdiff: When diffurl is not set, provide inline diffs + in the recentchanges page, with visibility toggleable via javascript. + Thanks, Antoine Beaupré + * Split CFLAGS into words when building wrapper. Closes: #682237 + * osm: Avoid calling urlto before generated files are registered. + Thanks, Philippe Gauthier and Antoine Beaupré + * osm: Add osm_openlayers_url configuration setting. + Thanks, Genevieve + * osm: osm_layers can be used to configured the layers displayed on the map. + Thanks, Antoine Beaupré + * comments: Remove ipv6 address specific code. + + -- Joey Hess Sat, 25 Aug 2012 10:58:42 -0400 + +ikiwiki (3.20120629) unstable; urgency=low + + * mirrorlist: Add mirrorlist_use_cgi setting that avoids usedirs or + other config differences by linking to the mirror's CGI. (intrigeri) + + -- Joey Hess Fri, 29 Jun 2012 10:16:08 -0400 + +ikiwiki (3.20120516) unstable; urgency=high + + * meta: Security fix; add missing sanitization of author and authorurl. + CVE-2012-0220 Thanks, Raúl Benencia + + -- Joey Hess Wed, 16 May 2012 19:51:27 -0400 + +ikiwiki (3.20120419) unstable; urgency=low + + * Remove dead link from plugins/teximg. Closes: #664885 + * inline: When the pagenames list includes pages that do not exist, skip + them. + * meta: Export author information in html tag. Closes: #664779 + Thanks, Martin Michlmayr + * notifyemail: New plugin, sends email notifications about new and + changed pages, and allows subscribing to comments. + * Added a "changes" hook. Renamed the "change" hook to "rendered", but + the old hook name is called for now for back-compat. + * meta: Support keywords header. Closes: #664780 + Thanks, Martin Michlmayr + * passwordauth: Fix url in password recovery email to be absolute. + * httpauth: When it's the only auth method, avoid a pointless and + confusing signin form, and go right to the httpauthurl. + * rename: Allow rename to be started not from the edit page; return to + the renamed page in this case. + * remove: Support removing of pages in the transient underlay. (smcv) + * inline, trail: The pagenames parameter is now a list of absolute + pagenames, not relative wikilink type names. 
This is necessary to fix + a bug, and makes pagenames more consistent with the pagespec used + in the pages parameter. (smcv) + * link: Fix renaming wikilinks that contain embedded urls. + * graphviz: Handle self-links. + * trail: Improve CSS, also display trail links at bottom of page, + and a bug fix. (smcv) + + -- Joey Hess Thu, 19 Apr 2012 15:32:07 -0400 + +ikiwiki (3.20120319) unstable; urgency=low + + * osm: New plugin to embed an OpenStreetMap into a wiki page. + Supports waypoints, tags, and can even draw paths matching + wikilinks between pages containing waypoints. + Thanks to Blars Blarson and Antoine Beaupré, as well as the worldwide + OpenStreetMap community for this utter awesomeness. + * trail: New plugin to add navigation trails through pages via Next and + Previous links. Trails can easily be added to existing inlines by setting + trail=yes in the inline. + Thanks to Simon McVittie for his persistance developing this feature. + * Fix a snail mail address. Closes: #659158 + * openid-jquery.js: Update URL of Wordpress favicon. Closes: #660549 + * Drop the version attribute on the generator tag in Atom feeds + to make builds more reproducible. Closes: #661569 (Paul Wise) + * shortcut: Support Wikipedia's form of url-encoding for unicode + characters, which involves mojibake. Closes: #661198 + * Add a few missing jquery UI icons to attachment upload widget underlay. + * URI escape filename when generating the diffurl. + * Add build-affected hook. Used by trail. + + -- Joey Hess Mon, 19 Mar 2012 14:24:43 -0400 + ikiwiki (3.20120202) unstable; urgency=low * mdwn: Added nodiscount setting, which can be used to avoid using the diff --git a/debian/compat b/debian/compat index 7f8f011eb..ec635144f 100644 --- a/debian/compat +++ b/debian/compat @@ -1 +1 @@ -7 +9 diff --git a/debian/control b/debian/control index 9403dfb44..9c6ab7ad9 100644 --- a/debian/control +++ b/debian/control @@ -1,7 +1,7 @@ Source: ikiwiki Section: web Priority: optional -Build-Depends: perl, debhelper (>= 7.0.50) +Build-Depends: perl, debhelper (>= 9) Build-Depends-Indep: dpkg-dev (>= 1.9.0), libxml-simple-perl, libtext-markdown-discount-perl, libtimedate-perl, libhtml-template-perl, @@ -10,7 +10,7 @@ Build-Depends-Indep: dpkg-dev (>= 1.9.0), libxml-simple-perl, libfile-chdir-perl, libyaml-libyaml-perl, python-support Maintainer: Joey Hess Uploaders: Josh Triplett -Standards-Version: 3.9.2 +Standards-Version: 3.9.3 Homepage: http://ikiwiki.info/ Vcs-Git: git://git.ikiwiki.info/ @@ -38,7 +38,7 @@ Suggests: viewvc | gitweb | viewcvs, libsearch-xapian-perl, libsparkline-php, texlive, dvipng, libtext-wikicreole-perl, libsort-naturally-perl, libtext-textile-perl, libhighlight-perl, po4a (>= 0.35-1), gettext, libnet-inet6glue-perl, - libtext-multimarkdown-perl + libtext-multimarkdown-perl, libxml-writer-perl Conflicts: ikiwiki-plugin-table Replaces: ikiwiki-plugin-table Provides: ikiwiki-plugin-table diff --git a/debian/copyright b/debian/copyright index 74418281d..e1a81932b 100644 --- a/debian/copyright +++ b/debian/copyright @@ -1,4 +1,4 @@ -Format: http://dep.debian.net/deps/dep5/ +Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Source: native package Files: * @@ -153,6 +153,14 @@ Files: IkiWiki/Plugin/rsync.pm Copyright: © 2009 Amitai Schlair License: BSD-2-clause +Files: IkiWiki/Plugin/osm.pm +Copyright: © 2011 Blars Blarson, Antoine Beaupré +License: GPL-2 + +Files: IkiWiki/Plugin/trail.pm +Copyright: 2009-2012 Simon McVittie +License: GPL-2+ + Files: doc/logo/* Copyright: © 2006 
Recai Oktaş License: GPL-2+ @@ -240,6 +248,10 @@ Files: underlays/themes/goldtype/* Copyright: © Lars Wirzenius License: GPL-2+ +Files: underlays/themes/monochrome/* +Copyright: © 2012 Jon Dowland +License: GPL-2+ + License: BSD-2-clause Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions diff --git a/doc/anchor.mdwn b/doc/anchor.mdwn index 012e52fa0..12d193fe9 100644 --- a/doc/anchor.mdwn +++ b/doc/anchor.mdwn @@ -1,3 +1,11 @@ ikiwiki works with anchors in various situations. +You can insert anchors directly in the body of a page and it will be used on the resulting HTML, for example: + + + +... will make the link [[anchor#anchor]] work.. + + + This page accumulates links to the concept of anchors. diff --git a/doc/basewiki/sandbox.mdwn b/doc/basewiki/sandbox.mdwn index c66534fc2..e76bdb8d1 100644 --- a/doc/basewiki/sandbox.mdwn +++ b/doc/basewiki/sandbox.mdwn @@ -30,3 +30,5 @@ Bulleted list * item [[ikiwiki/WikiLink]] + +[[!calendar type="month" pages="blog/*"]] diff --git a/doc/branches.mdwn b/doc/branches.mdwn index b7b9164ac..232f2ce6a 100644 --- a/doc/branches.mdwn +++ b/doc/branches.mdwn @@ -20,6 +20,6 @@ Long-lived branches in the main git repository: * `ignore` gets various branches merged to it that [[Joey]] wishes to ignore when looking at everyone's unmerged changes. * `pristine-tar` contains deltas that - [pristine-tar](http://kitenet.net/~joey/code/pristine-tar) + [pristine-tar](http://joeyh.name/code/pristine-tar) can use to recreate released tarballs of ikiwiki * `setup` contains the ikiwiki.setup file for this site diff --git a/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn b/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn new file mode 100644 index 000000000..81a5abf28 --- /dev/null +++ b/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn @@ -0,0 +1,28 @@ +If you wish to install ikiwiki in your home directory (for example because you don't have root access), you need to set environment variables (such as PATH and PERL5LIB) to point to these directories that contain your personal copy of IkiWiki. + +The CGI wrapper remembers PATH, but not the environment variable PERL5LIB. Consequently, it will look for plugins and so on in the usual system directories, not in your personal copy. This is particularly insidious if you have a system copy of a different version installed, as your CGI wrapper may then load in code from this version. + +I think the CGI wrapper should remember PERL5LIB too. + +-- Martin + +Thank's a lot for pointing me to this location in the code. I was looking it for some time. + +This brutal patch implement your solution as a temporary fix. + + *** Wrapper.pm.old 2012-08-25 16:41:41.000000000 +0200 + --- Wrapper.pm 2012-10-01 17:33:17.582956524 +0200 + *************** + *** 149,154 **** + --- 149,155 ---- + $envsave + newenviron[i++]="HOME=$ENV{HOME}"; + newenviron[i++]="PATH=$ENV{PATH}"; + + newenviron[i++]="PERL5LIB=$ENV{PERL5LIB}"; + newenviron[i++]="WRAPPED_OPTIONS=$configstring"; + + #ifdef __TINYC__ + +As I am not sure that remembering `PERL5LIB` is a good idea, I think that a prettier solution will be to add a config variable (let's say `cgi_wrapper_perllib`) which, if fixed, contains the `PERL5LIB` value to include in the wrapper, or another (let's say `cgi_wrapper_remember_libdir`), which, if fixed, remember the current `PERL5LIB`. 
+ +-- Bruno diff --git a/doc/bugs/CamelCase_and_Recent_Changes_create_spurious_Links.mdwn b/doc/bugs/CamelCase_and_Recent_Changes_create_spurious_Links.mdwn new file mode 100644 index 000000000..de95fb7d3 --- /dev/null +++ b/doc/bugs/CamelCase_and_Recent_Changes_create_spurious_Links.mdwn @@ -0,0 +1,11 @@ +Hi folks, + +This is a fairly fresh wiki. I recently noticed the Links: section the the bottom looked like this: + +Links: index recentchanges/change 0b2f03d3d21a3bb21f6de75d8711c73df227e17c recentchanges/change 1c5b830b15c4f2f0cc97ecc0adfd60a1f1578918 recentchanges/change 20b20b91b90b28cdf2563eb959a733c6dfebea7a recentchanges/change 3377cedd66380ed416f59076d69f546bf12ae1e4 recentchanges/change 4c53d778870ea368931e7df2a40ea67d00130202 recentchanges/change 7a9f3c441a9ec7e189c9df322851afa21fd8b00c recentchanges/change 7dcaea1be47308ee27a18f893ff232a8370e348a recentchanges/change 963245d4e127159e12da436dea30941ec371c6be recentchanges/change cd489ff4abde8dd611f7e42596b93953b38b9e1c ... + +All of those "recentchanges/ change xxxxxxx" links are clickable, but all yield 404 when clicked. + +When I disable the CamelCase plugin and rebuild the wiki, all the Links other than index disappear, as they should. Re-enable CamelCase, and they're back. + +This is a very simple wiki. Just fresh, only one page other than index (this one), and nothing at all fancy/weird about it. diff --git a/doc/bugs/Existing_Discussion_pages_appear_as_non-existing.mdwn b/doc/bugs/Existing_Discussion_pages_appear_as_non-existing.mdwn new file mode 100644 index 000000000..9ba4ede6e --- /dev/null +++ b/doc/bugs/Existing_Discussion_pages_appear_as_non-existing.mdwn @@ -0,0 +1,5 @@ +If you look at [[todo/org mode]], the link to the Discussion page is not there (has a question mark), as if it didn't exist. But--through the search--I discovered that the Discussion page does exist actually: [[todo/org mode/Discussion]]. + +So, there is a bug that prevents a link to the existing Discussion page from appearing in the correct way on the corresponding main page. --Ivan Z. + +Perhaps, this has something to do with the same piece of code/logic (concerning case-sensitivity) as the fixed [[bugs/unwanted discussion links on discussion pages]]? --Ivan Z. diff --git a/doc/bugs/Linkmap_doesn__39__t_support_multiple_linkmaps_on_a_single_page.mdwn b/doc/bugs/Linkmap_doesn__39__t_support_multiple_linkmaps_on_a_single_page.mdwn new file mode 100644 index 000000000..a0645477e --- /dev/null +++ b/doc/bugs/Linkmap_doesn__39__t_support_multiple_linkmaps_on_a_single_page.mdwn @@ -0,0 +1,3 @@ +If I use the linkmap directive twice on a single page, I get the same image appearing in both locations, even though the parameters for the two directives may have been different. + +-- Martin diff --git a/doc/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting.mdwn b/doc/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting.mdwn new file mode 100644 index 000000000..4c7b12e8c --- /dev/null +++ b/doc/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting.mdwn @@ -0,0 +1,11 @@ +Say you are commenting on this report. 
The Navbar on top will look like + +[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ commenting on Navbar does not link to page being commented on while commenting + +while either of those two options would be better: + +[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ commenting on [Navbar does not link to page being commented on while commenting](http://ikiwiki.info/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting/) + +[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ [Navbar does not link to page being commented on while commenting](http://ikiwiki.info/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting/) / New comment + +-- RichiH diff --git a/doc/bugs/Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8.mdwn b/doc/bugs/Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8.mdwn deleted file mode 100644 index ab4dc8953..000000000 --- a/doc/bugs/Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8.mdwn +++ /dev/null @@ -1,13 +0,0 @@ -I'm writing [pykipandoc plugin](https://github.com/temmen/pykipandoc/blob/master/pykipandoc), that work at least as pandoc-iki. - -It works in compile mode, editing pages in web mode however results in - - pandoc: : hGetContents: invalid argument (Invalid or incomplete multibyte or wide character) - -I think that is because HTTP POST request building editpage doesn't correctly manage utf-8 contents: see strange chars in this form-data name="editcontent"? - - This principle has guided pandoc’s decisions in finding syntax for tables, footnotes, and other extensions. - -Please, any advice can be sent to [GitHub pykipandoc](https://github.com/temmen/pykipandoc) (some other info there on the [README](https://github.com/temmen/pykipandoc/blob/master/README.md)) and to [temmenel(at)gmail(dot)com](mailto:temmenel@gmail.com). - -¡Thank you all! diff --git a/doc/bugs/Remove_redirect_pages_from_inline_pages.mdwn b/doc/bugs/Remove_redirect_pages_from_inline_pages.mdwn new file mode 100644 index 000000000..a43bd408f --- /dev/null +++ b/doc/bugs/Remove_redirect_pages_from_inline_pages.mdwn @@ -0,0 +1,15 @@ +[[!tag bugs wishlist]] + + +I accidentally made a typo spelling "surprises" and changed my URL from + + +to + + +Using the meta redir. However the meta redir now appears in the index of + +Any ideas how to handle this situation? + +> Well, you can adjust the inline's pagespec to exclude it, or even tag it +> with a tag that the pagespec is adjusted to exclude. --[[Joey]] diff --git a/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__.mdwn b/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__.mdwn new file mode 100644 index 000000000..e93f4e546 --- /dev/null +++ b/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__.mdwn @@ -0,0 +1,47 @@ +Saving a wiki page in ikwiki or +ikiwiki --setup wiki.setup --rebuild takes a **dozen minutes** on a tiny tiny wiki (10 user-added pages)! + +I profiled ikiwiki with [[!cpan Devel::SmallProf]] : see [[users/mathdesc]] for details. + +And I came to the conclusion that [[plugins/filecheck]] on attachment was the only cause. +It always go the fallback code using time-consuming file even there it's look like it's +not successful. + +
+ # Get the mime type.
+        #
+        # First, try File::Mimeinfo. This is fast, but doesn't recognise
+        # all files.
+        eval q{use File::MimeInfo::Magic};                    
+        my $mimeinfo_ok=! $@;                                     
+        my $mimetype;
+        if ($mimeinfo_ok) {
+                my $mimetype=File::MimeInfo::Magic::magic($file);
+        }                                                         
+        
+        # Fall back to using file, which has a more complete
+        # magic database.
+        if (! defined $mimetype) {
+                open(my $file_h, "-|", "file", "-bi", $file); 
+                $mimetype=<$file_h>;                                 
+                chomp $mimetype;                            
+                close $file_h;                   
+        }
+        if (! defined $mimetype || $mimetype !~s /;.*//) {
+                # Fall back to default value.
+                $mimetype=File::MimeInfo::Magic::default($file)
+                        if $mimeinfo_ok; 
+                if (! defined $mimetype) {
+                        $mimetype="unknown";
+                }                                                  
+        }        
+
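The culprit is already visible in the excerpt above: the `my` inside the `if ($mimeinfo_ok)` block declares a new lexical `$mimetype`, so the value returned by File::MimeInfo::Magic is discarded and the slow `file -bi` fallback runs for every file checked. This is the bug the resolution below refers to. A minimal sketch of the corrected assignment, assuming the rest of `match_mimetype` stays as quoted:

	eval q{use File::MimeInfo::Magic};
	my $mimeinfo_ok=! $@;
	my $mimetype;
	if ($mimeinfo_ok) {
		# assign to the existing $mimetype rather than declaring a new one
		$mimetype=File::MimeInfo::Magic::magic($file);
	}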
+ +I found on [[plugins/filecheck/discussion/]] what [[users/DavidBremner/]] described as : +> no way to detect text/plain using File::MimeInfo::Magic::magic() +But I can't figure out if my issue is boarder and includes this or not.. + +Any ideas , solve :) more that welcome. + +> [[done]], as isbear noted in [[discussion]], there was a bug that +> prevented File::MimeInfo::Magic from ever being used. --[[Joey]] diff --git a/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__/discussion.mdwn b/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__/discussion.mdwn new file mode 100644 index 000000000..629aba71e --- /dev/null +++ b/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__/discussion.mdwn @@ -0,0 +1,141 @@ +##Foreword : +Disabling of filecheck is not actually possible because btw it cause the attachment.pm to malfunction and +any of pagespec that could contain a *mimetype* condition. + +attachment.pm imports "statically" filecheck so actually disabling it should be *interdicted* . + +
+sub import {
+        add_underlay("attachment");
+        add_underlay("javascript");
+        add_underlay("jquery");
+        hook(type => "getsetup", id => "attachment", call => \&getsetup);
+        hook(type => "checkconfig", id => "attachment", call => \&checkconfig);
+        hook(type => "formbuilder_setup", id => "attachment", call => \&formbuilder_setup);
+        hook(type => "formbuilder", id => "attachment", call => \&formbuilder, last => 1);
+        IkiWiki::loadplugin("filecheck");
+}
+
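Being loaded unconditionally matters because the attachment checks are expressed as filecheck pagespec tests. As an illustration only (the values here are made up), a setup fragment like the following relies on those tests and would stop working if filecheck were disabled:

	# ikiwiki.setup fragment, illustrative values
	allowed_attachments => 'mimetype(image/*) and maxsize(500kb)',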
+ +---- + +## How bad is it ? + +So I tried on three pages to inline !mimetype(image/*) while I allowed attachment of mimetype(image/*) + +My profiling tests in the bug report shows that most of the time is spend in the "Fallback using file" block code, +I tried to comment that block and see how it'll perform. Obviously this is much much faster ... but is the mimetype +discovered using only *File::MimeInfo* ? + + +Dumping some strings before return to STDERR, rebuilding . This is just a [[!toggle id="code-test" text="dumpdebug adding"]] + +[[!toggleable id="code-test" text=""" +
+sub match_mimetype ($$;@) {
+        my $page=shift;
+        my $wanted=shift;
+
+        my %params=@_;
+        my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page});
+        if (! defined $file) {
+                return IkiWiki::ErrorReason->new("file does not exist");
+        }
+
+        # Get the mime type.
+        #
+        # First, try File::Mimeinfo. This is fast, but doesn't recognise
+        # all files.
+        eval q{use File::MimeInfo::Magic};
+        my $mimeinfo_ok=! $@;
+        my $mimetype;
+        print STDERR " --- match_mimetype (".$file.")\n";
+        if ($mimeinfo_ok) {
+                my $mimetype=File::MimeInfo::Magic::magic($file);
+        }
+
+        # Fall back to using file, which has a more complete
+        # magic database.
+        #if (! defined $mimetype) {
+        #       open(my $file_h, "-|", "file", "-bi", $file);
+        #       $mimetype=<$file_h>;
+        #       chomp $mimetype;
+        #       close $file_h;
+        #}
+
+        if (! defined $mimetype || $mimetype !~s /;.*//) {
+                # Fall back to default value.
+                $mimetype=File::MimeInfo::Magic::default($file)
+                        if $mimeinfo_ok;
+                if (! defined $mimetype) {
+                        $mimetype="unknown";
+                }
+        }
+
+        my $regexp=IkiWiki::glob2re($wanted);
+        if ($mimetype!~$regexp) {
+                 print STDERR " xxx MIME unknown ($mimetype - $wanted - $regexp ) \n";
+                return IkiWiki::FailReason->new("file MIME type is $mimetype, not $wanted");
+        }
+        else {
+                print STDERR " vvv MIME found\n";
+                return IkiWiki::SuccessReason->new("file MIME type is $mimetype");
+        }
+}
+
+"""]] + +The results dump to stderr (or a file called... 'say *mime*) looks like this : +
+--- match_mimetype (/usr/share/ikiwiki/attachment/ikiwiki/jquery.fileupload-ui.js)
+ xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$) )
+ --- match_mimetype (/usr/share/ikiwiki/locale/fr/directives/ikiwiki/directive/fortune.mdwn)
+ xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$) )
+ --- match_mimetype (/usr/share/ikiwiki/locale/fr/basewiki/shortcuts.mdwn)
+ xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$) 
+ --- match_mimetype (/usr/share/ikiwiki/smiley/smileys/alert.png)
+ xxx MIME unknown (application/octet-stream - image/* - (?i-xsm:^image\/.*$) )
+ --- match_mimetype (/usr/share/ikiwiki/attachment/ikiwiki/images/ui-bg_flat_75_ffffff_40x100.png)
+ xxx MIME unknown (application/octet-stream - image/* - (?i-xsm:^image\/.*$) 
+
+ +--- prepend signals the file on analysis
+xxx prepend signals a returns failure : mime is unknown, the match is a failure
+vvv prepend signals a return success.
+ + +This is nasty-scary results ! Something missed me or this mime-filecheck is plain nuts ? + +*Question 1* : How many files have been analysed : **3055** (yet on a tiny tiny wiki) +
grep "^ --- " mime | wc -l
+3055
+
+ +*Question 2* : How many time it fails : *all the time* +
+ grep "^ xxx " mime | wc -l
+3055
+
+ +*Question 1bis* : Doh btw , how many files have been re-analysed ? ** 2835 ** OMG !! +
grep "^ --- " mime | sort -u | wc -l
+220
+
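A rough sketch of the per-file caching suggested in the conclusion below, purely illustrative (the fix that actually shipped was removing the stray `my`, as noted in the parent bug):

	my %mimetype_cache;
	sub cached_mimetype {
		my $file=shift;
		return $mimetype_cache{$file} if exists $mimetype_cache{$file};
		eval q{use File::MimeInfo::Magic};
		return $mimetype_cache{$file} = $@ ? undef
			: File::MimeInfo::Magic::magic($file);
	}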
+ +## Conclusion + +- Only the system command *file -bi* works. While it is **should** be easy on the cpu , it's also hard on the I/O -> VM :( +- Something nasty with the mime implementation and/or my system configuration -> Hints ? :D +- Need to cache during the rebuild : a same page needs not being rechecked for its mime while it's locked ! + + +--mathdesc + +> > if ($mimeinfo_ok) { +> > my $mimetype=File::MimeInfo::Magic::magic($file); +> > } +> +> That seems strange to me, `my` restricts scope of $mimetype to enclosing if block, thus, assigned value will be dropped - I think, it is the problem. +> Try removing that stray `my`. +> +> --isbear diff --git a/doc/bugs/Underscores_in_links_don__39__t_appear.mdwn b/doc/bugs/Underscores_in_links_don__39__t_appear.mdwn new file mode 100644 index 000000000..b25dfb7fe --- /dev/null +++ b/doc/bugs/Underscores_in_links_don__39__t_appear.mdwn @@ -0,0 +1,18 @@ +Observed behavior: + +When I create a link like \[[cmd_test]] , the link appears as 'cmd test'. + +Expected behavior: + +I would like to be able to create links with underscores. I realize this is a feature, and I searched for ways to escape the underscore so it would appear, but I didn't find any. + +> as a workaround, you can use \[[cmd\_\_95\_\_test|cmd_test]] (which will link to a page named "cmd test" at the url location "cmd\_test") or \[[cmd\_\_95\_\_test]] (which will link to a page named "cmd\_test" at the url location "cmd\_\_95\_\_test"). i would, from my limited understanding of ikiwiki internals, consider the bug valid, and suggest that +> +> * explicit link text be not subject to de-escaping (why should it; this would be the short term solution) +> * escaped page names never be used in user visible parts of ikiwiki (in my opinion, a user should not need to know about those internals, especially as they are configuration dependant (wiki_file_regexp)) +> +> note that in [[ikiwiki/wikilink]], that very behavior is documented; it says that "\[[foo\_bar|Sandbox]]" will show as "foo bar". (although you can't tell that apart from "foo\_bar" easily because it's a hyperlink). +> +> i assume that this behavior stems from times when wikilinks and [[ikiwiki/directive]]s were not distinguished by \[[ vs \[[! but by the use of whitespace in directives, so whitespace had to be avoided in wikilinks. +> +> --[[chrysn]] diff --git a/doc/bugs/W3MMode_still_uses_http:__47____47__localhost__63__.mdwn b/doc/bugs/W3MMode_still_uses_http:__47____47__localhost__63__.mdwn index 3c28e379b..34eecef8c 100644 --- a/doc/bugs/W3MMode_still_uses_http:__47____47__localhost__63__.mdwn +++ b/doc/bugs/W3MMode_still_uses_http:__47____47__localhost__63__.mdwn @@ -22,3 +22,13 @@ Of course, the next time I rerun ikiwiki --setup, it will overwrite my wrapper-w I made a logfile of all the args, env, and stdin/stdout to/from my wrapper. If you're interested, I'll email it to you. I wasn't able to attach it here. -- [[terry|tjgolubi]] + +I confirm that the supplied w3mmode setup appears not to work. When I try to edit a page and save it, w3m tries to access an URL beginning http://localhost/ . The HTML source of the edit page contains a BASE URL beginning with http://localhost. It should not. Maybe this is a result of changes a while back, where use of absolute URLs was enforced in various places in Ikiwiki. + +-- Martin + +The problem is that IkiWiki::CGI::cgitemplate() and IkiWiki::CGI::redirect() use Perl's CGI::url() to determine the absolute URL of the CGI script when it is being executed. 
url() generates an URL beginning http://localhost. As w3m's serverless CGI mode is rather unusual, presumably there's no provision for the URL of a CGI script beginning file:///, even if there's a way to specify that. + +A quick workaround might be to force the use of $config{url} instead of $cgi->url as a base for URLs when w3mmode is set. + +-- Martin diff --git a/doc/bugs/__91__SOLVED__93___Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8.mdwn b/doc/bugs/__91__SOLVED__93___Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8.mdwn new file mode 100644 index 000000000..7282a71b8 --- /dev/null +++ b/doc/bugs/__91__SOLVED__93___Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8.mdwn @@ -0,0 +1,11 @@ +import os +os.environment['LANG'] = 'it_IT.utf-8' + +Suona plausibile? + +[GitHub pykipandoc](https://github.com/temmen/pykipandoc) -- Temmen + +> The place to put contrib plugins is in [[plugins/contrib]]. +> +> Closing this bug report as whatever it is that was fixed is apparently not an ikiwiki +> bug.. I guess. [[done]] --[[Joey]] diff --git a/doc/bugs/Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8/discussion.mdwn b/doc/bugs/__91__SOLVED__93___Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8/discussion.mdwn similarity index 100% rename from doc/bugs/Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8/discussion.mdwn rename to doc/bugs/__91__SOLVED__93___Pandoc_plugin_and_UTF-8:_IkiWiki_and_UTF-8/discussion.mdwn diff --git a/doc/bugs/bug_in_cgiurl_port.mdwn b/doc/bugs/bug_in_cgiurl_port.mdwn new file mode 100644 index 000000000..373657814 --- /dev/null +++ b/doc/bugs/bug_in_cgiurl_port.mdwn @@ -0,0 +1,15 @@ +I think there's a bug in the code that determines if the cgiurl is relative +to the url. If one has a different port than the other, they're not +relative, and I hear Fil encountered an issue where the wrong port was then +used. --[[Joey]] + +> I tested, setting cgiurl to a nonstandard port. After rebuilding, +> pages used the full url. So I don't see a bug here, or am missing +> something from my memory of the report (which was done the bad way, on +> IRC). [[done]] --[[Joey]] + +> > Sorry about wittering on IRC instead of reporting proper bugs. +> > +> > The setup I have is nginx in front of apache, so that nginx is listening on port 80, apache is on port 81, and ikiwiki is being served by apache. After upgrading to 3.20120203 (backported to squeeze) I found that the URLs in the edit page all have the port set as :81 ... but now that I look at it more closely, that is the case for several ikiwiki-hosting controlled sites, but not for a few other sites that are also on the same machine, so it must be some difference between the settings for the sites, either in ikiwiki, or apache, or perhaps even nginx. Anyway, on the affected sites, explicitly including a port :80 in the cgiurl fixes the problem. + +> > So, for the moment, this bug report is a bit useless, until I find out what is causing the ikiwiki-hosting sites to be beffuddled, so it should probably stay closed -[[fil]] diff --git a/doc/bugs/cannot_clone_documented_git_repo.mdwn b/doc/bugs/cannot_clone_documented_git_repo.mdwn new file mode 100644 index 000000000..4f2ec66f3 --- /dev/null +++ b/doc/bugs/cannot_clone_documented_git_repo.mdwn @@ -0,0 +1,16 @@ + smcv@vasks:~$ git clone git://git.ikiwiki.info/ + Cloning into git.ikiwiki.info... + fatal: read error: Connection reset by peer + +I tried this from a UK consumer ISP, my virtual server in the +UK, and vasks (aka alioth.debian.org) in the Netherlands, +with the same results. 
I can't update my clone from `origin` +either; for the moment I'm using the github mirror instead. +--[[smcv]] + +> Strange.. The git-daemon was not running, but one child was running +> waiting on an upload-pack, but not accepting new connections. Nothing +> in the logs about what happened to the parent. The monitor that checks +> services are running was satisfied with the child.. I've made it +> restart if the parent pid is no longer running, which should avoid +> this problem in the future. --[[Joey]] [[done]] diff --git a/doc/bugs/conditional_preprocess_during_scan.mdwn b/doc/bugs/conditional_preprocess_during_scan.mdwn index 23b9fd2cc..1ba142331 100644 --- a/doc/bugs/conditional_preprocess_during_scan.mdwn +++ b/doc/bugs/conditional_preprocess_during_scan.mdwn @@ -1,4 +1,4 @@ -[[!template id=gitbranch branch=GiuseppeBilotta/scanif author="Giuseppe Bilotta"]] +[[!template id=gitbranch branch=GiuseppeBilotta/scanif author="[[GiuseppeBilotta]]"]] When a directive that should be run during scan preprocessing is inside an if directive, it doesn't get called because the if preprocessing does diff --git a/doc/bugs/definition_lists_should_be_bold.mdwn b/doc/bugs/definition_lists_should_be_bold.mdwn new file mode 100644 index 000000000..a72206b8c --- /dev/null +++ b/doc/bugs/definition_lists_should_be_bold.mdwn @@ -0,0 +1,27 @@ +Definition lists do not look great here... + +Here is an example. + +
+<dl>
+<dt>this is a term</dt>
+<dd>and this is its definition.</dd>
+</dl>
+
+(This wiki doesn't support Markdown's extended definition lists, but still, this is valid markup.)
+
+I believe `<dt>
` should be made bold. I have added this to my `local.css`, and I would hate to add this all the time forever: + + /* definition lists look better with the term in bold */ + dt + { + font-weight: bold; + } + +:) How does that look? I can provide a patch for the base wiki if you guys really want... ;) -- [[anarcat]] + +> What you dislike seems to be the default rendering of definition lists by +> browsers. I don't think it's ikiwiki's place to override browser defaults +> for standard markup in the document body, at least not in the default +> antitheme. --[[Joey]] + +> > How about in the actiontab theme then? :) diff --git a/doc/bugs/feeds_get_removed_in_strange_conditions.mdwn b/doc/bugs/feeds_get_removed_in_strange_conditions.mdwn new file mode 100644 index 000000000..deec208ba --- /dev/null +++ b/doc/bugs/feeds_get_removed_in_strange_conditions.mdwn @@ -0,0 +1,57 @@ +For some time now, in circumstances that I've had enormous troubles +trying to track, I've seen feeds getting removed by ikiwiki when +apparently unrelated pages got changed, with the message: + +> removing somepath/somepage/somefeed, no longer built by some/unrelated/page + +I've finally been able to find how and why it happens. The situation is +the following: + +* page A has an inline directive that (directly) generates a feed F +* page B inlines A, thus (indirectly) generating F again +* page B is rendered after page A + +The feed removal happens when changes are made to prevent B from +inlining A; for example, because B is a tag page and A is untagged B, or +because B includes A through a pagespec that no longer matches A. In +this case, this happens: + +* page A is built, rendering F +* page B is built, _not_ rendering F, which it used to render +* F is removed because it is not built by B anymore + +Note that although this issue is triggered (for me) from the changes I +proposed last year to allow feed generation from nested inlines +coalescing it to be page-based instead of destpage-based +(bb8f76a4a04686def8cc6f21bcca80cb2cc3b2c9 and +72c8f01b36c841b0e83a2ad7ad1365b9116075c5) there is potential for it +popping up in other cases. + +Specifically, the logic for the removal of dependent pages currently +relies on the assumption that each output has a single generator. My +changes caused this assumption to be violated, hence the error, but +other cases may pop up for other plugins in the future. + +I have a [patch] fixing this issue (for feeds specifically, i.e. only +the problem I am actually having) on top of my `mystuff` branch, but +since that also has heaps of other unrelated stuff, you may want to just +[pick it from my gitweb][gw]. + +[gw]: (http://git.oblomov.eu/ikiwiki/patch/671cb26cf50643827f258270d9ac8ad0b1388a65) + +The patch changes the `will_render()` for feeds to be based on the page +rather than on the destpage, matching the fact that for nested inlines +it's the inner page that is ultimately responsible for generating the +feed. + +I've noticed that it requires at least _two_ full rebuilds before the +index is again in a sensible state. (On the first rebuild, all feeds +from nested inlines are actually _removed_.) + +While the patch is needed because there are legitimate cases in which +nested feeds are needed (for example, I have an index page that inlines +index pages for subsection of my site, and I want _those_ feed from +being visible), there are other cases when one may want to skip feed +generation from nested inlines. 
+ +--[[GiuseppeBilotta]] diff --git a/doc/bugs/find_gnuism.mdwn b/doc/bugs/find_gnuism.mdwn index 89eee7816..65ee10657 100644 --- a/doc/bugs/find_gnuism.mdwn +++ b/doc/bugs/find_gnuism.mdwn @@ -3,3 +3,5 @@ Whoops, somehow missed a spot on the last incarnation of this branch. `find -not` doesn't work on NetBSD and `find !` runs equivalently for me. Fixed in 9659272e25fac37f896991dab01a05b4f4c85ccb. + +> [[done]] --[[Joey]] diff --git a/doc/bugs/graphviz_demo_generates_empty_graph.mdwn b/doc/bugs/graphviz_demo_generates_empty_graph.mdwn new file mode 100644 index 000000000..5b96f148e --- /dev/null +++ b/doc/bugs/graphviz_demo_generates_empty_graph.mdwn @@ -0,0 +1,15 @@ +The following code in our sandbox generates an empty graph: + + [[!graph src="""" + google [ href="http://google.com/" ] + sandbox [ href=\[[SandBox]] ] + help [ href=\[[ikiwiki/formatting]] ] + newpage [ href=\[[NewPage]] ] + + google -> sandbox -> help -> newpage -> help -> google; + """"]] + +It is the exact same thing that on the [[ikiwiki/directive/graph/]] directive documentation, from the [[plugins/graphviz]] plugin. This is ikiwiki 3.20120203 on Debian wheezy and graphviz is installed (2.26.3-10). Note that the first demo actually works. See --[[anarcat]] + +> Looking at the example shows too many double quoted. [[fixed|done]] +> --[[Joey]] diff --git a/doc/bugs/ipv6_address_in_comments.mdwn b/doc/bugs/ipv6_address_in_comments.mdwn new file mode 100644 index 000000000..90391650a --- /dev/null +++ b/doc/bugs/ipv6_address_in_comments.mdwn @@ -0,0 +1,19 @@ +If I make a comment from an ipv4 address +I see the commenter's ipv4 address logged in the comment file. + +If I make a comment from an ipv6 address +I see nothing. + +There is a sanity check in /usr/share/perl5/IkiWiki/Plugin/comments.pm +line 447 (according to today's version) there is an ipv4 specific regexp. + +I removed the regexp and used the value without this added check and it fixed +the problem for me. Not sure if this is the best solution. --[[cstamas]] + +[[patch]] + +[[!tag ipv6]] + +> [[done]] --[[Joey]] + +> > Thank you! --[[cstamas]] diff --git a/doc/bugs/jquery-ui.min.css_missing_some_image_files.mdwn b/doc/bugs/jquery-ui.min.css_missing_some_image_files.mdwn new file mode 100644 index 000000000..dd026f4ec --- /dev/null +++ b/doc/bugs/jquery-ui.min.css_missing_some_image_files.mdwn @@ -0,0 +1,14 @@ +This is very minor. Noticed in nginx's logs that jquery-ui.min.css (the attachment plugin uses this) keeps referencing some png files that are not available in public_html/mywiki/ikiwiki/images/ These should be included in underlays/attachment/ikiwiki/images/ in the source repo and seem to be copied from /usr/local/share/ikiwiki/attachment/ikiwiki/images/ when I compile a new wiki. The complete list of images jquery-ui.min.css is looking for can be found here. https://github.com/jquery/jquery-ui/tree/1.8.14/themes/base/images + +> Do you have a list of files that are *actually* used when ikiwiki is +> running? I don't want to include a lot of files that jquery only +> uses in other situations. The currently included files are exactly those +> that I see it try to use. --[[Joey]] + +Fair enough. These 3 files are the only ones that appear consistently in nginx error logs. +ui-bg_glass_75_dadada_1x400.png +ui-icons_454545_256x240.png +ui-bg_glass_95_fef1ec_1x400.png + +> Hmm, that's most of the missing ones. I just added them all. 
[[done]] +> --[[Joey]] diff --git a/doc/bugs/linkmap_displays_underscore_escapes.mdwn b/doc/bugs/linkmap_displays_underscore_escapes.mdwn new file mode 100644 index 000000000..f74ca5119 --- /dev/null +++ b/doc/bugs/linkmap_displays_underscore_escapes.mdwn @@ -0,0 +1,18 @@ +[[ikiwiki/directive/linkmap]]s display the file name instead of the pagetitle, showing unsightly underscore escapes and underscores instead of blanks to users. + +the attached [[!taglink patch]] fixes this; from its commit message: + + display the pagetitle() in linkmaps + + without this patch, linkmaps display underscores and underscore escape + sequences in the rendered output. + + this introduces a pageescape function, which invoces pagetitle() to get + rid of underscore escapes and wraps the resulting utf8 string + appropriately for inclusion in a dot file (using dot's html encoding + because it can represent the '\"' dyad properly, and because it doesn't + need special-casing of newlines). + +the output will look much better (at least in my wikis) with the "[[bugs/pagetitle function does not respect meta titles]]" issue fixed. + +the patch is stored in [[the patch.pl]] as created by git-format-patch. (btw, what's the preferred way to send patches, apart from creating a git branch somewhere?) diff --git a/doc/bugs/linkmap_displays_underscore_escapes/the_patch.pl b/doc/bugs/linkmap_displays_underscore_escapes/the_patch.pl new file mode 100644 index 000000000..6b56c553e --- /dev/null +++ b/doc/bugs/linkmap_displays_underscore_escapes/the_patch.pl @@ -0,0 +1,68 @@ +From efbb1121ffdc146f5c9a481a51f23ad151b9f240 Mon Sep 17 00:00:00 2001 +From: chrysn +Date: Thu, 15 Mar 2012 14:38:42 +0100 +Subject: [PATCH] display the pagetitle() in linkmaps + +without this patch, linkmaps display underscores and underscore escape +sequences in the rendered output. + +this introduces a pageescape function, which invoces pagetitle() to get +rid of underscore escapes and wraps the resulting utf8 string +appropriately for inclusion in a dot file (using dot's html encoding +because it can represent the '\"' dyad properly, and because it doesn't +need special-casing of newlines). +--- + IkiWiki/Plugin/linkmap.pm | 17 +++++++++++++++-- + 1 files changed, 15 insertions(+), 2 deletions(-) + +diff --git a/IkiWiki/Plugin/linkmap.pm b/IkiWiki/Plugin/linkmap.pm +index ac26e07..b5ef1a1 100644 +--- a/IkiWiki/Plugin/linkmap.pm ++++ b/IkiWiki/Plugin/linkmap.pm +@@ -5,6 +5,7 @@ use warnings; + use strict; + use IkiWiki 3.00; + use IPC::Open2; ++use HTML::Entities; + + sub import { + hook(type => "getsetup", id => "linkmap", call => \&getsetup); +@@ -22,6 +23,18 @@ sub getsetup () { + + my $mapnum=0; + ++sub pageescape { ++ my $item = shift; ++ # encoding explicitly in case ikiwiki is configured to accept <> or & ++ # in file names ++ my $title = pagetitle($item, 1); ++ # it would not be necessary to encode *all* the html entities (<> would ++ # be sufficient, &" probably a good idea), as dot accepts utf8, but it ++ # isn't bad either ++ $title = encode_entities($title); ++ return("<$title>"); ++} ++ + sub preprocess (@) { + my %params=@_; + +@@ -63,7 +76,7 @@ sub preprocess (@) { + my $show=sub { + my $item=shift; + if (! $shown{$item}) { +- print OUT "\"$item\" [shape=box,href=\"$mapitems{$item}\"];\n"; ++ print OUT pageescape($item)." 
[shape=box,href=\"$mapitems{$item}\"];\n"; + $shown{$item}=1; + } + }; +@@ -74,7 +87,7 @@ sub preprocess (@) { + foreach my $endpoint ($item, $link) { + $show->($endpoint); + } +- print OUT "\"$item\" -> \"$link\";\n"; ++ print OUT pageescape($item)." -> ".pageescape($link).";\n"; + } + } + print OUT "}\n"; +-- +1.7.9.1 diff --git a/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn b/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn new file mode 100644 index 000000000..26945ee07 --- /dev/null +++ b/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn @@ -0,0 +1,34 @@ +The [[ikiwiki/directive/listdirectives]]` directive doesn't register a link between the page and the subpages. This is a problem because then the [[ikiwiki/directive/orphans]] directive then marks the directives as orphans... Maybe it is a but with the orphans directive however... A simple workaround is to exclude those files from the orphans call... --[[anarcat]] + +> There's a distinction between wikilinks (matched by `link()`, +> `backlink()` etc.) and other constructs that produce a +> hyperlink. Some directives count as a wikilink (like `tag`) +> but many don't (notably `inline`, `map`, `listdirectives`, +> and `orphans` itself). As documented in +> [[ikiwiki/directive/orphans]], orphans will tend to list +> pages that are only matched by inlines/maps, too. +> +> The rule of thumb seems to be that a link to a particular +> page counts as a wikilink, but a directive that lists +> pages matching some pattern does not; so I think +> `listdirectives` is working as intended here. +> `orphans` itself obviously shouldn't count as a wikilink, +> because that would defeat the point of it :-) +> +> Anything that uses a [[ikiwiki/pagespec]] to generate links, +> like `inline` and `map`, can't generate wikilinks, because +> wikilinks are gathered during the scan phase, and pagespecs +> can't be matched until after the scan phase has finished +> (otherwise, it'd be non-deterministic whether all wikilinks +> had been seen yet, and `link()` in pagespecs wouldn't work +> predictably). +> +> I suggest just using something like: +> +> \[[!orphans pages="* and !blog/* and !ikiwiki/directive/*"]] +> +> This wiki's example of listing [[plugins/orphans]] has a +> more elaborate pagespec, which avoids bugs, todo items etc. +> as well. +> +> --[[smcv]] diff --git a/doc/bugs/must_save_before_uploading_more_than_one_attachment.mdwn b/doc/bugs/must_save_before_uploading_more_than_one_attachment.mdwn index 20d5dc8e6..bd5ddc6d5 100644 --- a/doc/bugs/must_save_before_uploading_more_than_one_attachment.mdwn +++ b/doc/bugs/must_save_before_uploading_more_than_one_attachment.mdwn @@ -26,7 +26,19 @@ Is this a problem on my site or does anyone else see this? >>> The right fix would probably be for `do=create` to allow replacing a page >>> in the transient underlay without complaining (like the behaviour that ->>> `do=edit` normally has). That wouldn't help you unless [[plugins/autoindex]] +>>> `do=edit` normally has). + +>>>> ... which it turns out it already does. --[[smcv]] + +>>> That wouldn't help you unless [[plugins/autoindex]] >>> defaulted to making transient pages (`autoindex_commit => 0`), but if we >>> can fix [[removal_of_transient_pages]] then maybe that default can change? >>> --[[smcv]] + +>>>> It turns out that with `autoindex_commit => 0`, the failure mode is +>>>> different. The transient map is created when you attach the +>>>> attachment. 
When you save the page, it's written into the srcdir, +>>>> the map is deleted from the transientdir, and the ctime/mtime +>>>> in the indexdb are those of the file in the srcdir, but for some +>>>> reason the HTML output isn't re-generated (despite a refresh +>>>> happening). --[[smcv]] diff --git a/doc/bugs/nonexistent_pages_in_inline_pagenames_do_not_add_a_dependency.mdwn b/doc/bugs/nonexistent_pages_in_inline_pagenames_do_not_add_a_dependency.mdwn new file mode 100644 index 000000000..486be0363 --- /dev/null +++ b/doc/bugs/nonexistent_pages_in_inline_pagenames_do_not_add_a_dependency.mdwn @@ -0,0 +1,44 @@ +In commit aaa72a3a8, Joey noted: + +> bestlink returns '' if no existing page matches a link. This propigated +> through inline and other plugins, causing uninitialized value warnings, and +> in some cases (when filecheck was enabled) making the whole directive fail. +> +> Skipping the empty results fixes that, but this is papering over another +> problem: If the missing page is later added, there is not dependency +> information to know that the inline needs to be updated. Perhaps smcv will +> fix that later. + +Potential ways this could be addressed: + +* Add a presence dependency on everything the reference could match: + so if the `inline` is on `a/b/c` and the missing page is `m`, + add a `$depends_simple` `$DEPEND_PRESENCE` dependency on `a/b/c/m`, + `a/b/m`, `a/m`, `m` and (if configured) `$config{userdir}/m` + +* Make the page names in `\[[!inline pagenames=...]]` count as wikilinks, + changing the behaviour of `link()` and backlinks, but causing appropriate + rebuilds via the special cases in `IkiWiki::Render` + +* Extend the special cases in `IkiWiki::Render` to consider a superset of + wikilinks, to which `pagenames` would add its named pages, without + affecting `link()` and backlinks + +(Note that `\[[!inline pages=...]]` cannot count as wikilinks, because +pagespecs can contain `link()`, so can't be evaluated until we know what +wikilinks exist, at which point it's too late to add more wikilinks.) + +I think the presence dependency is probably the cleanest approach? +--[[smcv]] + +> I think it was possibly a mistake to use wikilink style lookup for +> `pagenames`. --[[Joey]] + +[[!tag patch]] [[!template id=gitbranch branch=smcv/literal-pagenames author="[[smcv]]"]] +>> I used the linking rules to make references to +>> "nearby" pages convenient, but if you'd prefer "absolute" +>> semantics, my `ready/literal-pagenames` branch does that. For +>> my main use-case for `pagenames` ([[plugins/contrib/album]]) +>> it's fine either way. --[[smcv]] + +>>> Ok, [[merged|done]]. I think it's more consistent this way. --[[Joey]] diff --git a/doc/bugs/opendiscussion_should_respect_the_discussion_option.mdwn b/doc/bugs/opendiscussion_should_respect_the_discussion_option.mdwn index e4bc736e3..cacd2b73b 100644 --- a/doc/bugs/opendiscussion_should_respect_the_discussion_option.mdwn +++ b/doc/bugs/opendiscussion_should_respect_the_discussion_option.mdwn @@ -1,6 +1,11 @@ +[[!template id=gitbranch branch=smcv/ready/less-open author="[[smcv]]"]] +[[!tag patch]] + The [[plugins/opendiscussion]] plugin allows pages named according to the `discussionpage` setting to be edited anonymously, even if `discussion => 0` is set. (If it respected the `discussion` option, the combination of `opendiscussion` and `moderatedcomments` might be good for blogs.) 
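A minimal sketch of the idea (the real fix is in the branch above; this only
paraphrases the plugin's `canedit` hook to show what honouring
`$config{discussion}` would look like):

    sub canedit ($$$) {
        my $page=shift;
        my $cgi=shift;
        my $session=shift;

        # only open up Discussion pages when discussion is enabled
        return "" if $config{discussion} &&
            $page=~/(\/|^)\Q$config{discussionpage}\E$/i;
        return "" if pagespec_match($page, "postcomment(*)");
        return undef;
    }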
+ +[[done]] --[[smcv]] diff --git a/doc/bugs/opendiscussion_should_respect_the_discussion_option/discussion.mdwn b/doc/bugs/opendiscussion_should_respect_the_discussion_option/discussion.mdwn new file mode 100644 index 000000000..a5c951671 --- /dev/null +++ b/doc/bugs/opendiscussion_should_respect_the_discussion_option/discussion.mdwn @@ -0,0 +1,26 @@ +This would be great to see fixed. It's perplexing to have discussion => 0 in my configuration, not have any discussion links on my site, but still be able to add a discussion page by URL hacking something like this: /cgi-bin/ikiwiki/ikiwiki.cgi?page=posts%2Fdiscussion&do=edit. + +spammers have figured that little trick out so I am consitently getting spammed checked into my git repository. + +I'm not really sure if this patch introduced other problems, but it seems to have fixed my site: + + 0 mcclelland@chavez:~/.ikiwiki/IkiWiki/Plugin$ diff -u /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm opendiscussion.pm + --- /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm 2012-05-07 11:31:24.000000000 -0400 + +++ opendiscussion.pm 2012-07-29 17:49:28.000000000 -0400 + @@ -25,7 +25,7 @@ + my $cgi=shift; + my $session=shift; + + - return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i; + + return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i && $config{discussion}; + return "" if pagespec_match($page, "postcomment(*)"); + return undef; + } + 1 mcclelland@chavez:~/.ikiwiki/IkiWiki/Plugin$ + +If libdir is configured to be ~/.ikiwiki in your ikiwiki.settings file, and you are running Debian, you can do the following: + + mkdir -p ~/.ikiwiki/IkiWiki/Plugin + cp /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm ~/.ikiwiki/IkiWiki/Plugin/ + +And then apply the patch above to ~/.ikiwiki/Ikiwiki/Plugin/opendiscussion.pm. diff --git a/doc/bugs/osm_KML_maps_do_not_display_properly_on_google_maps.mdwn b/doc/bugs/osm_KML_maps_do_not_display_properly_on_google_maps.mdwn new file mode 100644 index 000000000..2b20240c4 --- /dev/null +++ b/doc/bugs/osm_KML_maps_do_not_display_properly_on_google_maps.mdwn @@ -0,0 +1,14 @@ +[[!template id=gitbranch branch=anarcat/master author="[[anarcat]]"]] + +I know this sounds backwards, but it seems to me that the KML-generated map should be displayable on google maps. KML is the standard Google uses for google maps, and since we use it, we should interoperate with them. God knows why this is failing, but it is and should probably be fixed for the sake of interoperability: -- [[users/anarcat]] + +> The KML only needs a Document tag because it uses "shared styles" -- don't ask me what this is. Here is a [[patch]]: [[https://reseaulibre.deuxpi.ca/0001-Add-Document-tag-to-OSM-plugin-KML-output.patch]] --[[deuxpi]] + +> > I applied the patch to my master branch and tested it on the above URL: it works... mostly. The icons for the elements on the actual map seem incorrect (some are the proper icons, some others are the ugly default blue pin of google maps, weird) but I think this is a step in the right direction. Thus, this should be merged. -- [[anarcat]] + +>>> I've cherry-picked this patch, but from the description it does not +>>> sound "fixed" enough to close this bug. (OTOH, perhaps only google can +>>> fix it, so it people are happy with the state of affairs I won't insist +>>> this bug be left open.) --[[Joey]] + +> > > > I am happy with this right now, so let's mark this as [[done]]. I do agree this seems like a google bug, so let's move on. 
--[[anarcat]] diff --git a/doc/bugs/osm_KML_maps_icon_path_have_a_trailing_slash.mdwn b/doc/bugs/osm_KML_maps_icon_path_have_a_trailing_slash.mdwn new file mode 100644 index 000000000..0677d0e74 --- /dev/null +++ b/doc/bugs/osm_KML_maps_icon_path_have_a_trailing_slash.mdwn @@ -0,0 +1,32 @@ +This is not a problem on Apache webservers because they, oddly enough, ignore trailing slashes on paths (maybe some `PATH_INFO` magic, no idea). But basically, in our wiki, the paths to the icon tags are generated with a trailing slash. An excerpt of our [KML file](http://wiki.reseaulibre.ca/map/pois.kml): + + + +Notice the trailing `/` after the `icon.png`. This breaks display on nginx - the file that gets served isn't the icon, but the frontpage for some reason. I followed the [[setup instructions|tips/dot cgi]] for Nginx that I just had to write because there weren't any, so maybe I screwed up some part, but it does seem to me that the trailing slash is wrong regardless. + +(Also notice how the style tag is being turned over backwards by the HTML sanitizer here, cute. :P) + +I wrote a crude hack for this, but this strikes me as a similar problem to the one we found in [[bugs/osm linkto() usage breaks map rendering]]. However, I am at a loss how to fix this cleanly because we cannot `will_render()` the tag icons, as they are already generated out there! Weird. Anyways, here's the stupid [[patch]]: + +[[!format diff """ +diff --git a/IkiWiki/Plugin/osm.pm b/IkiWiki/Plugin/osm.pm +index a7baa5f..c9650d0 100644 +--- a/IkiWiki/Plugin/osm.pm ++++ b/IkiWiki/Plugin/osm.pm +@@ -192,6 +192,7 @@ sub process_waypoint { + } + } + $icon = urlto($icon, $dest, 1); ++ $icon =~ s!/*$!!; # hack - urlto shouldn't be appending a slash in the first place + $tag = '' unless $tag; + register_rendered_files($map, $page, $dest); + $pagestate{$page}{'osm'}{$map}{'waypoints'}{$name} = { +"""]] + +I'm not writing this to a branch out of sheer shame of my misunderstanding. ;) There also may be a workaround that could be done in Nginx too. --[[anarcat]] diff --git a/doc/bugs/osm_linkto__40____41___usage_breaks_map_rendering.mdwn b/doc/bugs/osm_linkto__40____41___usage_breaks_map_rendering.mdwn new file mode 100644 index 000000000..89c08b73c --- /dev/null +++ b/doc/bugs/osm_linkto__40____41___usage_breaks_map_rendering.mdwn @@ -0,0 +1,23 @@ +[[!template id=gitbranch branch=anarcat/master author="[[anarcat]]"]] + +Under some circumstances that remain unclear to me, the usage of `urlto()` in the revised version of the [[plugins/osm]] plugin break the map totally. The javascript console in Chromium tells me the following: + + GET http://mesh.openisp.ca/map/pois.kml/ 404 (Not Found) + +Indeed, that URL yields a 404. The proper URL is . --[[anarcat]] + +## Proposed solution + +The problem seems to be caused by `urlto()` being called for the `osm` +directive before the generated files are registered with `will_render()` +from the `waypoint` directive. Proposed patch adds a function that is +called from the `preprocess` hook for both directives that registers the +files. + +Here is a [[patch]] to IkiWiki/Plugin/osm.pm: + +--[[deuxpi]] + +I confirm the patch works, and I added it to my master branch. --[[anarcat]] + +> [[applied|done]]. Thanks guys. 
--[[Joey]] diff --git a/doc/bugs/osm_sometimes_looses_some_nodes.mdwn b/doc/bugs/osm_sometimes_looses_some_nodes.mdwn new file mode 100644 index 000000000..9de1b4e23 --- /dev/null +++ b/doc/bugs/osm_sometimes_looses_some_nodes.mdwn @@ -0,0 +1,5 @@ +I have heard repeated reports on that editing a page that has a waypoint in it will sometimes make that waypoint disappear from the main map. I have yet to understand why that happens or how, but multiple users have reported that. + +A workaround is to rebuild the whole wiki, although sometimes re-editing the same page will bring the waypoint back on the map. + +I have been able to reproduce this by simply creating a new node. It will not show up on the map until the wiki is rebuilt or the node is resaved. -- [[anarcat]] diff --git a/doc/bugs/pagetitle_function_does_not_respect_meta_titles.mdwn b/doc/bugs/pagetitle_function_does_not_respect_meta_titles.mdwn index c6e3cd4fd..15d28f989 100644 --- a/doc/bugs/pagetitle_function_does_not_respect_meta_titles.mdwn +++ b/doc/bugs/pagetitle_function_does_not_respect_meta_titles.mdwn @@ -279,3 +279,11 @@ So, looking at your meta branch: --[[Joey]] >>>> for the po plugin, because I want to merge the po plugin soon. >>>> If #2 gets tackled later, we will certianly have all kinds of fun. >>>> no matter what is done for the po plugin. --[[Joey]] + +>>>>> For the record: I've gotten used to the lack of this feature, +>>>>> and it now seems much less important to me than it was when +>>>>> initially developing the po plugin. So, I'm hereby officially +>>>>> removing this from my plate. If anyone else wants to start from +>>>>> scratch, or from my initial work, I'm happy to review the +>>>>> po-related part of things -- just drop me an email in this +>>>>> case. --[[intrigeri]] diff --git a/doc/bugs/removal_of_transient_pages.mdwn b/doc/bugs/removal_of_transient_pages.mdwn index 2667a2b83..4843b5900 100644 --- a/doc/bugs/removal_of_transient_pages.mdwn +++ b/doc/bugs/removal_of_transient_pages.mdwn @@ -25,3 +25,49 @@ pages, until this is fixed. --[[Joey]] >>>> to affect by web edits. The `-f` check seems rather redundant, >>>> surely if it's in `%pagesources` ikiwiki has already verified it's >>>> safe. --[[Joey]] + +---- + +[[!template id=gitbranch branch=smcv/ready/transient-rm author="[[Simon McVittie|smcv]]"]] + +Here's a branch. It special-cases the `$transientdir`, but in such a way +that the special case could easily be extended to other locations where +deletion should be allowed. + +It also changes `IkiWiki::prune()` to optionally stop pruning empty +parent directories at the point where you'd expect it to (for instance, +previously it would remove the `$transientdir` itself, if it turns out +to be empty), and updates callers. + +The new `prune` API looks like this: + + IkiWiki::prune("$config{srcdir}/$file", $config{srcdir}); + +with the second argument optional. I wonder whether it ought to look +more like `writefile`: + + IkiWiki::prune($config{srcdir}, $file); + +although that would be either an incompatible change to internal API +(forcing all callers to update to 2-argument), or being a bit +inconsistent between the one-and two-argument forms. Thoughts? + +--[[smcv]] + +> I've applied the branch as-is, so this bug is [[done]]. +> `prune` is not an exported API so changing it would be ok.. +> I think required 2-argument would be better, but have not checked +> all the call sites to see if the `$file` is available split out +> as that would need. 
--[[Joey]] + +[[!template id=gitbranch branch=smcv/ready/prune author="[[Simon McVittie|smcv]]"]] + +>> Try this, then? I had to make some changes to `attachment` +>> to make the split versions available. I suggest reviewing +>> patch-by-patch. +>> +>> I also tried to fix a related bug which I found while testing it: +>> the special case for renaming held attachments didn't seem to work. +>> (`smcv/wip/rename-held`.) Unfortunately, it seems that with that +>> change, the held attachment is committed to the `srcdir` when you +>> rename it, which doesn't seem to be the intention either? --[[smcv]] diff --git a/doc/bugs/renaming_a_page_destroyed_some_links.mdwn b/doc/bugs/renaming_a_page_destroyed_some_links.mdwn new file mode 100644 index 000000000..fd7a80bd4 --- /dev/null +++ b/doc/bugs/renaming_a_page_destroyed_some_links.mdwn @@ -0,0 +1,12 @@ +When renaming a page here, ikiwiki destroyed unrelated links from unrelated pages. You can see the effect [here](http://mesh.openisp.ca/recentchanges/#diff-dc8dfa96efd3a4d649f571c3aa776f20b3ce0131), or by checking out the git tree (`git://mesh.openisp.ca/ +`) and looking at commit `dc8dfa96efd3a4d649f571c3aa776f20b3ce0131`. + +The renamed page was `configuration/bat-hosts` to `configuration/batman/bat-hosts` and the deleted links were ``\[[AUR | https://aur.archlinux.org/]]` and `\[[CHANGELOG|http://svn.dd-wrt.com:8000/browser/src/router/batman-adv/CHANGELOG]]`. --[[anarcat]] + +> Nevermind that, that commit was unrelated to the rename and probably an operator error. - No, actually, I just reproduced this again - see [another example](http://mesh.openisp.ca/recentchanges/#diff-d67dc2f0fdc149b13122fd6cba887a01c693e949). + +>> Looks like these all involve the wacky wikilink form that includes an +>> external url in the link. Fixed rename code to know about those. +>> [[done]] --[[Joey]] + +>>> Phew!!! Thanks a *lot* for that one, it was really annoying! :) --[[anarcat]] diff --git a/doc/bugs/toc_displays_headings_from_sidebar.mdwn b/doc/bugs/toc_displays_headings_from_sidebar.mdwn new file mode 100644 index 000000000..469ca8a33 --- /dev/null +++ b/doc/bugs/toc_displays_headings_from_sidebar.mdwn @@ -0,0 +1,3 @@ +The [[/ikiwiki/directive/toc]] directive scrapes all headings from the page, including those in the sidebar. So, if the sidebar includes navigational headers, every page with a table of contents will display those navigational headers before the headers in that page's content. + +I'd like some way to exclude the sidebar from the table of contents. As discussed via Jabber, perhaps toc could have a config option to ignore headers inside a nav tag or a tag with id="sidebar". diff --git a/doc/bugs/trail_excess_dependencies.mdwn b/doc/bugs/trail_excess_dependencies.mdwn new file mode 100644 index 000000000..f806a62eb --- /dev/null +++ b/doc/bugs/trail_excess_dependencies.mdwn @@ -0,0 +1,95 @@ +I've just modified the trail plugin to use only presence, and not +content dependencies. Using content dependencies, particularly to the page +that defines the trail, meant that every time that page changed, *every* +page in the trail gets rebuilt. This leads to users setting up sites that +have horrible performance, if the trail is defined in, for example, the top +page of a blog. + +Unfortunatly, this change to presence dependencies has +introduced a bug. Now when an existing trail is removed, the pages in the +trail don't get rebuilt to remove the trail (both html display and state). + +> Actually, this particular case is usually OK. 
Suppose a trail `untrail` +> contains `untrail/a` (as is the case in the regression +> test I'm writing), and you build the wiki, then edit `untrail` to no +> longer be a trail, and refresh. `untrail` has changed, so it is +> rendered. Assuming that the template of either `untrail` or another +> changed page happens to contain the `TRAILS` variable (which is not +> guaranteed, but is highly likely), `I::P::t::prerender` +> is invoked. It notices that `untrail/a` was previously a trail +> member and is no longer, and rebuilds it with the diagnostic +> "building untrail/a, its previous or next page has changed". +> +> Strictly speaking, I should change `I::P::t::build_affected` +> so it calls `prerender`, so we're guaranteed to have done the +> recalculation. Fixed in my branch. --[[smcv]] + +I think that to fix this bug, the plugin should use a hook to +force rebuilding of all the pages that were in the trail, when +the trail is removed (or changed). + +> The case of "the trail is changed" is still broken: +> if the order of items changes, or the trail is removed, +> then the logic above means it's OK, but if you +> change the `\[[!meta title]]` of the trail, or anything else +> used in the prev/up/next bar, the items won't show that +> change. Fixed in my branch. --[[smcv]] + +There's a difficulty in doing that: The needsbuild hook runs before the scan +hook, so before it has a chance to see if the trail directive is still there. +It'd need some changes to ikiwiki's hooks. + +> That's what `build_affected` is for, and trail already used it. --s + +(An improvement in this area would probably simplify other plugins, which +currently abuse the needsbuild hook to unset state, to handle the case +where the directive that resulted in that state is removed.) + +I apologise for introducing a known bug, but the dependency mess was too +bad to leave as-is. And I have very little time (and regrettably, even less +power) to deal with it right now. :( --[[Joey]] + +[[!template id=gitbranch branch=smcv/ready/trail author="[[Simon_McVittie|smcv]]"]] +[[!tag patch]] + +> I believe my `ready/trail` branch fixes this. There are regression tests. +> +> Here is an analysis of how the trail pages interdepend. +> +> * If *trail* contains a page *member* which does exist, *member* depends +> on *trail*. This is so that if the trail directive is deleted from +> *trail*, or if *trail*'s "friendly" title or trail settings are changed, +> the trail navigation bar in *member* will pick up that change. This is +> now only a presence dependency, which isn't enough to make those happen +> correctly. [Edited to add: actually, the title is the only thing that +> can affect *member* without affecting the order of members.] +> +> * If *trail* contains consecutive pages *m1* and *m2* in that order, +> *m1* and *m2* depend on each other. This is so that if one's +> "friendly" title changes, the other is rebuilt. This is now only +> a presence dependency, which isn't enough to make those happen +> correctly. In my branch, I explicitly track the "friendly" title +> for every page that's edited and is involved in a trail somehow. +> +> * If *trail* has *member* in its `pagenames` but there is no page called +> *member*, then *trail* must be rebuilt if *member* is created. This +> was always a presence dependency, and is fine. +> +> In addition, the `trail` plugin remembers the maps +> { trail => next item in that trail } and { trail => previous item in +> that trail } for each page. 
If either changes, the page gets rebuilt +> by `build_affected`, with almost the same logic as is used to update +> pages that link to a changed page. My branch extends this to track the +> "friendly title" of each page involved in a trail, either by being +> the trail itself or a member (or both). +> +> I think it's true to say that the trail always depends on every member, +> even if it doesn't display them. This might mean that we can use +> "render the trail page" as an opportunity to work out whether any of +> its members are also going to need re-rendering? +> [Edited to add: actually, I didn't need this to be true, but I made the +> regression test check it anyway.] +> +> --[[smcv]] + +>>> Thanks **very** much! [[done]] --[[Joey]] diff --git a/doc/bugs/trail_shows_on_cgi_pages.mdwn b/doc/bugs/trail_shows_on_cgi_pages.mdwn new file mode 100644 index 000000000..74f329fbc --- /dev/null +++ b/doc/bugs/trail_shows_on_cgi_pages.mdwn @@ -0,0 +1,3 @@ +When commenting on, or I think editing, a page that uses the trail +plugin, the trail is displayed across the top of the page. This should not +happen, probably. --[[Joey]] diff --git a/doc/bugs/trail_test_suite_failures.mdwn b/doc/bugs/trail_test_suite_failures.mdwn new file mode 100644 index 000000000..a3b7159ec --- /dev/null +++ b/doc/bugs/trail_test_suite_failures.mdwn @@ -0,0 +1,97 @@ +[[!template id=gitbranch branch=smcv/trail author=smcv]] [[!tag patch]] + +`t/trail.t` has some test suite failures. This is after applying +[[smcv]]'s patch that fixed some races that caused it to fail +sometimes. These remaining failures may also be intermittant, +although I can get them reliably on my laptop. I've added some debugging +output, which seems to point to an actual bug in the plugin AFAICS. --[[Joey]] + +> I can reproduce this reliably at 0a23666ddd but not 3.20120203. Bisecting +> indicates that it regressed in aaa72a3a80f, "inline: When the pagenames list +> includes pages that do not exist, skip them". +> +> I don't think this is the bug noted in the commit message - the inline +> containing `sorting/new` uses `pages`, not `pagenames`. --[[smcv]] + +>> It seems you removed `trail` support from `inline` in that commit. +>> Assuming that wasn't intentional, this is fixed in `smcv/trail`. +>> --[[smcv]] + +>>> Looks like a bad merge of some kind. pulled, [[done]] --[[Joey]] + +
+ok 71 - expected n=sorting/end p=sorting/beginning in sorting/middle.html
+not ok 72 - expected n=sorting/new p=sorting/middle in sorting/end.html
+#   Failed test 'expected n=sorting/new p=sorting/middle in sorting/end.html'
+#   at t/trail.t line 13.
+#          got: 'n=sorting/linked2 p=sorting/middle'
+#     expected: 'n=sorting/new p=sorting/middle'
+not ok 73 - expected n=sorting/old p=sorting/end in sorting/new.html
+#   Failed test 'expected n=sorting/old p=sorting/end in sorting/new.html'
+#   at t/trail.t line 13.
+#          got: undef
+#     expected: 'n=sorting/old p=sorting/end'
+not ok 74 - expected n=sorting/ancient p=sorting/new in sorting/old.html
+#   Failed test 'expected n=sorting/ancient p=sorting/new in sorting/old.html'
+#   at t/trail.t line 13.
+#          got: undef
+#     expected: 'n=sorting/ancient p=sorting/new'
+not ok 75 - expected n=sorting/linked2 p=sorting/old in sorting/ancient.html
+#   Failed test 'expected n=sorting/linked2 p=sorting/old in sorting/ancient.html'
+#   at t/trail.t line 13.
+#          got: undef
+#     expected: 'n=sorting/linked2 p=sorting/old'
+not ok 76 - expected n= p=sorting/ancient in sorting/linked2.html
+#   Failed test 'expected n= p=sorting/ancient in sorting/linked2.html'
+#   at t/trail.t line 13.
+#          got: 'n= p=sorting/end'
+#     expected: 'n= p=sorting/ancient'
+ok 77
+
+ +Here, the "new" page does not seem to be included into the trail as expected. +Looking at the rendered page, there is no trail directive output on it either. +--[[Joey]] + +
+ok 90
+not ok 91 - expected n=sorting/new p= in sorting/old.html
+#   Failed test 'expected n=sorting/new p= in sorting/old.html'
+#   at t/trail.t line 13.
+#          got: undef
+#     expected: 'n=sorting/new p='
+not ok 92 - expected n=sorting/middle p=sorting/old in sorting/new.html
+#   Failed test 'expected n=sorting/middle p=sorting/old in sorting/new.html'
+#   at t/trail.t line 13.
+#          got: undef
+#     expected: 'n=sorting/middle p=sorting/old'
+not ok 93 - expected n=sorting/linked2 p=sorting/new in sorting/middle.html
+#   Failed test 'expected n=sorting/linked2 p=sorting/new in sorting/middle.html'
+#   at t/trail.t line 13.
+#          got: 'n=sorting/linked2 p='
+#     expected: 'n=sorting/linked2 p=sorting/new'
+ok 94 - expected n=sorting/linked p=sorting/middle in sorting/linked2.html
+ok 95 - expected n=sorting/end p=sorting/linked2 in sorting/linked.html
+ok 96 - expected n=sorting/a/c p=sorting/linked in sorting/end.html
+ok 97 - expected n=sorting/beginning p=sorting/end in sorting/a/c.html
+ok 98 - expected n=sorting/a/b p=sorting/a/c in sorting/beginning.html
+not ok 99 - expected n=sorting/ancient p=sorting/beginning in sorting/a/b.html
+#   Failed test 'expected n=sorting/ancient p=sorting/beginning in sorting/a/b.html'
+#   at t/trail.t line 13.
+#          got: 'n=sorting/z/a p=sorting/beginning'
+#     expected: 'n=sorting/ancient p=sorting/beginning'
+not ok 100 - expected n=sorting/z/a p=sorting/a/b in sorting/ancient.html
+#   Failed test 'expected n=sorting/z/a p=sorting/a/b in sorting/ancient.html'
+#   at t/trail.t line 13.
+#          got: undef
+#     expected: 'n=sorting/z/a p=sorting/a/b'
+not ok 101 - expected n= p=sorting/ancient in sorting/z/a.html
+#   Failed test 'expected n= p=sorting/ancient in sorting/z/a.html'
+#   at t/trail.t line 13.
+#          got: 'n= p=sorting/a/b'
+#     expected: 'n= p=sorting/ancient'
+ok 102
+
+ +Haven't investigated, but looks like the same sort of problem, a +page expected to be in the trail isn't. --[[Joey]] diff --git a/doc/bugs/transient_autocreated_tagbase_is_not_transient_autoindexed.mdwn b/doc/bugs/transient_autocreated_tagbase_is_not_transient_autoindexed.mdwn index 3eb1542d3..702608831 100644 --- a/doc/bugs/transient_autocreated_tagbase_is_not_transient_autoindexed.mdwn +++ b/doc/bugs/transient_autocreated_tagbase_is_not_transient_autoindexed.mdwn @@ -1,7 +1,31 @@ mkdir -p ikiwiki-tag-test/raw/a_dir/ ikiwiki-tag-test/rendered/ - echo '[[!taglink a_tag]]' > ikiwiki-tag-test/raw/a_dir/a_page.mdwn + echo '\[[!taglink a_tag]]' > ikiwiki-tag-test/raw/a_dir/a_page.mdwn ikiwiki --verbose --plugin tag --plugin autoindex --plugin mdwn --set autoindex_commit=0 --set tagbase=tag --set tag_autocreate=1 --set tag_autocreate_commit=0 ikiwiki-tag-test/raw/ ikiwiki-tag-test/rendered/ ls -al ikiwiki-tag-test/raw/.ikiwiki/transient/ ls -al ikiwiki-tag-test/rendered/tag/ Shouldn't `ikiwiki-tag-test/raw/.ikiwiki/transient/tag.mdwn` and `ikiwiki-tag-test/rendered/tag/index.html` exist? + +[[!tag patch]] +[[!template id=gitbranch branch=smcv/ready/autoindex author=smcv]] +[[!template id=gitbranch branch=smcv/ready/autoindex-more-often author=smcv]] + +> To have a starting point to (maybe) change this, my `ready/autoindex` +> branch adds a regression test for the current behaviour, both with +> and without `autoindex_commit` enabled. It also fixes an unnecessary +> and potentially harmful special case for the transient directory. +> +> The fact that files in underlays (including transient files) don't +> trigger autoindexing is deliberate. However, this is the second +> request to change this behaviour: the first was +> [[!debbug 611068]], which has a patch from Tuomas Jormola. +> On that bug report, Joey explains why it's undesirable +> for the original behaviour of autoindex (when the +> index isn't transient). +> +> I'm not sure whether the same reasoning still applies when the +> index is transient, though (`autoindex_commit => 0`), +> because the index pages won't be cluttering up people's +> git repositories any more? My `autoindex-more` branch changes +> the logic so it will do what you want in the `autoindex_commit => 0` +> case, and amends the appropriate regression test. --[[smcv]] diff --git a/doc/bugs/wiki_links_still_processed_inside_code_blocks.mdwn b/doc/bugs/wiki_links_still_processed_inside_code_blocks.mdwn index b2a8b0632..9f0a1d102 100644 --- a/doc/bugs/wiki_links_still_processed_inside_code_blocks.mdwn +++ b/doc/bugs/wiki_links_still_processed_inside_code_blocks.mdwn @@ -46,4 +46,22 @@ and have it render like: > there should give some strong hints how to fix this bug, though I haven't > tried to apply the method yet. --[[Joey]] +>> As far, as I can see, smileys bug is solved by checking for code/pre. In +>> this case, however, this is not applicable. WikiLinks/directives *should* be +>> expanded before passing text to formatter, as their expansion may contain +>> markup. Directives should be processed before, as they may provide *partial* +>> markup (eg `template` ones), that have no sense except when in the page +>> cotext. Links should be processed before, because, at least multimarkdown may +>> try to expand them as anchor-links. +>> +>> For now, my partial solution is to restrict links to not have space at the +>> start, this way in many cases escaping in code may be done in natural way +>> and not break copypastability. 
For example, shell 'if \[[ condition ]];' +>> will work fine with this. +>> +>> Maybe directives can also be restricted to only be allowed on the line by +>> themselves (not separated by blank lines, however) or something similar. +>> +>> --[[isbear]] + [[!debbug 487397]] diff --git a/doc/bugs/wrong_link_in_recentchanges_when_reverting_an_ikiwiki_outside_git_root.mdwn b/doc/bugs/wrong_link_in_recentchanges_when_reverting_an_ikiwiki_outside_git_root.mdwn index bf311c198..5f7450b79 100644 --- a/doc/bugs/wrong_link_in_recentchanges_when_reverting_an_ikiwiki_outside_git_root.mdwn +++ b/doc/bugs/wrong_link_in_recentchanges_when_reverting_an_ikiwiki_outside_git_root.mdwn @@ -1,3 +1,8 @@ in ikiwiki instances that don't reside in the git root directory (the only ones i know of are ikiwiki itself), reverts show the wrong link in the recentchanges (for example, in the ikiwiki main repository's 4530430 and its revert, the main index page was edited, but the revert shows doc/index as a link). the expected behavior is to compensate for the modified root directory (i.e., show index instead of doc/index). + +> This seems to work OK now - commit 84c4ca33 and its reversion both +> appear correctly in [[recentchanges]]. Looking at git history, +> Joey [[fixed this|done]] in commit 1b6c1895 before 3.20120203. +> --[[smcv]] diff --git a/doc/bugs/yaml:xs_codependency_not_listed.mdwn b/doc/bugs/yaml:xs_codependency_not_listed.mdwn new file mode 100644 index 000000000..f136d8b12 --- /dev/null +++ b/doc/bugs/yaml:xs_codependency_not_listed.mdwn @@ -0,0 +1,13 @@ +YAML:XS is not listed as a dep in the spec file which results in + +``` +HOME=/home/me /usr/bin/perl -Iblib/lib ikiwiki.in -dumpsetup ikiwiki.setup +Can't locate YAML/XS.pm in @INC (@INC contains: . blib/lib /usr/local/lib64/perl5 /usr/local/share/perl5 /usr/lib64/perl5/vendor_perl /usr/share/perl5/vendor_perl /usr/lib64/perl5 /usr/share/perl5) at (eval 39) line 2. +BEGIN failed--compilation aborted at (eval 39) line 2. +make: *** [ikiwiki.setup] Error 2 +error: Bad exit status from /var/tmp/rpm-tmp.Sgq2QK (%build) +``` + +when trying to build + +> Ok, added. [[done]] --[[Joey]] diff --git a/doc/contact.mdwn b/doc/contact.mdwn index 486a4d186..7d31ddf10 100644 --- a/doc/contact.mdwn +++ b/doc/contact.mdwn @@ -4,7 +4,7 @@ ikiwiki's own wiki. ikiwiki provides a [[bug_tracker|bugs]], a [[TODO_list|TODO]], and "discussion" sub-pages for every page, as well as a [[forum]] for general questions and discussion. ikiwiki developers monitor [[RecentChanges]] closely, via the webpage, email, -[CIA](http://cia.navi.cx), and IRC, and respond in a timely fashion. +and IRC, and respond in a timely fashion. You could also drop by the IRC channel `#ikiwiki` on [OFTC](http://www.oftc.net/) (`irc.oftc.net`), or use the diff --git a/doc/convert.mdwn b/doc/convert.mdwn index 871cd31fe..fd4fbeac3 100644 --- a/doc/convert.mdwn +++ b/doc/convert.mdwn @@ -3,5 +3,7 @@ to convert it to ikiwiki? Various tools and techniques have been developed to handle such conversions. * [[tips/convert_mediawiki_to_ikiwiki]] -* [[tips/convert_MoinMoin_and_TWiki_to_ikiwiki]] +* [[tips/convert_moinmoin_to_ikiwiki]] * [[tips/convert_blogger_blogs_to_ikiwiki]] + +In addition, [[JoshTriplett]] has written scripts to convert Twiki sites, see [his page](/users/JoshTriplett) for more information. diff --git a/doc/css_market.mdwn b/doc/css_market.mdwn index 3f5627028..c9c6694e7 100644 --- a/doc/css_market.mdwn +++ b/doc/css_market.mdwn @@ -10,6 +10,10 @@ included in ikiwiki for easy use. 
Feel free to add your own stylesheets here. (Upload as wiki pages; wiki gnomes will convert them to css files..) +* **[lessish.css](https://raw.github.com/spiffin/ikiwiki_lessish/master/lessish.css)**, contributed by [[Spiffin]], + A responsive stylesheet based on the [Less CSS Framework](http://lessframework.com). + Links: [PNG preview](https://github.com/spiffin/ikiwiki_lessish/blob/master/lessish_preview.png) and [GitHub repo](https://github.com/spiffin/ikiwiki_lessish). + * **[[css_market/zack.css]]**, contributed by [[StefanoZacchiroli]], customized mostly for *blogging purposes*, can be seen in action on [zack's blog](http://upsilon.cc/~zack/blog/) diff --git a/doc/examples/blog/posts.mdwn b/doc/examples/blog/posts.mdwn index 08e014838..2bd0f1d6f 100644 --- a/doc/examples/blog/posts.mdwn +++ b/doc/examples/blog/posts.mdwn @@ -1,3 +1,3 @@ Here is a full list of posts to the [[blog|index]]. -[[!inline pages="page(./posts/*) and !*/Discussion" archive=yes feedshow=10 quick=yes]] +[[!inline pages="page(./posts/*) and !*/Discussion" archive=yes feedshow=10 quick=yes trail=yes]] diff --git a/doc/examples/softwaresite/bugs/hghg.mdwn b/doc/examples/softwaresite/bugs/hghg.mdwn new file mode 100644 index 000000000..cece64126 --- /dev/null +++ b/doc/examples/softwaresite/bugs/hghg.mdwn @@ -0,0 +1 @@ +hghg diff --git a/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__.mdwn b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__.mdwn new file mode 100644 index 000000000..c0b896515 --- /dev/null +++ b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__.mdwn @@ -0,0 +1,3 @@ +Is anyone successfull mirroring feeds from ikiwiki to identi.ca (or another status.net instance)? How did you set up your feed? + +When I try to, identi.ca presents me with an error about no "author ID URI" being found in the feed. Indeed the ikiwiki-generated atom feed only has got a global "author" - I presume identi.ca requires author information in each entry. Is it possible to set up ikiwiki's feed that way? diff --git a/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_1_8a5acbb6234104b607c8c4cf16124ae4._comment b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_1_8a5acbb6234104b607c8c4cf16124ae4._comment new file mode 100644 index 000000000..1d710d153 --- /dev/null +++ b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_1_8a5acbb6234104b607c8c4cf16124ae4._comment @@ -0,0 +1,8 @@ +[[!comment format=mdwn + username="Franek" + ip="188.99.178.40" + subject="[[!meta author="..." + date="2012-05-19T14:51:42Z" + content=""" +Adding [[!meta author=\"me\"]] to the entries and/or the feedpage does not help. +"""]] diff --git a/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_2_155e5823860a91989647ede8b5c9224a._comment b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_2_155e5823860a91989647ede8b5c9224a._comment new file mode 100644 index 000000000..6c709b3f0 --- /dev/null +++ b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_2_155e5823860a91989647ede8b5c9224a._comment @@ -0,0 +1,16 @@ +[[!comment format=mdwn + username="Franek" + ip="188.99.178.40" + subject="Further enquiries" + date="2012-05-20T10:46:07Z" + content=""" +I did some more experiments setting not only \"[[!meta author=...\", but also \"authorurl\" globally and per-entry in various combinations, with no success. 
As far as I could see, \"authorurl\" had no effect on the atom feed whatsoever. + +It seems that identi.ca wants a feed to have an field with a subfield, as described here: [[http://www.atomenabled.org/developers/syndication/#person]] . Is there a way to achieve this with ikiwiki inline-feeds? + +I also found two old and unresolved status.net bugreports on the matter: + +[[http://status.net/open-source/issues/2840]] + +[[http://status.net/open-source/issues/2839]] +"""]] diff --git a/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_3_317f1202a3da1bfc845d4becbac4bba8._comment b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_3_317f1202a3da1bfc845d4becbac4bba8._comment new file mode 100644 index 000000000..6bda93433 --- /dev/null +++ b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_3_317f1202a3da1bfc845d4becbac4bba8._comment @@ -0,0 +1,10 @@ +[[!comment format=mdwn + username="Franek" + ip="92.74.26.119" + subject="kind of solved, but another problem comes up" + date="2012-05-26T19:31:19Z" + content=""" +The templates atompage.tmpl and/or atomitem.tmpl appear to be what would have to be altered to satisfy identi.ca. I did that on my system, just hard-coding a element into for testing. In one respect, it worked: identi.ca does not complain about the missing author uri any more. In another, it did not, another error comes up now: \"Internal server error\" and something like \"could not add feed\". + +I do not know where to go from this very unspecific error message. I guess I am going to try something like twitterfeed.com, for now. +"""]] diff --git a/doc/forum/Attachment_and_sub-directory.mdwn b/doc/forum/Attachment_and_sub-directory.mdwn new file mode 100644 index 000000000..91d7aee27 --- /dev/null +++ b/doc/forum/Attachment_and_sub-directory.mdwn @@ -0,0 +1,5 @@ +Hi. + +If I create a page and attach a file to the page, ikiwiki creates a sub-directory with the page name and places the attachment in the sub-directory regardless of usedirs setup. Is there any setup not to create the sub-directory and to place the attachment in the same directory where the page is, so that I can edit and properly *preview* at a local machine using third-party markdown editors? + +Thanks in advance. diff --git a/doc/forum/Background_picture_and_css.mdwn b/doc/forum/Background_picture_and_css.mdwn new file mode 100644 index 000000000..827100984 --- /dev/null +++ b/doc/forum/Background_picture_and_css.mdwn @@ -0,0 +1,8 @@ +Is it possible to put two different background pictures into the right and left sides of the following ikiwiki css? + +[lessish css theme](https://raw.github.com/spiffin/ikiwiki_lessish/master/lessish.css) + +Is it also possible to have a background like this: [http://ysharifi.wordpress.com/](http://ysharifi.wordpress.com/) +or this [tex.stackexchange.com](tex.stackexchange.com) + +I am not a css expert so, it would be nice if you could provide some details. diff --git a/doc/forum/CGI_script_and_HTTPS.mdwn b/doc/forum/CGI_script_and_HTTPS.mdwn new file mode 100644 index 000000000..2f255002d --- /dev/null +++ b/doc/forum/CGI_script_and_HTTPS.mdwn @@ -0,0 +1,29 @@ +Dear ikiwiki folks, + +using Debian Wheezy and ikiwiki 3.20120629 for some reason when accessing the site using HTTP (and not HTTPS), going to Edit, so executing the CGI script, all URLs are prepended with HTTPS, which I do not want. + + + +Trying to look at the source, I guess it is originating from `IkiWiki/CGI.pm`. 
+ + sub printheader ($) { + my $session=shift; + + if (($ENV{HTTPS} && lc $ENV{HTTPS} ne "off") || $config{sslcookie}) { + print $session->header(-charset => 'utf-8', + -cookie => $session->cookie(-httponly => 1, -secure => 1)); + } + else { + print $session->header(-charset => 'utf-8', + -cookie => $session->cookie(-httponly => 1)); + } + } + +Does it check if HTTPS is enabled in the environment? During `ikiwiki --setup example.setup` or when the CGI script is run when the site is accessed (for example in an Apache environment)? + +Can this somehow be disabled in ikiwiki. Reading the code I guess I could somehow set `HTTPS = off` somewhere in the `VirtualHost` section of the Apache configuration. + + +Thanks, + +--[[PaulePanter]] diff --git a/doc/forum/CGI_script_and_HTTPS/comment_1_3f8ef438ca7de11635d4e40080e7baa9._comment b/doc/forum/CGI_script_and_HTTPS/comment_1_3f8ef438ca7de11635d4e40080e7baa9._comment new file mode 100644 index 000000000..03f1032e9 --- /dev/null +++ b/doc/forum/CGI_script_and_HTTPS/comment_1_3f8ef438ca7de11635d4e40080e7baa9._comment @@ -0,0 +1,43 @@ +[[!comment format=mdwn + username="http://smcv.pseudorandom.co.uk/" + nickname="smcv" + subject="comment 1" + date="2012-11-05T11:27:02Z" + content=""" +IkiWiki generates self-referential URLs using the `url` and `cgiurl` +configuration parameters, and the `urlto()` and `cgiurl()` functions; +the code you quoted isn't involved (it's choosing whether to set +HTTPS-only cookies or not, rather than choosing how to generate +self-referential URLs). + +If you want your wiki to be accessible via both HTTP and HTTPS, and use +whichever the user first requested, you should set both `url` and +`cgiurl` to the same URI scheme and hostname with no port specified, +either both `http` or both `https`, for instance: + + url: http://www.example.com/ + cgiurl: http://www.example.com/ikiwiki.cgi + +or + + url: https://example.org/wiki/ + cgiurl: https://example.org/cgi-bin/ikiwiki + +(or the Perl-syntax equivalents if you're not using a YAML +setup file). + +If you use one of those, IkiWiki will attempt to generate +path-only links, like \"/wiki/\" and \"/cgi-bin/ikiwiki?...\", +whenever it's valid to do so. A visitor using HTTP will stay +on HTTP and a visitor using HTTPS will stay on HTTPS. + +The choice of `http` or `https` for the `url` and `cgiurl` +still matters when a URL *must* be absolute, such as in an +RSS feed. + +I improved this code in late 2010 for this todo item: +[[todo/want_to_avoid_ikiwiki_using_http_or_https_in_urls_to_allow_serving_both]]. +It's possible that it has regressed (that's happened +a couple of times). If it has, please quote your exact +`url` and `cgiurl` configuration. +"""]] diff --git a/doc/forum/Calendar:_listing_multiple_entries_per_day.mdwn b/doc/forum/Calendar:_listing_multiple_entries_per_day.mdwn index 51c320067..251cd6d9f 100644 --- a/doc/forum/Calendar:_listing_multiple_entries_per_day.mdwn +++ b/doc/forum/Calendar:_listing_multiple_entries_per_day.mdwn @@ -4,7 +4,7 @@ I'd very much like to be able to list my blog posts on a daily basis (used for l There was some effort to do this as detailed here. -[[http://ikiwiki.info/todo/Set_arbitrary_date_to_be_used_by_calendar_plugin/]] +[[todo/Set_arbitrary_date_to_be_used_by_calendar_plugin]] I had a quick go at doing something similar on Debian Stable (Ikiwiki 3.0) but alas my Ikiwiki fu is not strong enough. 
@@ -15,3 +15,5 @@ I'm not sure how I go about swapping the link on the day number to a link to, I
 and a suitable whilst loop looks to be all that's needed...
 
 Any pointers appreciated.
+
+A [[!taglink patch]] has been proposed in a [comment](#comment-d6f94e2b779d1c038b6359aad79ed14b) below.
diff --git a/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_4_4be39c2043821848d4b25d0bf946a718._comment b/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_4_4be39c2043821848d4b25d0bf946a718._comment
new file mode 100644
index 000000000..a71276d6b
--- /dev/null
+++ b/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_4_4be39c2043821848d4b25d0bf946a718._comment
@@ -0,0 +1,15 @@
+[[!comment format=mdwn
+ username="http://joey.kitenet.net/"
+ nickname="joey"
+ subject="comment 4"
+ date="2012-02-21T17:23:00Z"
+ content="""
+To be clear, this patch creates a `yyyy/mm/dd` file for each day, listing the posts for that day, so the calendar can link to it rather than a random single post.
+
+While certainly a valid solution, that's a lot of added pages, and a high overhead for such a minor UI point as this.
+
+Surely something interesting could be done with javascript or some other form of UI, so that clicking on a day in a calendar that has multiple posts presents a list of them? That would have essentially no overhead, since the calendar plugin already has a list of the posts made on a given day.
+
+Ikiwiki already does something similar to deal with the case where a page has a great many backlinks. It makes a UI element that, if hovered over, pops up a display of all the rest. This is done quite simply in the `page.tmpl`
+using the popup and balloon CSS classes. Calendar could also use this.
+"""]]
diff --git a/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_5_de545ebb6376066674ef2aaae4757b9c._comment b/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_5_de545ebb6376066674ef2aaae4757b9c._comment
new file mode 100644
index 000000000..fef852066
--- /dev/null
+++ b/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_5_de545ebb6376066674ef2aaae4757b9c._comment
@@ -0,0 +1,97 @@
+[[!comment format=mdwn
+ username="spalax"
+ subject="Popup listing multiple entries per day"
+ date="2012-06-08T00:56:06Z"
+ content="""
+[[!tag patch]]
+
+Hello,
+here is a patch that:
+
+- if there is a single entry in one day, does not change anything (compared to the previous version of the calendar plugin);
+- if there are several entries, when the mouse passes over the day, displays a popup listing all the entries of that day.
+
+That's all. No new pages for each day, takes as little space as it took before, and only a few lines more in the source.
+
+The only thing I am not totally happy with is the CSS. We have to say that the text is aligned on the left (otherwise, it is aligned on the right, as is each day of the calendar), but I do not know which place is the most sensible to put that line of CSS in.
+
+Regards,
+-- Louis
+
+
+
+    diff --git a/IkiWiki/Plugin/calendar.pm b/IkiWiki/Plugin/calendar.pm
+    index d443198..2c9ed79 100644
+    --- a/IkiWiki/Plugin/calendar.pm
+    +++ b/IkiWiki/Plugin/calendar.pm
+    @@ -86,8 +86,11 @@ sub format_month (@) {
+     	my $year  = $date[5] + 1900;
+     	my $mtag  = sprintf(\"%02d\", $month);
+     
+    -	# Only one posting per day is being linked to.
+    -	$linkcache{\"$year/$mtag/$mday\"} = $p;
+    +	# Several postings per day
+    +	if (! $linkcache{\"$year/$mtag/$mday\"}) {
+    +		$linkcache{\"$year/$mtag/$mday\"} = [];
+    +	}
+    +	push(@{$linkcache{\"$year/$mtag/$mday\"}}, $p);
+     	}
+     
+     	my $pmonth = $params{month} - 1;
+    @@ -221,11 +224,36 @@ EOF
+     			$tag='month-calendar-day-link';
+     		}
+     		$calendar.=qq{\t\t};
+    -		$calendar.=htmllink($params{page}, $params{destpage},
+    -			$linkcache{$key},
+    -			noimageinline => 1,
+    -			linktext => $day,
+    -			title => pagetitle(IkiWiki::basename($linkcache{$key})));
+    +		if ( scalar(@{$linkcache{$key}}) == 1) {
+    +			# Only one posting on this page
+    +			my $page = $linkcache{$key}[0];
+    +			$calendar.=htmllink($params{page}, $params{destpage},
+    +				$page,
+    +				noimageinline => 1,
+    +				linktext => $day,
+    +				title => pagetitle(IkiWiki::basename($page)));
+    +		} else {
+    +			$calendar.=qq{};
+    +		}
+     		$calendar.=qq{\n};
+     	}
+     	else {
+    diff --git a/doc/style.css b/doc/style.css
+    old mode 100644
+    new mode 100755
+    index 6e2afce..4149229
+    --- a/doc/style.css
+    +++ b/doc/style.css
+    @@ -316,6 +316,7 @@ div.progress-done {
+     .popup .paren,
+     .popup .expand {
+     	display: none;
+    +	text-align: left;
+     }
+     .popup:hover .balloon,
+     .popup:focus .balloon {
+
+"""]]
diff --git a/doc/forum/Can_I_have_different_favicons_for_each_folder__63__/comment_2_b8ccd3c29249eca73766f567bce12569._comment b/doc/forum/Can_I_have_different_favicons_for_each_folder__63__/comment_2_b8ccd3c29249eca73766f567bce12569._comment
new file mode 100644
index 000000000..0c8ca3bce
--- /dev/null
+++ b/doc/forum/Can_I_have_different_favicons_for_each_folder__63__/comment_2_b8ccd3c29249eca73766f567bce12569._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="Franek"
+ ip="178.7.43.64"
+ subject="comment 2"
+ date="2012-06-25T09:58:03Z"
+ content="""
+I did as you suggested (finally) and created a simple modification of the [[plugins/favicon]] plugin: [[plugins/contrib/localfavicon]]. It checks for the \"localfavicon\" option, and if it is set, it uses bestlink() to determine which favicon to use for each page; if not, it behaves just like the original favicon plugin.
+"""]]
diff --git a/doc/forum/Can_not_advance_past_first_page_of_results_using_search_plugin.mdwn b/doc/forum/Can_not_advance_past_first_page_of_results_using_search_plugin.mdwn
new file mode 100644
index 000000000..1a9391e48
--- /dev/null
+++ b/doc/forum/Can_not_advance_past_first_page_of_results_using_search_plugin.mdwn
@@ -0,0 +1,26 @@
+I'm using the [[/plugins/search/]] plugin and it correctly displays the first page of results, but the "Next" button doesn't work.
+
+If I search for "linux", for example, I see "1-10 of exactly 65 matches" and this in my browser's address bar: https://example.com/ikiwiki.cgi?P=linux
+
+Then, I scroll down and click "Next" and I see. . .
+
+> Although this page is encrypted, the information you have entered is to be sent over an unencrypted connection and could easily be read by a third party.
+>
+> Are you sure you want to continue sending this information?
+
+. . . then I click "Continue" but I'm stuck on the first page of search results (it still says "1-10 of exactly 65 matches") and I have the following in my browser's address bar:
+
+https://example.com/ikiwiki.cgi?P=linux&DEFAULTOP=or&%253E=Next&DB=default&FMT=query&xP=Zlinux&xDB=default&xFILTERS=--O
+
+I noticed that if I change what's in the address bar to the following, I **can** advance to page 2 (it shows "11-20 of exactly 65 matches"). That is to say, I'm removing "25" from "%253E" as a workaround:
+
+https://example.com/ikiwiki.cgi?P=linux&DEFAULTOP=or&%3E=Next&DB=default&FMT=query&xP=Zlinux&xDB=default&xFILTERS=--O
+
+Based on this output, I might need to make a change to "searchquery.tmpl", which is under [[/templates]]. . .
+
+    [wikiuser@ikiwiki1 ~]$ grep -r DEFAULTOP /usr/share/ikiwiki
+    /usr/share/ikiwiki/templates/searchquery.tmpl:
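[A closing note on the workaround described above: `%253E` is the percent-encoded form of `%3E`, and `%3E` in turn encodes the `>` character, which is the parameter the working URL passes as `Next`. In other words, the broken URL carries that next-page field escaped twice. The following is only a small illustration of the two layers using the `URI::Escape` module; it assumes nothing about `searchquery.tmpl` itself, whose matching line is cut off above.]

    #!/usr/bin/perl
    # Illustration only: %253E is the ">" field name escaped twice,
    # while %3E (as in the working URL above) is escaped just once.
    use strict;
    use warnings;
    use URI::Escape qw(uri_unescape);

    my $broken = '%253E';
    my $once   = uri_unescape($broken);  # gives "%3E" (still escaped once)
    my $twice  = uri_unescape($once);    # gives ">" (the field the working URL sets to "Next")
    print "$broken -> $once -> $twice\n";

If the extra layer of escaping really does come from the template, the fix would be wherever that field name gets URL-encoded a second time, but that remains a guess until the truncated grep output above can be seen in full.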