From: Simon McVittie
Date: Fri, 12 Sep 2014 20:57:04 +0000 (+0100)
Subject: Merge branch 'ready/chrysn/linkmapenhancement'
X-Git-Url: https://sipb.mit.edu/gitweb.cgi/ikiwiki.git/commitdiff_plain/669a5f5ecc0d50daf96d8dc2af219c94c8625fb4?hp=efbb1121ffdc146f5c9a481a51f23ad151b9f240

Merge branch 'ready/chrysn/linkmapenhancement'
---

diff --git a/.gitignore b/.gitignore
index fe1c3d441..8528fe9be 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,8 @@ ikiwiki.out
 ikiwiki-transition.out
 ikiwiki-calendar.out
 pm_to_blib
+/MYMETA.json
+/MYMETA.yml
 *.man
 /po/cover_db
 po/po2wiki_stamp
diff --git a/IkiWiki.pm b/IkiWiki.pm
index 2a83777e6..49ac97196 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -14,7 +14,7 @@ use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
 	%pagestate %wikistate %renderedfiles %oldrenderedfiles
 	%pagesources %delpagesources %destsources %depends %depends_simple
 	@mass_depends %hooks %forcerebuild %loaded_plugins %typedlinks
-	%oldtypedlinks %autofiles};
+	%oldtypedlinks %autofiles @underlayfiles $lastrev};
 
 use Exporter q{import};
 our @EXPORT = qw(hook debug error htmlpage template template_depends
@@ -22,7 +22,7 @@ our @EXPORT = qw(hook debug error htmlpage template template_depends
 	htmllink readfile writefile pagetype srcfile pagename
 	displaytime strftime_utf8 will_render gettext ngettext urlto targetpage
 	add_underlay pagetitle titlepage linkpage newpagefile
-	inject add_link add_autofile
+	inject add_link add_autofile useragent
 	%config %links %pagestate %wikistate %renderedfiles
 	%pagesources %destsources %typedlinks);
 our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
@@ -118,6 +118,29 @@ sub getsetup () {
 		safe => 0,
 		rebuild => 0,
 	},
+	cgi_overload_delay => {
+		type => "string",
+		default => '',
+		example => "10",
+		description => "number of seconds to delay CGI requests when overloaded",
+		safe => 1,
+		rebuild => 0,
+	},
+	cgi_overload_message => {
+		type => "string",
+		default => '',
+		example => "Please wait",
+		description => "message to display when overloaded (may contain html)",
+		safe => 1,
+		rebuild => 0,
+	},
+	only_committed_changes => {
+		type => "boolean",
+		default => 0,
+		description => "enable optimization of only refreshing committed changes?",
+		safe => 1,
+		rebuild => 0,
+	},
 	rcs => {
 		type => "string",
 		default => '',
@@ -497,6 +520,21 @@ sub getsetup () {
 		safe => 0,
 		rebuild => 0,
 	},
+	cookiejar => {
+		type => "string",
+		default => { file => "$ENV{HOME}/.ikiwiki/cookies" },
+		description => "cookie control",
+		safe => 0, # hooks into perl module internals
+		rebuild => 0,
+	},
+	useragent => {
+		type => "string",
+		default => undef,
+		example => "Wget/1.13.4 (linux-gnu)",
+		description => "set custom user agent string for outbound HTTP requests e.g. when fetching aggregated RSS feeds",
+		safe => 0,
+		rebuild => 0,
+	},
 }
 
 sub defaultconfig () {
@@ -705,6 +743,7 @@ sub debug ($) {
 }
 
 my $log_open=0;
+my $log_failed=0;
 sub log_message ($$) {
 	my $type=shift;
 
@@ -715,9 +754,18 @@ sub log_message ($$) {
 			Sys::Syslog::openlog('ikiwiki', '', 'user');
 			$log_open=1;
 		}
-		return eval {
-			Sys::Syslog::syslog($type, "[$config{wikiname}] %s", join(" ", @_));
+		eval {
+			# keep a copy to avoid editing the original config repeatedly
+			my $wikiname = $config{wikiname};
+			utf8::encode($wikiname);
+			Sys::Syslog::syslog($type, "[$wikiname] %s", join(" ", @_));
 		};
+		if ($@) {
+			print STDERR "failed to syslog: $@" unless $log_failed;
+			$log_failed=1;
+			print STDERR "@_\n";
+		}
+		return $@;
 	}
 	elsif (!
$config{cgi}) { return print "@_\n"; @@ -1092,6 +1140,11 @@ sub cgiurl (@) { join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params); } +sub cgiurl_abs (@) { + eval q{use URI}; + URI->new_abs(cgiurl(@_), $config{cgiurl}); +} + sub baseurl (;$) { my $page=shift; @@ -1412,7 +1465,7 @@ sub preprocess ($$$;$$) { # consider it significant. my @params; while ($params =~ m{ - (?:([-\w]+)=)? # 1: named parameter key? + (?:([-.\w]+)=)? # 1: named parameter key? (?: """(.*?)""" # 2: triple-quoted value | @@ -1455,7 +1508,7 @@ sub preprocess ($$$;$$) { push @params, $val, ''; } } - if ($preprocessing{$page}++ > 3) { + if ($preprocessing{$page}++ > 8) { # Avoid loops of preprocessed pages preprocessing # other pages that preprocess them, etc. return "[[!$command ". @@ -1509,7 +1562,7 @@ sub preprocess ($$$;$$) { ( # 4: the parameters.. \s+ # Must have space if parameters present (?: - (?:[-\w]+=)? # named parameter key? + (?:[-.\w]+=)? # named parameter key? (?: """.*?""" # triple-quoted value | @@ -1537,7 +1590,7 @@ sub preprocess ($$$;$$) { \s+ ( # 4: the parameters.. (?: - (?:[-\w]+=)? # named parameter key? + (?:[-.\w]+=)? # named parameter key? (?: """.*?""" # triple-quoted value | @@ -1753,7 +1806,8 @@ sub enable_commit_hook () { sub loadindex () { %oldrenderedfiles=%pagectime=(); - if (! $config{rebuild}) { + my $rebuild=$config{rebuild}; + if (! $rebuild) { %pagesources=%pagemtime=%oldlinks=%links=%depends= %destsources=%renderedfiles=%pagecase=%pagestate= %depends_simple=%typedlinks=%oldtypedlinks=(); @@ -1765,7 +1819,8 @@ sub loadindex () { open ($in, "<", "$config{wikistatedir}/indexdb") || return; } else { - $config{gettime}=1; # first build + # gettime on first build + $config{gettime}=1 unless defined $config{gettime}; return; } } @@ -1793,10 +1848,16 @@ sub loadindex () { foreach my $src (keys %$pages) { my $d=$pages->{$src}; - my $page=pagename($src); + my $page; + if (exists $d->{page} && ! $rebuild) { + $page=$d->{page}; + } + else { + $page=pagename($src); + } $pagectime{$page}=$d->{ctime}; $pagesources{$page}=$src; - if (! $config{rebuild}) { + if (! $rebuild) { $pagemtime{$page}=$d->{mtime}; $renderedfiles{$page}=$d->{dest}; if (exists $d->{links} && ref $d->{links}) { @@ -1846,6 +1907,8 @@ sub loadindex () { foreach my $page (keys %renderedfiles) { $destsources{$_}=$page foreach @{$renderedfiles{$page}}; } + $lastrev=$index->{lastrev}; + @underlayfiles=@{$index->{underlayfiles}} if ref $index->{underlayfiles}; return close($in); } @@ -1867,6 +1930,7 @@ sub saveindex () { my $src=$pagesources{$page}; $index{page}{$src}={ + page => $page, ctime => $pagectime{$page}, mtime => $pagemtime{$page}, dest => $renderedfiles{$page}, @@ -1886,11 +1950,7 @@ sub saveindex () { } if (exists $pagestate{$page}) { - foreach my $id (@plugins) { - foreach my $key (keys %{$pagestate{$page}{$id}}) { - $index{page}{$src}{state}{$id}{$key}=$pagestate{$page}{$id}{$key}; - } - } + $index{page}{$src}{state}=$pagestate{$page}; } } @@ -1902,6 +1962,9 @@ sub saveindex () { } } + $index{lastrev}=$lastrev; + $index{underlayfiles}=\@underlayfiles; + $index{version}="3"; my $ret=Storable::nstore_fd(\%index, $out); return if ! defined $ret || ! 
$ret; @@ -2253,6 +2316,14 @@ sub add_autofile ($$$) { $autofiles{$file}{generator}=$generator; } +sub useragent () { + return LWP::UserAgent->new( + cookie_jar => $config{cookiejar}, + env_proxy => 1, # respect proxy env vars + agent => $config{useragent}, + ); +} + sub sortspec_translate ($$) { my $spec = shift; my $reverse = shift; @@ -2389,6 +2460,19 @@ sub pagespec_match ($$;@) { return $sub->($page, @params); } +# e.g. @pages = sort_pages("title", \@pages, reverse => "yes") +# +# Not exported yet, but could be in future if it is generally useful. +# Note that this signature is not the same as IkiWiki::SortSpec::sort_pages, +# which is "more internal". +sub sort_pages ($$;@) { + my $sort = shift; + my $list = shift; + my %params = @_; + $sort = sortspec_translate($sort, $params{reverse}); + return IkiWiki::SortSpec::sort_pages($sort, @$list); +} + sub pagespec_match_list ($$;@) { my $page=shift; my $pagespec=shift; @@ -2755,12 +2839,12 @@ sub match_user ($$;@) { my $user=shift; my %params=@_; - my $regexp=IkiWiki::glob2re($user); - if (! exists $params{user}) { return IkiWiki::ErrorReason->new("no user specified"); } + my $regexp=IkiWiki::glob2re($user); + if (defined $params{user} && $params{user}=~$regexp) { return IkiWiki::SuccessReason->new("user is $user"); } @@ -2800,8 +2884,10 @@ sub match_ip ($$;@) { if (! exists $params{ip}) { return IkiWiki::ErrorReason->new("no IP specified"); } + + my $regexp=IkiWiki::glob2re(lc $ip); - if (defined $params{ip} && lc $params{ip} eq lc $ip) { + if (defined $params{ip} && lc $params{ip}=~$regexp) { return IkiWiki::SuccessReason->new("IP is $ip"); } else { diff --git a/IkiWiki/CGI.pm b/IkiWiki/CGI.pm index 62383b6fd..cb83319e6 100644 --- a/IkiWiki/CGI.pm +++ b/IkiWiki/CGI.pm @@ -110,11 +110,23 @@ sub decode_cgi_utf8 ($) { } } +sub safe_decode_utf8 ($) { + my $octets = shift; + # call decode_utf8 on >= 5.20 only if it's not already decoded, + # otherwise it balks, on < 5.20, always call it + if ($] < 5.02 || !Encode::is_utf8($octets)) { + return decode_utf8($octets); + } + else { + return $octets; + } +} + sub decode_form_utf8 ($) { if ($] >= 5.01) { my $form = shift; foreach my $f ($form->field) { - my @value=map { decode_utf8($_) } $form->field($f); + my @value=map { safe_decode_utf8($_) } $form->field($f); $form->field(name => $f, value => \@value, force => 1, @@ -131,7 +143,7 @@ sub needsignin ($$) { if (! defined $session->param("name") || ! userinfo_get($session->param("name"), "regdate")) { - $session->param(postsignin => $ENV{QUERY_STRING}); + $session->param(postsignin => $q->query_string); cgi_signin($q, $session); cgi_savesession($session); exit; @@ -351,7 +363,8 @@ sub cgi_getsession ($) { { FileName => "$config{wikistatedir}/sessions.db" }) }; if (! $session || $@) { - error($@." ".CGI::Session->errstr()); + my $error = $@; + error($error." ".CGI::Session->errstr()); } umask($oldmask); diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm index 5e22609c9..fbf88c627 100644 --- a/IkiWiki/Plugin/aggregate.pm +++ b/IkiWiki/Plugin/aggregate.pm @@ -58,21 +58,12 @@ sub getsetup () { safe => 1, rebuild => 0, }, - cookiejar => { - type => "string", - example => { file => "$ENV{HOME}/.ikiwiki/cookies" }, - safe => 0, # hooks into perl module internals - description => "cookie control", - }, } sub checkconfig () { if (! defined $config{aggregateinternal}) { $config{aggregateinternal}=1; } - if (! 
defined $config{cookiejar}) { - $config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" }; - } # This is done here rather than in a refresh hook because it # needs to run before the wiki is locked. @@ -113,8 +104,7 @@ sub launchaggregation () { my @feeds=needsaggregate(); return unless @feeds; if (! lockaggregate()) { - debug("an aggregation process is already running"); - return; + error("an aggregation process is already running"); } # force a later rebuild of source pages $IkiWiki::forcerebuild{$_->{sourcepage}}=1 @@ -201,7 +191,7 @@ sub migrate_to_internal { if (-e $oldoutput) { require IkiWiki::Render; debug("removing output file $oldoutput"); - IkiWiki::prune($oldoutput); + IkiWiki::prune($oldoutput, $config{destdir}); } } @@ -523,11 +513,8 @@ sub aggregate (@) { } $feed->{feedurl}=pop @urls; } - my $res=URI::Fetch->fetch($feed->{feedurl}, - UserAgent => LWP::UserAgent->new( - cookie_jar => $config{cookiejar}, - ), - ); + my $ua=useragent(); + my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua); if (! $res) { $feed->{message}=URI::Fetch->errstr; $feed->{error}=1; @@ -566,7 +553,9 @@ sub aggregate (@) { }; } if ($@) { - $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)"; + # gettext can clobber $@ + my $error = $@; + $feed->{message}=gettext("feed crashed XML::Feed!")." ($error)"; $feed->{error}=1; debug($feed->{message}); next; @@ -594,6 +583,7 @@ sub aggregate (@) { feed => $feed, copyright => $f->copyright, title => defined $entry->title ? decode_entities($entry->title) : "untitled", + author => defined $entry->author ? decode_entities($entry->author) : "", link => $entry->link, content => (defined $c && defined $c->body) ? $c->body : "", guid => defined $entry->id ? $entry->id : time."_".$feed->{name}, @@ -640,12 +630,12 @@ sub add_page (@) { -e "$config{srcdir}/".htmlfn($page.$c)) { $c++ } + $page=$page.$c; $guid->{page}=$page; eval { write_page($feed, $guid, $mtime, \%params) }; if ($@) { # assume failure was due to a too long filename - # (or o $c=""; $page=$feed->{dir}."/item"; while (exists $IkiWiki::pagecase{lc $page.$c} || @@ -653,6 +643,7 @@ sub add_page (@) { -e "$config{srcdir}/".htmlfn($page.$c)) { $c++ } + $page=$page.$c; $guid->{page}=$page; write_page($feed, $guid, $mtime, \%params); @@ -686,11 +677,16 @@ sub write_page ($$$$$) { $template=template($feed->{template}, blind_cache => 1); }; if ($@) { - print STDERR gettext("failed to process template:")." $@"; + # gettext can clobber $@ + my $error = $@; + print STDERR gettext("failed to process template:")." $error"; return; } $template->param(title => $params{title}) if defined $params{title} && length($params{title}); + $template->param(author => $params{author}) + if defined $params{author} && length($params{author} + && $params{author} ne $feed->{name}); $template->param(content => wikiescape(htmlabs($params{content}, defined $params{base} ? $params{base} : $feed->{feedurl}))); $template->param(name => $feed->{name}); diff --git a/IkiWiki/Plugin/amazon_s3.pm b/IkiWiki/Plugin/amazon_s3.pm index cfd8cd347..a9da6bf12 100644 --- a/IkiWiki/Plugin/amazon_s3.pm +++ b/IkiWiki/Plugin/amazon_s3.pm @@ -232,8 +232,9 @@ sub writefile ($$$;$$) { } # This is a wrapper around the real prune. 
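# A minimal sketch (not part of the patch) of the new calling convention:
# throughout this series, IkiWiki::prune() grows an optional second
# argument naming the directory at which removal of newly-empty parent
# directories should stop. A caller that formerly wrote
# IkiWiki::prune($file) would now look something like:
#
#	IkiWiki::prune("$config{destdir}/$file", $config{destdir});
#
# as in the aggregate.pm and attachment.pm hunks above.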
-sub prune ($) { +sub prune ($;$) { my $file=shift; + my $up_to=shift; my @keys=IkiWiki::Plugin::amazon_s3::file2keys($file); @@ -250,7 +251,7 @@ sub prune ($) { } } - return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file); + return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file, $up_to); } 1 diff --git a/IkiWiki/Plugin/attachment.pm b/IkiWiki/Plugin/attachment.pm index 5a180cd5c..d56dd18ad 100644 --- a/IkiWiki/Plugin/attachment.pm +++ b/IkiWiki/Plugin/attachment.pm @@ -148,7 +148,7 @@ sub formbuilder (@) { $f=Encode::decode_utf8($f); $f=~s/^$page\///; if (IkiWiki::isinlinableimage($f) && - UNIVERSAL::can("IkiWiki::Plugin::img", "import")) { + IkiWiki::Plugin::img->can("import")) { $add.='[[!img '.$f.' align="right" size="" alt=""]]'; } else { @@ -229,8 +229,10 @@ sub attachment_store { check_canattach($session, $final_filename, $tempfile); }; if ($@) { - json_response($q, $form, $dest."/".$filename, $@); - error $@; + # save error in case called functions clobber $@ + my $error = $@; + json_response($q, $form, $dest."/".$filename, $error); + error $error; } # Move the attachment into holding directory. @@ -274,19 +276,19 @@ sub attachments_save { foreach my $filename (glob("$dir/*")) { $filename=Encode::decode_utf8($filename); next unless -f $filename; - my $destdir=$config{srcdir}."/". - linkpage(IkiWiki::possibly_foolish_untaint( - attachment_location($form->field('page')))); + my $destdir=linkpage(IkiWiki::possibly_foolish_untaint( + attachment_location($form->field('page')))); + my $absdestdir=$config{srcdir}."/".$destdir; my $destfile=IkiWiki::basename($filename); - my $dest=$destdir.$destfile; + my $dest=$absdestdir.$destfile; unlink($dest); - IkiWiki::prep_writefile($destfile, $destdir); + IkiWiki::prep_writefile($destfile, $absdestdir); rename($filename, $dest); - push @attachments, $dest; + push @attachments, $destdir.$destfile; } return unless @attachments; require IkiWiki::Render; - IkiWiki::prune($dir); + IkiWiki::prune($dir, $config{wikistatedir}."/attachments"); # Check the attachments in and trigger a wiki refresh. if ($config{rcs}) { diff --git a/IkiWiki/Plugin/blogspam.pm b/IkiWiki/Plugin/blogspam.pm index d32c2f169..e48ed729f 100644 --- a/IkiWiki/Plugin/blogspam.pm +++ b/IkiWiki/Plugin/blogspam.pm @@ -53,6 +53,7 @@ sub checkconfig () { eval q{ use RPC::XML; use RPC::XML::Client; + $RPC::XML::ENCODING = 'utf-8'; }; error $@ if $@; } diff --git a/IkiWiki/Plugin/bzr.pm b/IkiWiki/Plugin/bzr.pm index 72552abcc..e2b102dee 100644 --- a/IkiWiki/Plugin/bzr.pm +++ b/IkiWiki/Plugin/bzr.pm @@ -195,7 +195,7 @@ sub rcs_add ($) { sub rcs_remove ($) { my ($file) = @_; - my @cmdline = ("bzr", "rm", "--force", "--quiet", "$config{srcdir}/$file"); + my @cmdline = ("bzr", "rm", "--quiet", "$config{srcdir}/$file"); if (system(@cmdline) != 0) { warn "'@cmdline' failed: $!"; } @@ -302,7 +302,7 @@ sub extract_timestamp (@) { open (my $out, "-|", @_); my @log = bzr_log($out); - if (length @log < 1) { + if (length(scalar(@log)) < 1) { return 0; } diff --git a/IkiWiki/Plugin/calendar.pm b/IkiWiki/Plugin/calendar.pm index d443198f6..682bfb6fb 100644 --- a/IkiWiki/Plugin/calendar.pm +++ b/IkiWiki/Plugin/calendar.pm @@ -86,8 +86,10 @@ sub format_month (@) { my $year = $date[5] + 1900; my $mtag = sprintf("%02d", $month); - # Only one posting per day is being linked to. - $linkcache{"$year/$mtag/$mday"} = $p; + if (! 
$linkcache{"$year/$mtag/$mday"}) { + $linkcache{"$year/$mtag/$mday"} = []; + } + push(@{$linkcache{"$year/$mtag/$mday"}}, $p); } my $pmonth = $params{month} - 1; @@ -221,11 +223,27 @@ EOF $tag='month-calendar-day-link'; } $calendar.=qq{\t\t}; - $calendar.=htmllink($params{page}, $params{destpage}, - $linkcache{$key}, - noimageinline => 1, - linktext => $day, - title => pagetitle(IkiWiki::basename($linkcache{$key}))); + $calendar.=qq{}; $calendar.=qq{\n}; } else { diff --git a/IkiWiki/Plugin/comments.pm b/IkiWiki/Plugin/comments.pm index 91a482ed6..98ae13810 100644 --- a/IkiWiki/Plugin/comments.pm +++ b/IkiWiki/Plugin/comments.pm @@ -35,6 +35,7 @@ sub import { # Load goto to fix up user page links for logged-in commenters IkiWiki::loadplugin("goto"); IkiWiki::loadplugin("inline"); + IkiWiki::loadplugin("transient"); } sub getsetup () { @@ -90,17 +91,31 @@ sub getsetup () { safe => 0, rebuild => 0, }, + comments_allowformats => { + type => 'string', + default => '', + example => 'mdwn txt', + description => 'Restrict formats for comments to (no restriction if empty)', + safe => 1, + rebuild => 0, + }, + } sub checkconfig () { $config{comments_commit} = 1 unless defined $config{comments_commit}; + if (! $config{comments_commit}) { + $config{only_committed_changes}=0; + } $config{comments_pagespec} = '' unless defined $config{comments_pagespec}; $config{comments_closed_pagespec} = '' unless defined $config{comments_closed_pagespec}; $config{comments_pagename} = 'comment_' unless defined $config{comments_pagename}; + $config{comments_allowformats} = '' + unless defined $config{comments_allowformats}; } sub htmlize { @@ -128,12 +143,18 @@ sub safeurl ($) { } } +sub isallowed ($) { + my $format = shift; + return ! $config{comments_allowformats} || $config{comments_allowformats} =~ /\b$format\b/; +} + sub preprocess { my %params = @_; my $page = $params{page}; my $format = $params{format}; - if (defined $format && ! exists $IkiWiki::hooks{htmlize}{$format}) { + if (defined $format && (! exists $IkiWiki::hooks{htmlize}{$format} || + ! 
isallowed($format))) { error(sprintf(gettext("unsupported page format %s"), $format)); } @@ -301,7 +322,8 @@ sub editcomment ($$) { my @buttons = (POST_COMMENT, PREVIEW, CANCEL); my $form = CGI::FormBuilder->new( - fields => [qw{do sid page subject editcontent type author url}], + fields => [qw{do sid page subject editcontent type author + email url subscribe anonsubscribe}], charset => 'utf-8', method => 'POST', required => [qw{editcontent}], @@ -331,7 +353,7 @@ sub editcomment ($$) { my @page_types; if (exists $IkiWiki::hooks{htmlize}) { - foreach my $key (grep { !/^_/ } keys %{$IkiWiki::hooks{htmlize}}) { + foreach my $key (grep { !/^_/ && isallowed($_) } keys %{$IkiWiki::hooks{htmlize}}) { push @page_types, [$key, $IkiWiki::hooks{htmlize}{$key}{longname} || $key]; } } @@ -346,18 +368,35 @@ sub editcomment ($$) { $form->field(name => "type", value => $type, force => 1, type => 'select', options => \@page_types); - $form->tmpl_param(username => $session->param('name')); + my $username=$session->param('name'); + $form->tmpl_param(username => $username); + + $form->field(name => "subscribe", type => 'hidden'); + $form->field(name => "anonsubscribe", type => 'hidden'); + if (IkiWiki::Plugin::notifyemail->can("subscribe")) { + if (defined $username) { + $form->field(name => "subscribe", type => "checkbox", + options => [gettext("email replies to me")]); + } + elsif (IkiWiki::Plugin::passwordauth->can("anonuser")) { + $form->field(name => "anonsubscribe", type => "checkbox", + options => [gettext("email replies to me")]); + } + } if ($config{comments_allowauthor} and ! defined $session->param('name')) { $form->tmpl_param(allowauthor => 1); $form->field(name => 'author', type => 'text', size => '40'); + $form->field(name => 'email', type => 'text', size => '40'); $form->field(name => 'url', type => 'text', size => '40'); } else { $form->tmpl_param(allowauthor => 0); $form->field(name => 'author', type => 'hidden', value => '', force => 1); + $form->field(name => 'email', type => 'hidden', value => '', + force => 1); $form->field(name => 'url', type => 'hidden', value => '', force => 1); } @@ -399,6 +438,16 @@ sub editcomment ($$) { $page)); } + # There's no UI to get here, but someone might construct the URL, + # leading to a comment that exists in the repository but isn't + # shown + if (!pagespec_match($page, $config{comments_pagespec}, + location => $page)) { + error(sprintf(gettext( + "comments on page '%s' are not allowed"), + $page)); + } + if (pagespec_match($page, $config{comments_closed_pagespec}, location => $page)) { error(sprintf(gettext( @@ -425,10 +474,7 @@ sub editcomment ($$) { $content .= " nickname=\"$nickname\"\n"; } elsif (defined $session->remote_addr()) { - my $ip = $session->remote_addr(); - if ($ip =~ m/^([.0-9]+)$/) { - $content .= " ip=\"$1\"\n"; - } + $content .= " ip=\"".$session->remote_addr()."\"\n"; } if ($config{comments_allowauthor}) { @@ -490,6 +536,20 @@ sub editcomment ($$) { if ($form->submitted eq POST_COMMENT && $form->validate) { IkiWiki::checksessionexpiry($cgi, $session); + + if (IkiWiki::Plugin::notifyemail->can("subscribe")) { + my $subspec="comment($page)"; + if (defined $username && + length $form->field("subscribe")) { + IkiWiki::Plugin::notifyemail::subscribe( + $username, $subspec); + } + elsif (length $form->field("email") && + length $form->field("anonsubscribe")) { + IkiWiki::Plugin::notifyemail::anonsubscribe( + $form->field("email"), $subspec); + } + } $postcomment=1; my $ok=IkiWiki::check_content(content => $form->field('editcontent'), @@ 
-506,8 +566,8 @@ sub editcomment ($$) { $postcomment=0; if (! $ok) { - $location=unique_comment_location($page, $content, $config{srcdir}, "._comment_pending"); - writefile("$location._comment_pending", $config{srcdir}, $content); + $location=unique_comment_location($page, $content, $IkiWiki::Plugin::transient::transientdir, "._comment_pending"); + writefile("$location._comment_pending", $IkiWiki::Plugin::transient::transientdir, $content); # Refresh so anything that deals with pending # comments can be updated. @@ -575,7 +635,8 @@ sub editcomment ($$) { sub getavatar ($) { my $user=shift; - + return undef unless defined $user; + my $avatar; eval q{use Libravatar::URL}; if (! $@) { @@ -631,10 +692,17 @@ sub commentmoderation ($$) { } my $page=IkiWiki::dirname($f); - my $file="$config{srcdir}/$f"; + my $filedir=$IkiWiki::Plugin::transient::transientdir; + my $file="$filedir/$f"; if (! -e $file) { # old location - $file="$config{wikistatedir}/comments_pending/".$f; + $file="$config{srcdir}/$f"; + $filedir=$config{srcdir}; + if (! -e $file) { + # older location + $file="$config{wikistatedir}/comments_pending/".$f; + $filedir="$config{wikistatedir}/comments_pending"; + } } if ($action eq 'Accept') { @@ -649,7 +717,7 @@ sub commentmoderation ($$) { } require IkiWiki::Render; - IkiWiki::prune($file); + IkiWiki::prune($file, $filedir); } } @@ -748,6 +816,8 @@ sub comments_pending () { chdir($origdir) || die "chdir $origdir: $!"; }; + $find_comments->($IkiWiki::Plugin::transient::transientdir, "._comment_pending"); + # old location $find_comments->($config{srcdir}, "._comment_pending"); # old location $find_comments->("$config{wikistatedir}/comments_pending/", diff --git a/IkiWiki/Plugin/conditional.pm b/IkiWiki/Plugin/conditional.pm index 026078b3c..b450f1a0a 100644 --- a/IkiWiki/Plugin/conditional.pm +++ b/IkiWiki/Plugin/conditional.pm @@ -4,7 +4,6 @@ package IkiWiki::Plugin::conditional; use warnings; use strict; use IkiWiki 3.00; -use UNIVERSAL; sub import { hook(type => "getsetup", id => "conditional", call => \&getsetup); @@ -34,11 +33,15 @@ sub preprocess_if (@) { # An optimisation to avoid needless looping over every page # for simple uses of some of the tests. $params{test} =~ /^([\s\!()]*((enabled|sourcepage|destpage|included)\([^)]*\)|(and|or))[\s\!()]*)+$/) { - add_depends($params{page}, "($params{test}) and $params{page}"); $result=pagespec_match($params{page}, $params{test}, location => $params{page}, sourcepage => $params{page}, destpage => $params{destpage}); + my $i = $result->influences; + foreach my $k (keys %$i) { + # minor optimization: influences are always simple dependencies + $IkiWiki::depends_simple{$params{page}}{lc $k} |= $i->{$k}; + } } else { $result=pagespec_match_list($params{page}, $params{test}, diff --git a/IkiWiki/Plugin/cvs.pm b/IkiWiki/Plugin/cvs.pm index 788f51167..841aec914 100644 --- a/IkiWiki/Plugin/cvs.pm +++ b/IkiWiki/Plugin/cvs.pm @@ -216,14 +216,12 @@ sub rcs_add ($) { while ($file = pop @files_to_add) { if (@files_to_add == 0) { - # file cvs_runcvs('add', cvs_keyword_subst_args($file)) || - warn("cvs add $file failed\n"); + warn("cvs add file $file failed\n"); } else { - # directory cvs_runcvs('add', $file) || - warn("cvs add $file failed\n"); + warn("cvs add dir $file failed\n"); } } } @@ -316,7 +314,9 @@ sub rcs_recentchanges ($) { $oldrev =~ s/INITIAL/0/; $newrev =~ s/\(DEAD\)//; my $diffurl = defined $config{diffurl} ? 
$config{diffurl} : ""; - my $epage = uri_escape_utf8($page); + my $epage = join('/', + map { uri_escape_utf8($_) } split('/', $page) + ); $diffurl=~s/\[\[file\]\]/$epage/g; $diffurl=~s/\[\[r1\]\]/$oldrev/g; $diffurl=~s/\[\[r2\]\]/$newrev/g; @@ -396,11 +396,15 @@ sub rcs_diff ($;$) { my @cvsps = `env TZ=UTC cvsps -q --cvs-direct -z 30 -g -s $rev`; my $blank_lines_seen = 0; + # skip log, get to the diff while (my $line = shift @cvsps) { $blank_lines_seen++ if ($line =~ /^$/); last if $blank_lines_seen == 2; } + @cvsps = @cvsps[0..$maxlines-1] + if defined $maxlines && @cvsps > $maxlines; + if (wantarray) { return @cvsps; } @@ -491,24 +495,53 @@ sub cvs_keyword_subst_args ($) { my $filemime = File::MimeInfo::default($file); # if (-T $file) { - if (defined($filemime) && $filemime eq 'text/plain') { - return ($file); - } - else { - return ('-kb', $file); - } + defined($filemime) && $filemime eq 'text/plain' + ? return ('-kkv', $file) + : return ('-kb', $file); } sub cvs_runcvs(@) { my @cmd = @_; unshift @cmd, 'cvs', '-Q'; - local $CWD = $config{srcdir}; + # CVS can't operate outside a srcdir, so we're always setting $CWD. + # "local $CWD" restores the previous value when we go out of scope. + # Usually that's correct. But if we're removing the last file from + # a directory, the post-commit hook will exec in a working directory + # that's about to not exist (CVS will prune it). + # + # chdir() manually here, so we can selectively not chdir() back. + + my $oldcwd = $CWD; + chdir($config{srcdir}); + + eval q{ + use IPC::Open3; + use Symbol qw(gensym); + use IO::File; + }; + error($@) if $@; + + my $cvsout = ''; + my $cvserr = ''; + local *CATCHERR = IO::File->new_tmpfile; + my $pid = open3(gensym(), \*CATCHOUT, ">&CATCHERR", @cmd); + while (my $l = ) { + $cvsout .= $l + unless 1; + } + waitpid($pid, 0); + my $ret = $? >> 8; + seek CATCHERR, 0, 0; + while (my $l = ) { + $cvserr .= $l + unless $l =~ /^cvs commit: changing keyword expansion /; + } + + print STDOUT $cvsout; + print STDERR $cvserr; - open(my $savedout, ">&STDOUT"); - open(STDOUT, ">", "/dev/null"); - my $ret = system(@cmd); - open(STDOUT, ">&", $savedout); + chdir($oldcwd) if -d $oldcwd; return ($ret == 0) ? 
1 : 0; } diff --git a/IkiWiki/Plugin/editpage.pm b/IkiWiki/Plugin/editpage.pm index 54051c58c..3047869c4 100644 --- a/IkiWiki/Plugin/editpage.pm +++ b/IkiWiki/Plugin/editpage.pm @@ -39,7 +39,7 @@ sub refresh () { } if ($delete) { debug(sprintf(gettext("removing old preview %s"), $file)); - IkiWiki::prune("$config{destdir}/$file"); + IkiWiki::prune("$config{destdir}/$file", $config{destdir}); } } elsif (defined $mtime) { @@ -64,7 +64,8 @@ sub cgi_editpage ($$) { decode_cgi_utf8($q); - my @fields=qw(do rcsinfo subpage from page type editcontent editmessage); + my @fields=qw(do rcsinfo subpage from page type editcontent + editmessage subscribe); my @buttons=("Save Page", "Preview", "Cancel"); eval q{use CGI::FormBuilder}; error($@) if $@; @@ -157,6 +158,17 @@ sub cgi_editpage ($$) { noimageinline => 1, linktext => "FormattingHelp")); + my $cansubscribe=IkiWiki::Plugin::notifyemail->can("subscribe") + && IkiWiki::Plugin::comments->can("import") + && defined $session->param('name'); + if ($cansubscribe) { + $form->field(name => "subscribe", type => "checkbox", + options => [gettext("email comments to me")]); + } + else { + $form->field(name => "subscribe", type => 'hidden'); + } + my $previewing=0; if ($form->submitted eq "Cancel") { if ($form->field("do") eq "create" && defined $from) { @@ -388,10 +400,12 @@ sub cgi_editpage ($$) { eval { writefile($file, $config{srcdir}, $content) }; $config{cgi}=1; if ($@) { + # save $@ in case a called function clobbers it + my $error = $@; $form->field(name => "rcsinfo", value => rcs_prepedit($file), force => 1); my $mtemplate=template("editfailedsave.tmpl"); - $mtemplate->param(error_message => $@); + $mtemplate->param(error_message => $error); $form->tmpl_param("message", $mtemplate->output); $form->field("editcontent", value => $content, force => 1); $form->tmpl_param("page_select", 0); @@ -448,6 +462,12 @@ sub cgi_editpage ($$) { # caches and get the most recent version of the page. redirect($q, $baseurl."?updated"); } + + if ($cansubscribe && length $form->field("subscribe")) { + my $subspec="comment($page)"; + IkiWiki::Plugin::notifyemail::subscribe( + $session->param('name'), $subspec); + } } exit; diff --git a/IkiWiki/Plugin/edittemplate.pm b/IkiWiki/Plugin/edittemplate.pm index 061242fd8..c2a8da29f 100644 --- a/IkiWiki/Plugin/edittemplate.pm +++ b/IkiWiki/Plugin/edittemplate.pm @@ -130,13 +130,34 @@ sub filltemplate ($$) { $template=template("/".$template_page); }; if ($@) { + # gettext can clobber $@ + my $error = $@; # Indicate that the earlier preprocessor directive set # up a template that doesn't work. - return "[[!pagetemplate ".gettext("failed to process template:")." $@]]"; + return "[[!edittemplate ".gettext("failed to process template:")." $error]]"; } $template->param(name => $page); + if ($template->query(name => 'uuid')) { + my $uuid; + if (open(my $fh, "<", "/proc/sys/kernel/random/uuid")) { + $uuid = <$fh>; + chomp $uuid; + close $fh; + } + else { + eval { + require UUID::Tiny; + $uuid = UUID::Tiny::create_uuid_as_string(UUID::Tiny::UUID_V4()); + }; + } + $template->param(uuid => $uuid); + } + + my $time = time(); + $template->param(time => IkiWiki::date_3339($time)); + return $template->output; } diff --git a/IkiWiki/Plugin/filecheck.pm b/IkiWiki/Plugin/filecheck.pm index 4f4e67489..cdea5c706 100644 --- a/IkiWiki/Plugin/filecheck.pm +++ b/IkiWiki/Plugin/filecheck.pm @@ -48,7 +48,6 @@ sub getsetup () { plugin => { safe => 1, rebuild => undef, - section => "misc", }, } @@ -140,7 +139,7 @@ sub match_mimetype ($$;@) { my $mimeinfo_ok=! 
$@; my $mimetype; if ($mimeinfo_ok) { - my $mimetype=File::MimeInfo::Magic::magic($file); + $mimetype=File::MimeInfo::Magic::magic($file); } # Fall back to using file, which has a more complete diff --git a/IkiWiki/Plugin/git.pm b/IkiWiki/Plugin/git.pm index 535cd5fe0..75b89e476 100644 --- a/IkiWiki/Plugin/git.pm +++ b/IkiWiki/Plugin/git.pm @@ -29,6 +29,8 @@ sub import { hook(type => "rcs", id => "rcs_receive", call => \&rcs_receive); hook(type => "rcs", id => "rcs_preprevert", call => \&rcs_preprevert); hook(type => "rcs", id => "rcs_revert", call => \&rcs_revert); + hook(type => "rcs", id => "rcs_find_changes", call => \&rcs_find_changes); + hook(type => "rcs", id => "rcs_get_current_rev", call => \&rcs_get_current_rev); } sub checkconfig () { @@ -341,8 +343,8 @@ sub parse_diff_tree ($) { my $dt_ref = shift; # End of stream? - return if !defined @{ $dt_ref } || - !defined @{ $dt_ref }[0] || !length @{ $dt_ref }[0]; + return if ! @{ $dt_ref } || + !defined $dt_ref->[0] || !length $dt_ref->[0]; my %ci; # Header line. @@ -462,20 +464,63 @@ sub git_commit_info ($;$) { return wantarray ? @ci : $ci[0]; } -sub git_sha1 (;$) { - # Return head sha1sum (of given file). - my $file = shift || q{--}; +sub rcs_find_changes ($) { + my $oldrev=shift; + # Note that git log will sometimes show files being added that + # don't exist. Particularly, git merge -s ours can result in a + # merge commit where some files were not really added. + # This is why the code below verifies that the files really + # exist. + my @raw_lines = run_or_die('git', 'log', + '--pretty=raw', '--raw', '--abbrev=40', '--always', '-c', + '--no-renames', , '--reverse', + '-r', "$oldrev..HEAD", '--', '.'); + + # Due to --reverse, we see changes in chronological order. + my %changed; + my %deleted; + my $nullsha = 0 x 40; + my $newrev=$oldrev; + while (my $ci = parse_diff_tree(\@raw_lines)) { + $newrev=$ci->{sha1}; + foreach my $i (@{$ci->{details}}) { + my $file=$i->{file}; + if ($i->{sha1_to} eq $nullsha) { + if (! -e "$config{srcdir}/$file") { + delete $changed{$file}; + $deleted{$file}=1; + } + } + else { + if (-e "$config{srcdir}/$file") { + delete $deleted{$file}; + $changed{$file}=1; + } + } + } + } + + return (\%changed, \%deleted, $newrev); +} + +sub git_sha1_file ($) { + my $file=shift; + git_sha1("--", $file); +} + +sub git_sha1 (@) { # Ignore error since a non-existing file might be given. my ($sha1) = run_or_non('git', 'rev-list', '--max-count=1', 'HEAD', - '--', $file); - if ($sha1) { + '--', @_); + if (defined $sha1) { ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now } - else { - debug("Empty sha1sum for '$file'."); - } - return defined $sha1 ? $sha1 : q{}; + return defined $sha1 ? $sha1 : ''; +} + +sub rcs_get_current_rev () { + git_sha1(); } sub rcs_update () { @@ -491,7 +536,7 @@ sub rcs_prepedit ($) { # This will be later used in rcs_commit if a merge is required. my ($file) = @_; - return git_sha1($file); + return git_sha1_file($file); } sub rcs_commit (@) { @@ -502,7 +547,7 @@ sub rcs_commit (@) { # Check to see if the page has been changed by someone else since # rcs_prepedit was called. - my $cur = git_sha1($params{file}); + my $cur = git_sha1_file($params{file}); my ($prev) = $params{token} =~ /^($sha1_pattern)$/; # untaint if (defined $cur && defined $prev && $cur ne $prev) { @@ -553,7 +598,13 @@ sub rcs_commit_helper (@) { # Force git to allow empty commit messages. # (If this version of git supports it.) 
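# In outline (an illustrative summary, not itself part of the patch), the
# cascade below selects, by string comparison on the reported version:
#   1.7.8+ : --allow-empty-message --no-edit, plus a second
#            --allow-empty-message from the next test (an if, not an elsif)
#   1.7.2+ : --allow-empty-message
#   1.5.4+ : --cleanup=verbatim
# Since "ge" compares strings, a hypothetical "1.10.0" would sort before
# "1.5.4" and take the oldest fallback.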
my ($version)=`git --version` =~ /git version (.*)/; - if ($version ge "1.5.4") { + if ($version ge "1.7.8") { + push @opts, "--allow-empty-message", "--no-edit"; + } + if ($version ge "1.7.2") { + push @opts, "--allow-empty-message"; + } + elsif ($version ge "1.5.4") { push @opts, '--cleanup=verbatim'; } else { @@ -567,7 +618,7 @@ sub rcs_commit_helper (@) { # So we should ignore its exit status (hence run_or_non). if (run_or_non('git', 'commit', '-m', $params{message}, '-q', @opts)) { if (length $config{gitorigin_branch}) { - run_or_cry('git', 'push', $config{gitorigin_branch}); + run_or_cry('git', 'push', $config{gitorigin_branch}, $config{gitmaster_branch}); } } @@ -618,7 +669,9 @@ sub rcs_recentchanges ($) { my @pages; foreach my $detail (@{ $ci->{'details'} }) { my $file = $detail->{'file'}; - my $efile = uri_escape_utf8($file); + my $efile = join('/', + map { uri_escape_utf8($_) } split('/', $file) + ); my $diffurl = defined $config{'diffurl'} ? $config{'diffurl'} : ""; $diffurl =~ s/\[\[file\]\]/$efile/go; diff --git a/IkiWiki/Plugin/graphviz.pm b/IkiWiki/Plugin/graphviz.pm index b9f997e04..d4018edaa 100644 --- a/IkiWiki/Plugin/graphviz.pm +++ b/IkiWiki/Plugin/graphviz.pm @@ -132,6 +132,7 @@ sub graph (@) { }, "text"); $p->parse($src); $p->eof; + $s=~s/\[ href= \]//g; # handle self-links $params{src}=$s; } else { diff --git a/IkiWiki/Plugin/highlight.pm b/IkiWiki/Plugin/highlight.pm index 4e86207f1..ce919748a 100644 --- a/IkiWiki/Plugin/highlight.pm +++ b/IkiWiki/Plugin/highlight.pm @@ -60,14 +60,22 @@ sub checkconfig () { } if (! exists $config{filetypes_conf}) { - $config{filetypes_conf}= - ($data_dir ? $data_dir->getConfDir() : "/etc/highlight/") - . "filetypes.conf"; + if (! $data_dir ) { + $config{filetypes_conf}= "/etc/highlight/filetypes.conf"; + } elsif ( $data_dir -> can('searchFile') ) { + # 3.18 + + $config{filetypes_conf}= + $data_dir -> searchFile("filetypes.conf"); + } else { + # 3.9 + + $config{filetypes_conf}= + $data_dir -> getConfDir() . "/filetypes.conf"; + } } + # note that this is only used for old versions of highlight + # where $data_dir will not be defined. if (! exists $config{langdefdir}) { - $config{langdefdir}= - ($data_dir ? $data_dir->getLangPath("") - : "/usr/share/highlight/langDefs"); + $config{langdefdir}= "/usr/share/highlight/langDefs"; } if (exists $config{tohighlight} && read_filetypes()) { @@ -89,7 +97,7 @@ sub checkconfig () { id => $file, call => sub { my %params=@_; - highlight($langfile, $params{content}); + highlight($langfile, $file, $params{content}); }, longname => sprintf(gettext("Source code: %s"), $file), @opts, @@ -106,7 +114,7 @@ sub htmlizeformat { return; } - return Encode::decode_utf8(highlight($langfile, shift)); + return Encode::decode_utf8(highlight($langfile, $format, shift)); } my %ext2lang; @@ -147,17 +155,27 @@ sub read_filetypes () { } +sub searchlangdef { + my $lang=shift; + + if ($data_dir) { + return $data_dir->getLangPath($lang . ".lang"); + } else { + return "$config{langdefdir}/$lang.lang"; + } + +} # Given a filename extension, determines the language definition to # use to highlight it. 
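# A hedged usage sketch (hypothetical caller; assumes a highlight library
# new enough that searchlangdef() resolves via $data_dir->getLangPath()):
#
#	my $langfile = ext2langfile("pl");
#	# checks %highlighters for a cached entry, then the ext2lang table
#	# built from filetypes.conf (which may map "pl" to another language's
#	# .lang file), falling back to the extension's own .lang file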
sub ext2langfile ($) { my $ext=shift; - my $langfile="$config{langdefdir}/$ext.lang"; + my $langfile=searchlangdef($ext); return $langfile if exists $highlighters{$langfile}; read_filetypes() unless $filetypes_read; if (exists $ext2lang{$ext}) { - return "$config{langdefdir}/$ext2lang{$ext}.lang"; + return searchlangdef($ext2lang{$ext}); } # If a language only has one common extension, it will not # be listed in filetypes, so check the langfile. @@ -172,6 +190,7 @@ sub ext2langfile ($) { # Interface to the highlight C library. sub highlight ($$) { my $langfile=shift; + my $extorfile=shift; my $input=shift; eval q{use highlight}; @@ -200,7 +219,7 @@ sub highlight ($$) { $gen=$highlighters{$langfile}; } - return $gen->generateString($input); + return "
".$gen->generateString($input)."
"; } 1 diff --git a/IkiWiki/Plugin/htmlscrubber.pm b/IkiWiki/Plugin/htmlscrubber.pm index a58a27d52..36c012c73 100644 --- a/IkiWiki/Plugin/htmlscrubber.pm +++ b/IkiWiki/Plugin/htmlscrubber.pm @@ -29,6 +29,7 @@ sub import { "irc", "ircs", "lastfm", "ldaps", "magnet", "mms", "msnim", "notes", "rsync", "secondlife", "skype", "ssh", "sftp", "smb", "sms", "snews", "webcal", "ymsgr", + "bitcoin", "git", "svn", "bzr", "darcs", "hg" ); # data is a special case. Allow a few data:image/ types, # but disallow data:text/javascript and everything else. diff --git a/IkiWiki/Plugin/httpauth.pm b/IkiWiki/Plugin/httpauth.pm index cb488449d..76d574b2a 100644 --- a/IkiWiki/Plugin/httpauth.pm +++ b/IkiWiki/Plugin/httpauth.pm @@ -7,6 +7,7 @@ use strict; use IkiWiki 3.00; sub import { + hook(type => "checkconfig", id => "httpauth", call => \&checkconfig); hook(type => "getsetup", id => "httpauth", call => \&getsetup); hook(type => "auth", id => "httpauth", call => \&auth); hook(type => "formbuilder_setup", id => "httpauth", @@ -37,6 +38,19 @@ sub getsetup () { rebuild => 0, }, } + +sub checkconfig () { + if ($config{cgi} && defined $config{cgiauthurl} && + keys %{$IkiWiki::hooks{auth}} < 2) { + # There are no other auth hooks registered, so avoid + # the normal signin form, and jump right to httpauth. + require IkiWiki::CGI; + inject(name => "IkiWiki::cgi_signin", call => sub ($$) { + my $cgi=shift; + redir_cgiauthurl($cgi, $cgi->query_string()); + }); + } +} sub redir_cgiauthurl ($;@) { my $cgi=shift; diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm index bdab5793b..f578526cc 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm @@ -19,14 +19,14 @@ sub import { hook(type => "checkconfig", id => "inline", call => \&checkconfig); hook(type => "sessioncgi", id => "inline", call => \&sessioncgi); hook(type => "preprocess", id => "inline", - call => \&IkiWiki::preprocess_inline); + call => \&IkiWiki::preprocess_inline, scan => 1); hook(type => "pagetemplate", id => "inline", call => \&IkiWiki::pagetemplate_inline); hook(type => "format", id => "inline", call => \&format, first => 1); # Hook to change to do pinging since it's called late. # This ensures each page only pings once and prevents slow # pings interrupting page builds. - hook(type => "change", id => "inline", call => \&IkiWiki::pingurl); + hook(type => "rendered", id => "inline", call => \&IkiWiki::pingurl); } sub getopt () { @@ -155,6 +155,23 @@ sub preprocess_inline (@) { if (! exists $params{pages} && ! exists $params{pagenames}) { error gettext("missing pages parameter"); } + + if (! defined wantarray) { + # Running in scan mode: only do the essentials + + if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) { + # default to sorting age, the same as inline itself, + # but let the params override that + IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params); + } + + return; + } + + if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) { + scalar IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params); + } + my $raw=yesno($params{raw}); my $archive=yesno($params{archive}); my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss}; @@ -169,6 +186,13 @@ sub preprocess_inline (@) { if (! exists $params{feedshow} && exists $params{show}) { $params{feedshow}=$params{show}; } + my $title; + if (exists $params{title}) { + $title = $params{title}; + } + else { + $title = $params{page} ne "index" ? 
pagetitle($params{page}) : $config{wikiname}; + } my $desc; if (exists $params{description}) { $desc = $params{description} @@ -194,8 +218,7 @@ sub preprocess_inline (@) { } } - @list = map { bestlink($params{page}, $_) } - split ' ', $params{pagenames}; + @list = split ' ', $params{pagenames}; if (yesno($params{reverse})) { @list=reverse(@list); @@ -204,6 +227,8 @@ sub preprocess_inline (@) { foreach my $p (@list) { add_depends($params{page}, $p, deptype($quick ? "presence" : "content")); } + + @list = grep { exists $pagesources{$_} } @list; } else { my $num=0; @@ -304,8 +329,12 @@ sub preprocess_inline (@) { my $ret=""; - if (length $config{cgiurl} && ! $params{preview} && (exists $params{rootpage} || - (exists $params{postform} && yesno($params{postform}))) && + my $postform = (exists $params{rootpage}); + if (exists $params{postform}) { + $postform = yesno($params{postform}); + } + + if (length $config{cgiurl} && ! $params{preview} && $postform && IkiWiki->can("cgi_editpage")) { # Add a blog post form, with feed buttons. my $formtemplate=template_depends("blogpost.tmpl", $params{page}, blind_cache => 1); @@ -366,7 +395,9 @@ sub preprocess_inline (@) { blind_cache => 1); }; if ($@) { - error sprintf(gettext("failed to process template %s"), $params{template}.".tmpl").": $@"; + # gettext can clobber $@ + my $error = $@; + error sprintf(gettext("failed to process template %s"), $params{template}.".tmpl").": $error"; } } my $needcontent=$raw || (!($archive && $quick) && $template->query(name => 'content')); @@ -447,7 +478,7 @@ sub preprocess_inline (@) { if (! $params{preview}) { writefile($rssp, $config{destdir}, genfeed("rss", - $config{url}."/".$rssp, $desc, $params{guid}, $params{page}, @feedlist)); + $config{url}."/".$rssp, $title, $desc, $params{guid}, $params{page}, @feedlist)); $toping{$params{destpage}}=1 unless $config{rebuild}; $feedlinks{$params{destpage}}.=qq{}; } @@ -457,7 +488,7 @@ sub preprocess_inline (@) { will_render($params{destpage}, $atomp); if (! $params{preview}) { writefile($atomp, $config{destdir}, - genfeed("atom", $config{url}."/".$atomp, $desc, $params{guid}, $params{page}, @feedlist)); + genfeed("atom", $config{url}."/".$atomp, $title, $desc, $params{guid}, $params{page}, @feedlist)); $toping{$params{destpage}}=1 unless $config{rebuild}; $feedlinks{$params{destpage}}.=qq{}; } @@ -593,9 +624,30 @@ sub absolute_urls ($$) { return $ret; } +sub genenclosure { + my $itemtemplate=shift; + my $url=shift; + my $file=shift; + + return unless $itemtemplate->query(name => "enclosure"); + + my $size=(srcfile_stat($file))[8]; + my $mime="unknown"; + eval q{use File::MimeInfo}; + if (! 
$@) { + $mime = mimetype($file); + } + $itemtemplate->param( + enclosure => $url, + type => $mime, + length => $size, + ); +} + sub genfeed ($$$$$@) { my $feedtype=shift; my $feedurl=shift; + my $feedtitle=shift; my $feeddesc=shift; my $guid=shift; my $page=shift; @@ -609,6 +661,7 @@ sub genfeed ($$$$$@) { foreach my $p (@pages) { my $u=URI->new(encode_utf8(urlto($p, "", 1))); my $pcontent = absolute_urls(get_inline_content($p, $page), $url); + my $fancy_enclosure_seen = 0; $itemtemplate->param( title => pagetitle(basename($p)), @@ -630,32 +683,23 @@ sub genfeed ($$$$$@) { $itemtemplate->param(mdate_822 => date_822($pagestate{$p}{meta}{updated})); $itemtemplate->param(mdate_3339 => date_3339($pagestate{$p}{meta}{updated})); } - } - if ($itemtemplate->query(name => "enclosure")) { - my $file=$pagesources{$p}; - my $type=pagetype($file); - if (defined $type) { - $itemtemplate->param(content => $pcontent); - } - else { - my $size=(srcfile_stat($file))[8]; - my $mime="unknown"; - eval q{use File::MimeInfo}; - if (! $@) { - $mime = mimetype($file); - } - $itemtemplate->param( - enclosure => $u, - type => $mime, - length => $size, - ); + if (exists $pagestate{$p}{meta}{enclosure}) { + my $absurl = $pagestate{$p}{meta}{enclosure}; + my $file = $pagestate{$p}{meta}{enclosurefile}; + genenclosure($itemtemplate, $absurl, $file); + $fancy_enclosure_seen = 1; } } - else { - $itemtemplate->param(content => $pcontent); + + my $file=$pagesources{$p}; + unless ($fancy_enclosure_seen || defined(pagetype($file))) { + genenclosure($itemtemplate, $u, $file); + $itemtemplate->param(simplepodcast => 1); } + $itemtemplate->param(content => $pcontent); + run_hooks(pagetemplate => sub { shift->(page => $p, destpage => $page, template => $itemtemplate); @@ -669,13 +713,14 @@ sub genfeed ($$$$$@) { my $template=template_depends($feedtype."page.tmpl", $page, blind_cache => 1); $template->param( - title => $page ne "index" ? pagetitle($page) : $config{wikiname}, + title => $feedtitle, wikiname => $config{wikiname}, pageurl => $url, content => $content, feeddesc => $feeddesc, guid => $guid, feeddate => date_3339($lasttime), + feeddate_822 => date_822($lasttime), feedurl => $feedurl, ); run_hooks(pagetemplate => sub { diff --git a/IkiWiki/Plugin/link.pm b/IkiWiki/Plugin/link.pm index ef01f1107..1ba28eafd 100644 --- a/IkiWiki/Plugin/link.pm +++ b/IkiWiki/Plugin/link.pm @@ -144,9 +144,9 @@ sub renamepage (@) { my $old=$params{oldpage}; my $new=$params{newpage}; - $params{content} =~ s{(?\n"; - if ($indent > 0) { - $map .= "\n"; + $map .= ($spaces x $indent) . "\n"; + if ($indent > 1) { + $map .= ($spaces x $indent) . "\n"; } + $indent--; } while ($depth < $indent) { - $indent--; - $map .= "\n"; - if ($indent > 0) { - $map .= "\n"; + $map .= ($spaces x $indent) . "\n"; + if ($indent > 1) { + $map .= ($spaces x $indent) . "\n"; } + $indent--; } my @bits=split("/", $item); my $p=""; - $indent++ unless length $parent; $p.="/".shift(@bits) for 1..$indent; while ($depth > $indent) { - if (@bits && !(length $parent && "/$parent" eq $p)) { + $indent++; + if ($indent > 1) { + $map .= ($spaces x $indent) . "
    \n"; + } + if ($depth > $indent) { + $p.="/".shift(@bits); $addparent=$p; $addparent=~s/^\///; - $map .= "
  • " + $map .= ($spaces x $indent) . "
  • \n"; + $map .= ($spaces x $indent) .htmllink($params{page}, $params{destpage}, "/".$common_prefix.$p, class => "mapparent", noimageinline => 1) @@ -130,14 +137,10 @@ sub preprocess (@) { else { $openli=0; } - $indent++; - $p.="/".shift(@bits) if @bits; - if ($indent > 1) { - $map .= "
      \n"; - } } - $map .= "\n" if $openli; - $map .= "
    • " + $map .= ($spaces x $indent) . "
    • \n" if $openli; + $map .= ($spaces x $indent) . "
    • \n"; + $map .= ($spaces x $indent) .htmllink($params{page}, $params{destpage}, "/".$common_prefix."/".$item, @linktext, @@ -147,9 +150,12 @@ sub preprocess (@) { $parent=$item; } while ($indent > 0) { + $map .= ($spaces x $indent) . "
    • \n"; $indent--; - $map .= "\n
    \n"; + $map .= ($spaces x $indent) . "
\n"; } + $map =~ s{\n *\n *
    \n}{\n}gs; + $map =~ s{}{}g; $map .= "\n"; return $map; } diff --git a/IkiWiki/Plugin/mdwn.pm b/IkiWiki/Plugin/mdwn.pm index 430194bff..014e78eea 100644 --- a/IkiWiki/Plugin/mdwn.pm +++ b/IkiWiki/Plugin/mdwn.pm @@ -92,8 +92,9 @@ sub htmlize (@) { $markdown_sub=\&Markdown::Markdown; } else { + my $error = $@; do "/usr/bin/markdown" || - error(sprintf(gettext("failed to load Markdown.pm perl module (%s) or /usr/bin/markdown (%s)"), $@, $!)); + error(sprintf(gettext("failed to load Markdown.pm perl module (%s) or /usr/bin/markdown (%s)"), $error, $!)); $markdown_sub=\&Markdown::Markdown; } } diff --git a/IkiWiki/Plugin/meta.pm b/IkiWiki/Plugin/meta.pm index 220fff9dc..e7b96bdf1 100644 --- a/IkiWiki/Plugin/meta.pm +++ b/IkiWiki/Plugin/meta.pm @@ -107,12 +107,12 @@ sub preprocess (@) { # fallthrough } elsif ($key eq 'license') { - push @{$metaheaders{$page}}, ''; + push @{$metaheaders{$page}}, ''; $pagestate{$page}{meta}{license}=$value; return ""; } elsif ($key eq 'copyright') { - push @{$metaheaders{$page}}, ''; + push @{$metaheaders{$page}}, ''; $pagestate{$page}{meta}{copyright}=$value; return ""; } @@ -121,6 +121,18 @@ sub preprocess (@) { add_link($page, $value); return ""; } + elsif ($key eq 'enclosure') { + my $link=bestlink($page, $value); + if (! length $link) { + error gettext("enclosure not found") + } + add_depends($page, $link, deptype("presence")); + + $value=urlto($link, $page, 1); + $pagestate{$page}{meta}{enclosure}=$value; + $pagestate{$page}{meta}{enclosurefile}=$link; + # fallthrough + } elsif ($key eq 'author') { $pagestate{$page}{meta}{author}=$value; if (exists $params{sortas}) { @@ -275,17 +287,23 @@ sub preprocess (@) { push @{$metaheaders{$page}}, ''; } - elsif ($key eq 'description') { - push @{$metaheaders{$page}}, ''; } elsif ($key eq 'name') { - push @{$metaheaders{$page}}, scrub('', $page, $destpage); } + elsif ($key eq 'keywords') { + # Make sure the keyword string is safe: only allow alphanumeric + # characters, space and comma and strip the rest. + $value =~ s/[^[:alnum:], ]+//g; + push @{$metaheaders{$page}}, ''; + } else { push @{$metaheaders{$page}}, scrub(' 1, + rebuild => 1, + }, +} + +sub checkconfig () { + if (! defined $config{mirrorlist_use_cgi}) { + $config{mirrorlist_use_cgi}=0; + } } sub pagetemplate (@) { @@ -46,7 +59,9 @@ sub mirrorlist ($) { join(", ", map { qq{{$_}."?do=goto&page=$page" : + $config{mirrorlist}->{$_}."/".urlto($page, "") ). qq{">$_} } keys %{$config{mirrorlist}} ). diff --git a/IkiWiki/Plugin/notifyemail.pm b/IkiWiki/Plugin/notifyemail.pm new file mode 100644 index 000000000..b50a22a00 --- /dev/null +++ b/IkiWiki/Plugin/notifyemail.pm @@ -0,0 +1,169 @@ +#!/usr/bin/perl +package IkiWiki::Plugin::notifyemail; + +use warnings; +use strict; +use IkiWiki 3.00; + +sub import { + hook(type => "formbuilder", id => "notifyemail", call => \&formbuilder); + hook(type => "getsetup", id => "notifyemail", call => \&getsetup); + hook(type => "changes", id => "notifyemail", call => \¬ify); +} + +sub getsetup () { + return + plugin => { + safe => 1, + rebuild => 0, + }, +} + +sub formbuilder (@) { + my %params=@_; + my $form=$params{form}; + return unless $form->title eq "preferences"; + my $session=$params{session}; + my $username=$session->param("name"); + $form->field(name => "subscriptions", size => 50, + fieldset => "preferences", + comment => "(".htmllink("", "", "ikiwiki/PageSpec", noimageinline => 1).")"); + if (! 
$form->submitted) { + $form->field(name => "subscriptions", force => 1, + value => getsubscriptions($username)); + } + elsif ($form->submitted eq "Save Preferences" && $form->validate && + defined $form->field("subscriptions")) { + setsubscriptions($username, $form->field('subscriptions')); + } +} + +sub getsubscriptions ($) { + my $user=shift; + eval q{use IkiWiki::UserInfo}; + error $@ if $@; + IkiWiki::userinfo_get($user, "subscriptions"); +} + +sub setsubscriptions ($$) { + my $user=shift; + my $subscriptions=shift; + eval q{use IkiWiki::UserInfo}; + error $@ if $@; + IkiWiki::userinfo_set($user, "subscriptions", $subscriptions); +} + +# Called by other plugins to subscribe the user to a pagespec. +sub subscribe ($$) { + my $user=shift; + my $addpagespec=shift; + my $pagespec=getsubscriptions($user); + setsubscriptions($user, + length $pagespec ? $pagespec." or ".$addpagespec : $addpagespec); +} + +# Called by other plugins to subscribe an email to a pagespec. +sub anonsubscribe ($$) { + my $email=shift; + my $addpagespec=shift; + if (IkiWiki::Plugin::passwordauth->can("anonuser")) { + my $user=IkiWiki::Plugin::passwordauth::anonuser($email); + if (! defined $user) { + error(gettext("Cannot subscribe your email address without logging in.")); + } + subscribe($user, $addpagespec); + } +} + +sub notify (@) { + my @files=@_; + return unless @files; + return if $config{rebuild}; + + eval q{use Mail::Sendmail}; + error $@ if $@; + eval q{use IkiWiki::UserInfo}; + error $@ if $@; + eval q{use URI}; + error($@) if $@; + + # Daemonize, in case the mail sending takes a while. + defined(my $pid = fork) or error("Can't fork: $!"); + return if $pid; # parent + chdir '/'; + open STDIN, '/dev/null'; + open STDOUT, '>/dev/null'; + POSIX::setsid() or error("Can't start a new session: $!"); + open STDERR, '>&STDOUT' or error("Can't dup stdout: $!"); + + # Don't need to keep a lock on the wiki as a daemon. + IkiWiki::unlockwiki(); + + my $userinfo=IkiWiki::userinfo_retrieve(); + exit 0 unless defined $userinfo; + + foreach my $user (keys %$userinfo) { + my $pagespec=$userinfo->{$user}->{"subscriptions"}; + next unless defined $pagespec && length $pagespec; + my $email=$userinfo->{$user}->{email}; + next unless defined $email && length $email; + + foreach my $file (@files) { + my $page=pagename($file); + next unless pagespec_match($page, $pagespec); + my $content=""; + my $showcontent=defined pagetype($file); + if ($showcontent) { + $content=eval { readfile(srcfile($file)) }; + $showcontent=0 if $@; + } + my $url; + if (! IkiWiki::isinternal($page)) { + $url=urlto($page, undef, 1); + } + elsif (defined $pagestate{$page}{meta}{permalink}) { + # need to use permalink for an internal page + $url=URI->new_abs($pagestate{$page}{meta}{permalink}, $config{url}); + } + else { + $url=$config{url}; # crummy fallback url + } + my $pagedesc=$page; + if (defined $pagestate{$page}{meta}{title} && + length $pagestate{$page}{meta}{title}) { + $pagedesc=qq{"$pagestate{$page}{meta}{title}"}; + } + my $subject=gettext("change notification:")." ".$pagedesc; + if (pagetype($file) eq '_comment') { + $subject=gettext("comment notification:")." 
".$pagedesc; + } + my $prefsurl=IkiWiki::cgiurl_abs(do => 'prefs'); + if (IkiWiki::Plugin::passwordauth->can("anonusertoken")) { + my $token=IkiWiki::Plugin::passwordauth::anonusertoken($userinfo->{$user}); + $prefsurl=IkiWiki::cgiurl_abs( + do => 'tokenauth', + name => $user, + token => $token, + ) if defined $token; + } + my $template=template("notifyemail.tmpl"); + $template->param( + wikiname => $config{wikiname}, + url => $url, + prefsurl => $prefsurl, + showcontent => $showcontent, + content => $content, + ); + sendmail( + To => $email, + From => "$config{wikiname} <$config{adminemail}>", + Subject => $subject, + Message => $template->output, + ); + } + } + + exit 0; # daemon child +} + +1 diff --git a/IkiWiki/Plugin/opendiscussion.pm b/IkiWiki/Plugin/opendiscussion.pm index 2805f60ef..808d3cd2b 100644 --- a/IkiWiki/Plugin/opendiscussion.pm +++ b/IkiWiki/Plugin/opendiscussion.pm @@ -25,7 +25,7 @@ sub canedit ($$) { my $cgi=shift; my $session=shift; - return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i; + return "" if $config{discussion} && $page=~/(\/|^)\Q$config{discussionpage}\E$/i; return "" if pagespec_match($page, "postcomment(*)"); return undef; } diff --git a/IkiWiki/Plugin/openid.pm b/IkiWiki/Plugin/openid.pm index b6642619a..3b96e4b8e 100644 --- a/IkiWiki/Plugin/openid.pm +++ b/IkiWiki/Plugin/openid.pm @@ -100,9 +100,10 @@ sub formbuilder_setup (@) { IkiWiki::openiduser($session->param("name"))) { $form->field(name => "openid_identifier", disabled => 1, label => htmllink("", "", "ikiwiki/OpenID", noimageinline => 1), - value => $session->param("name"), - size => length($session->param("name")), force => 1, - fieldset => "login"); + value => "", + size => 1, force => 1, + fieldset => "login", + comment => $session->param("name")); $form->field(name => "email", type => "hidden"); } } @@ -155,8 +156,8 @@ sub validate ($$$;$) { $trust_root=$cgiurl if ! defined $trust_root; my $check_url = $claimed_identity->check_url( - return_to => "$cgiurl?do=postsignin", - trust_root => $trust_root, + return_to => auto_upgrade_https($q, "$cgiurl?do=postsignin"), + trust_root => auto_upgrade_https($q, $trust_root), delayed_return => 1, ); # Redirect the user to the OpenID server, which will @@ -237,10 +238,10 @@ sub getobj ($$) { my $ua; eval q{use LWPx::ParanoidAgent}; if (! $@) { - $ua=LWPx::ParanoidAgent->new; + $ua=LWPx::ParanoidAgent->new(agent => $config{useragent}); } else { - $ua=LWP::UserAgent->new; + $ua=useragent(); } # Store the secret in the session. @@ -257,10 +258,19 @@ sub getobj ($$) { ua => $ua, args => $q, consumer_secret => sub { return shift()+$secret }, - required_root => $cgiurl, + required_root => auto_upgrade_https($q, $cgiurl), ); } +sub auto_upgrade_https { + my $q=shift; + my $url=shift; + if ($q->https()) { + $url=~s/^http:/https:/i; + } + return $url; +} + sub load_openid_module { # Give up if module is unavailable to avoid needing to depend on it. eval q{use Net::OpenID::Consumer}; diff --git a/IkiWiki/Plugin/osm.pm b/IkiWiki/Plugin/osm.pm index be499495c..472e26945 100644 --- a/IkiWiki/Plugin/osm.pm +++ b/IkiWiki/Plugin/osm.pm @@ -60,37 +60,66 @@ sub getsetup () { safe => 0, rebuild => 1, }, + osm_openlayers_url => { + type => "string", + example => "http://www.openlayers.org/api/OpenLayers.js", + description => "Url for the OpenLayers.js file", + safe => 0, + rebuild => 1, + }, + osm_layers => { + type => "string", + example => { 'OSM', 'GoogleSatellite' }, + description => "Layers to use in the map. 
Can be either the 'OSM' string or a type option for Google maps (GoogleNormal, GoogleSatellite, GoogleHybrid or GooglePhysical). It can also be an arbitrary URL in a syntax acceptable for OpenLayers.Layer.OSM.url parameter.", + safe => 0, + rebuild => 1, + }, + osm_google_apikey => { + type => "string", + example => "", + description => "Google maps API key, Google layer not used if missing, see https://code.google.com/apis/console/ to get an API key", + safe => 1, + rebuild => 1, + }, +} + +sub register_rendered_files { + my $map = shift; + my $page = shift; + my $dest = shift; + + if ($page eq $dest) { + my %formats = get_formats(); + if ($formats{'GeoJSON'}) { + will_render($page, "$map/pois.json"); + } + if ($formats{'CSV'}) { + will_render($page, "$map/pois.txt"); + } + if ($formats{'KML'}) { + will_render($page, "$map/pois.kml"); + } + } } sub preprocess { my %params=@_; - my $page = $params{'page'}; - my $dest = $params{'destpage'}; - my $loc = $params{'loc'}; # sanitized below - my $lat = $params{'lat'}; # sanitized below - my $lon = $params{'lon'}; # sanitized below - my $href = $params{'href'}; + my $page = $params{page}; + my $dest = $params{destpage}; + my $loc = $params{loc}; # sanitized below + my $lat = $params{lat}; # sanitized below + my $lon = $params{lon}; # sanitized below + my $href = $params{href}; - my $fullscreen = defined($params{'fullscreen'}); # sanitized here my ($width, $height, $float); - if ($fullscreen) { - $height = '100%'; - $width = '100%'; - $float = 0; - } - else { - $height = scrub($params{'height'} || "300px", $page, $dest); # sanitized here - $width = scrub($params{'width'} || "500px", $page, $dest); # sanitized here - $float = (defined($params{'right'}) && 'right') || (defined($params{'left'}) && 'left'); # sanitized here - } + $height = scrub($params{'height'} || "300px", $page, $dest); # sanitized here + $width = scrub($params{'width'} || "500px", $page, $dest); # sanitized here + $float = (defined($params{'right'}) && 'right') || (defined($params{'left'}) && 'left'); # sanitized here + my $zoom = scrub($params{'zoom'} // $config{'osm_default_zoom'} // 15, $page, $dest); # sanitized below my $map; - if ($fullscreen) { - $map = $params{'map'} || $page; - } - else { - $map = $params{'map'} || 'map'; - } + $map = $params{'map'} || 'map'; + $map = scrub($map, $page, $dest); # sanitized here my $name = scrub($params{'name'} || $map, $page, $dest); @@ -101,16 +130,27 @@ sub preprocess { if ($zoom !~ /^\d\d?$/ || $zoom < 2 || $zoom > 18) { error("Bad zoom"); } + + if (! defined $href || ! length $href) { + $href=IkiWiki::cgiurl( + do => "osm", + map => $map, + ); + } + + register_rendered_files($map, $page, $dest); + $pagestate{$page}{'osm'}{$map}{'displays'}{$name} = { height => $height, width => $width, float => $float, zoom => $zoom, - fullscreen => $fullscreen, + fullscreen => 0, editable => defined($params{'editable'}), lat => $lat, lon => $lon, href => $href, + google_apikey => $config{'osm_google_apikey'}, }; return "
    "; } @@ -152,33 +192,9 @@ sub process_waypoint { } } $icon = urlto($icon, $dest, 1); + $icon =~ s!/*$!!; # hack - urlto shouldn't be appending a slash in the first place $tag = '' unless $tag; - if ($page eq $dest) { - if (!defined($config{'osm_format'}) || !$config{'osm_format'}) { - $config{'osm_format'} = 'KML'; - } - my %formats = map { $_ => 1 } split(/, */, $config{'osm_format'}); - if ($formats{'GeoJSON'}) { - will_render($page,$config{destdir} . "/$map/pois.json"); - } - if ($formats{'CSV'}) { - will_render($page,$config{destdir} . "/$map/pois.txt"); - } - if ($formats{'KML'}) { - will_render($page,$config{destdir} . "/$map/pois.kml"); - } - } - my $href = IkiWiki::cgiurl( - do => "osm", - map => $map, - lat => $lat, - lon => $lon, - zoom => $zoom, - ); - if (defined($destsources{htmlpage($map)})) { - $href = urlto($map,$page) . "?lat=$lat&lon=$lon&zoom=$zoom"; - $href =~ s!&!&!g; - } + register_rendered_files($map, $page, $dest); $pagestate{$page}{'osm'}{$map}{'waypoints'}{$name} = { page => $page, desc => $desc, @@ -186,18 +202,28 @@ sub process_waypoint { tag => $tag, lat => $lat, lon => $lon, - # how to link back to the page from the map, not to be + # How to link back to the page from the map, not to be # confused with the URL of the map itself sent to the - # embeded map below - href => urlto($page,$map), + # embeded map below. Note: used in generated KML etc file, + # so must be absolute. + href => urlto($page), }; + + my $mapurl = IkiWiki::cgiurl( + do => "osm", + map => $map, + lat => $lat, + lon => $lon, + zoom => $zoom, + ); my $output = ''; if (defined($params{'embed'})) { - $params{'href'} = $href; # propagate down to embeded - $output .= preprocess(%params); + $output .= preprocess(%params, + href => $mapurl, + ); } if (!$hidden) { - $output .= ""; + $output .= ""; } return $output; } @@ -292,10 +318,7 @@ sub savestate { } } - if (!defined($config{'osm_format'}) || !$config{'osm_format'}) { - $config{'osm_format'} = 'KML'; - } - my %formats = map { $_ => 1 } split(/, */, $config{'osm_format'}); + my %formats = get_formats(); if ($formats{'GeoJSON'}) { writejson(\%waypoints, \%linestrings); } @@ -335,59 +358,31 @@ sub writekml($;$) { eval q{use XML::Writer}; error $@ if $@; foreach my $map (keys %waypoints) { - -=pod -Sample placemark: - - - - - Simple placemark - Attached to the ground. Intelligently places itself - at the height of the underlying terrain. - - -122.0822035425683,37.42228990140251,0 - - - - -Sample style: - - - - - -=cut - my $output; my $writer = XML::Writer->new( OUTPUT => \$output, - DATA_MODE => 1, ENCODING => 'UTF-8'); + DATA_MODE => 1, DATA_INDENT => ' ', ENCODING => 'UTF-8'); $writer->xmlDecl(); $writer->startTag("kml", "xmlns" => "http://www.opengis.net/kml/2.2"); + $writer->startTag("Document"); # first pass: get the icons + my %tags_map = (); # keep track of tags seen foreach my $name (keys %{$waypoints{$map}}) { my %options = %{$waypoints{$map}{$name}}; - $writer->startTag("Style", id => $options{tag}); - $writer->startTag("IconStyle"); - $writer->startTag("Icon"); - $writer->startTag("href"); - $writer->characters($options{icon}); - $writer->endTag(); - $writer->endTag(); - $writer->endTag(); - $writer->endTag(); + if (!$tags_map{$options{tag}}) { + debug("found new style " . 
$options{tag}); + $tags_map{$options{tag}} = (); + $writer->startTag("Style", id => $options{tag}); + $writer->startTag("IconStyle"); + $writer->startTag("Icon"); + $writer->startTag("href"); + $writer->characters($options{icon}); + $writer->endTag(); + $writer->endTag(); + $writer->endTag(); + $writer->endTag(); + } + $tags_map{$options{tag}}{$name} = \%options; } foreach my $name (keys %{$waypoints{$map}}) { @@ -434,9 +429,10 @@ Sample style: $writer->endTag(); } $writer->endTag(); + $writer->endTag(); $writer->end(); - writefile("pois.kmp", $config{destdir} . "/$map", $output); + writefile("pois.kml", $config{destdir} . "/$map", $output); } } @@ -484,7 +480,7 @@ sub format (@) { return $params{content}; } -sub prefered_format() { +sub preferred_format() { if (!defined($config{'osm_format'}) || !$config{'osm_format'}) { $config{'osm_format'} = 'KML'; } @@ -492,24 +488,26 @@ sub prefered_format() { return shift @spl; } +sub get_formats() { + if (!defined($config{'osm_format'}) || !$config{'osm_format'}) { + $config{'osm_format'} = 'KML'; + } + map { $_ => 1 } split(/, */, $config{'osm_format'}); +} + sub include_javascript ($) { my $page=shift; my $loader; - eval q{use JSON}; - error $@ if $@; if (exists $pagestate{$page}{'osm'}) { foreach my $map (keys %{$pagestate{$page}{'osm'}}) { foreach my $name (keys %{$pagestate{$page}{'osm'}{$map}{'displays'}}) { - my %options = %{$pagestate{$page}{'osm'}{$map}{'displays'}{$name}}; - $options{'map'} = $map; - $options{'format'} = prefered_format(); - $loader .= "mapsetup(\"mapdiv-$name\", " . to_json(\%options) . ");\n"; + $loader .= map_setup_code($map, $name, %{$pagestate{$page}{'osm'}{$map}{'displays'}{$name}}); } } } if ($loader) { - return embed_map_code($page) . ""; + return embed_map_code($page) . ""; } else { return ''; @@ -521,6 +519,8 @@ sub cgi($) { return unless defined $cgi->param('do') && $cgi->param("do") eq "osm"; + + IkiWiki::loadindex(); IkiWiki::decode_cgi_utf8($cgi); @@ -534,7 +534,16 @@ sub cgi($) { print ""; print "
    "; print embed_map_code(); - print ""; + print ""; print ""; exit 0; @@ -542,9 +551,46 @@ sub cgi($) { sub embed_map_code(;$) { my $page=shift; - return ''. + my $olurl = $config{osm_openlayers_url} || "http://www.openlayers.org/api/OpenLayers.js"; + my $code = ''."\n". ''."\n"; + if ($config{'osm_google_apikey'}) { + $code .= ''; + } + return $code; +} + +sub map_setup_code($;@) { + my $map=shift; + my $name=shift; + my %options=@_; + + my $mapurl = $config{osm_map_url}; + + eval q{use JSON}; + error $@ if $@; + + $options{'format'} = preferred_format(); + + my %formats = get_formats(); + if ($formats{'GeoJSON'}) { + $options{'jsonurl'} = urlto($map."/pois.json"); + } + if ($formats{'CSV'}) { + $options{'csvurl'} = urlto($map."/pois.txt"); + } + if ($formats{'KML'}) { + $options{'kmlurl'} = urlto($map."/pois.kml"); + } + + if ($mapurl) { + $options{'mapurl'} = $mapurl; + } + $options{'layers'} = $config{osm_layers}; + + $name=~s/'//g; # $name comes from user input + return "mapsetup('mapdiv-$name', " . to_json(\%options) . ");"; } 1; diff --git a/IkiWiki/Plugin/passwordauth.pm b/IkiWiki/Plugin/passwordauth.pm index 35ebd961f..0cf2a26ea 100644 --- a/IkiWiki/Plugin/passwordauth.pm +++ b/IkiWiki/Plugin/passwordauth.pm @@ -96,6 +96,72 @@ sub setpassword ($$;$) { else { IkiWiki::userinfo_set($user, $field, $password); } + + # Setting the password clears any passwordless login token. + if ($field ne 'passwordless') { + IkiWiki::userinfo_set($user, "passwordless", ""); + } +} + +# Generates a token that can be used to log the user in. +# This needs to be hard to guess. Generating a cgi session id will +# make it as hard to guess as any cgi session. +sub gentoken ($$;$) { + my $user=shift; + my $tokenfield=shift; + my $reversable=shift; + + eval q{use CGI::Session}; + error($@) if $@; + my $token = CGI::Session->new->id; + if (! $reversable) { + setpassword($user, $token, $tokenfield); + } + else { + IkiWiki::userinfo_set($user, $tokenfield, $token); + } + return $token; +} + +# An anonymous user has no normal password, only a passwordless login +# token. Given an email address, this sets up such a user for that email, +# unless one already exists, and returns the username. +sub anonuser ($) { + my $email=shift; + + # Want a username for this email that won't overlap with any other. + my $user=$email; + $user=~s/@/_/g; + + my $userinfo=IkiWiki::userinfo_retrieve(); + if (! exists $userinfo->{$user} || ! ref $userinfo->{$user}) { + if (IkiWiki::userinfo_setall($user, { + 'email' => $email, + 'regdate' => time})) { + gentoken($user, "passwordless", 1); + return $user; + } + else { + error(gettext("Error creating account.")); + } + } + elsif (defined anonusertoken($userinfo->{$user})) { + return $user; + } + else { + return undef; + } +} + +sub anonusertoken ($) { + my $userhash=shift; + if (exists $userhash->{passwordless} && + length $userhash->{passwordless}) { + return $userhash->{passwordless}; + } + else { + return undef; + } } sub formbuilder_setup (@) { @@ -277,20 +343,13 @@ sub formbuilder (@) { if (! length $email) { error(gettext("No email address, so cannot email password reset instructions.")); } - - # Store a token that can be used once - # to log the user in. This needs to be hard - # to guess. Generating a cgi session id will - # make it as hard to guess as any cgi session. 
- eval q{use CGI::Session}; - error($@) if $@; - my $token = CGI::Session->new->id; - setpassword($user_name, $token, "resettoken"); + + my $token=gentoken($user_name, "resettoken"); my $template=template("passwordmail.tmpl"); $template->param( user_name => $user_name, - passwordurl => IkiWiki::cgiurl( + passwordurl => IkiWiki::cgiurl_abs( 'do' => "reset", 'name' => $user_name, 'token' => $token, @@ -329,7 +388,7 @@ sub formbuilder (@) { elsif ($form->title eq "preferences") { if ($form->submitted eq "Save Preferences" && $form->validate) { my $user_name=$form->field('name'); - if ($form->field("password") && length $form->field("password")) { + if (defined $form->field("password") && length $form->field("password")) { setpassword($user_name, $form->field('password')); } } @@ -356,6 +415,22 @@ sub sessioncgi ($$) { IkiWiki::cgi_prefs($q, $session); exit; } + elsif ($q->param('do') eq 'tokenauth') { + my $name=$q->param("name"); + my $token=$q->param("token"); + + if (! defined $name || ! defined $token || + ! length $name || ! length $token) { + error(gettext("incorrect url")); + } + if (! checkpassword($name, $token, "passwordless")) { + error(gettext("access denied")); + } + + $session->param("name", $name); + IkiWiki::cgi_prefs($q, $session); + exit; + } elsif ($q->param("do") eq "register") { # After registration, need to go somewhere, so show prefs page. $session->param(postsignin => "do=prefs"); diff --git a/IkiWiki/Plugin/pinger.pm b/IkiWiki/Plugin/pinger.pm index ea4f3e0dc..b2d54af8a 100644 --- a/IkiWiki/Plugin/pinger.pm +++ b/IkiWiki/Plugin/pinger.pm @@ -13,7 +13,7 @@ sub import { hook(type => "needsbuild", id => "pinger", call => \&needsbuild); hook(type => "preprocess", id => "ping", call => \&preprocess); hook(type => "delete", id => "pinger", call => \&ping); - hook(type => "change", id => "pinger", call => \&ping); + hook(type => "rendered", id => "pinger", call => \&ping); } sub getsetup () { @@ -72,7 +72,7 @@ sub ping { my $ua; eval q{use LWPx::ParanoidAgent}; if (!$@) { - $ua=LWPx::ParanoidAgent->new; + $ua=LWPx::ParanoidAgent->new(agent => $config{useragent}); } else { eval q{use LWP}; @@ -80,7 +80,7 @@ sub ping { debug(gettext("LWP not found, not pinging")); return; } - $ua=LWP::UserAgent->new; + $ua=useragent(); } $ua->timeout($config{pinger_timeout} || 15); diff --git a/IkiWiki/Plugin/po.pm b/IkiWiki/Plugin/po.pm index 6410a1c66..6107a4a22 100644 --- a/IkiWiki/Plugin/po.pm +++ b/IkiWiki/Plugin/po.pm @@ -23,7 +23,6 @@ use File::Copy; use File::Spec; use File::Temp; use Memoize; -use UNIVERSAL; my ($master_language_code, $master_language_name); my %translations; @@ -48,7 +47,7 @@ sub import { hook(type => "pagetemplate", id => "po", call => \&pagetemplate, last => 1); hook(type => "rename", id => "po", call => \&renamepages, first => 1); hook(type => "delete", id => "po", call => \&mydelete); - hook(type => "change", id => "po", call => \&change); + hook(type => "rendered", id => "po", call => \&rendered); hook(type => "checkcontent", id => "po", call => \&checkcontent); hook(type => "canremove", id => "po", call => \&canremove); hook(type => "canrename", id => "po", call => \&canrename); @@ -347,6 +346,12 @@ sub pagetemplate (@) { if ($template->query(name => "lang_code")) { $template->param(lang_code => $lang_code); } + if ($template->query(name => "html_lang_code")) { + $template->param(html_lang_code => htmllangcode($lang_code)); + } + if ($template->query(name => "html_lang_dir")) { + $template->param(html_lang_dir => htmllangdir($lang_code)); + } if 
($template->query(name => "lang_name")) { $template->param(lang_name => languagename($lang_code)); } @@ -428,7 +433,7 @@ sub mydelete (@) { map { deletetranslations($_) } grep istranslatablefile($_), @deleted; } -sub change (@) { +sub rendered (@) { my @rendered=@_; my $updated_po_files=0; @@ -858,6 +863,19 @@ sub lang ($) { return $master_language_code; } +sub htmllangcode ($) { + (my $lang = shift) =~ tr/_/-/; + return $lang; +} + +sub htmllangdir ($) { + my $lang = shift; + if ($lang =~ /^(ar|fa|he)/) { + return 'rtl'; + } + return 'ltr'; +} + sub islanguagecode ($) { my $code=shift; @@ -1054,6 +1072,8 @@ sub otherlanguagesloop ($) { push @ret, { url => urlto_with_orig_beautiful_urlpath(masterpage($page), $page), code => $master_language_code, + html_code => htmllangcode($master_language_code), + html_dir => htmllangdir($master_language_code), language => $master_language_name, master => 1, }; @@ -1064,6 +1084,8 @@ sub otherlanguagesloop ($) { push @ret, { url => urlto_with_orig_beautiful_urlpath($otherpage, $page), code => $lang, + html_code => htmllangcode($lang), + html_dir => htmllangdir($lang), language => languagename($lang), percent => percenttranslated($otherpage), } @@ -1103,7 +1125,7 @@ sub deletetranslations ($) { IkiWiki::rcs_remove($_); } else { - IkiWiki::prune("$config{srcdir}/$_"); + IkiWiki::prune("$config{srcdir}/$_", $config{srcdir}); } } @todelete; diff --git a/IkiWiki/Plugin/poll.pm b/IkiWiki/Plugin/poll.pm index 2773486a6..3bd4af206 100644 --- a/IkiWiki/Plugin/poll.pm +++ b/IkiWiki/Plugin/poll.pm @@ -23,12 +23,14 @@ sub getsetup () { my %pagenum; sub preprocess (@) { - my %params=(open => "yes", total => "yes", percent => "yes", @_); + my %params=(open => "yes", total => "yes", percent => "yes", + expandable => "no", @_); my $open=IkiWiki::yesno($params{open}); my $showtotal=IkiWiki::yesno($params{total}); my $showpercent=IkiWiki::yesno($params{percent}); - $pagenum{$params{page}}++; + my $expandable=IkiWiki::yesno($params{expandable}); + my $num=++$pagenum{$params{page}}{$params{destpage}}; my %choices; my @choices; @@ -64,7 +66,7 @@ sub preprocess (@) { } if ($open && exists $config{cgiurl}) { $ret.="\n"; - $ret.="\n"; + $ret.="\n"; $ret.="\n"; $ret.="\n"; $ret.="\n"; @@ -74,6 +76,19 @@ sub preprocess (@) { $ret.="\n"; } } + + if ($expandable && $open && exists $config{cgiurl}) { + $ret.="

    \n"; + $ret.="

    \n"; + $ret.="\n"; + $ret.="\n"; + $ret.="\n"; + $ret.=gettext("Write in").": \n"; + $ret.="\n"; + $ret.="
    \n"; + $ret.="

    \n"; + } + if ($showtotal) { $ret.="".gettext("Total votes:")." $total\n"; } @@ -85,7 +100,7 @@ sub sessioncgi ($$) { my $session=shift; if (defined $cgi->param('do') && $cgi->param('do') eq "poll") { my $choice=decode_utf8($cgi->param('choice')); - if (! defined $choice) { + if (! defined $choice || not length $choice) { error("no choice specified"); } my $num=$cgi->param('num'); @@ -118,7 +133,14 @@ sub sessioncgi ($$) { my $params=shift; return "\\[[$prefix $params]]" if $escape; if (--$num == 0) { - $params=~s/(^|\s+)(\d+)\s+"?\Q$choice\E"?(\s+|$)/$1.($2+1)." \"$choice\"".$3/se; + if ($params=~s/(^|\s+)(\d+)\s+"?\Q$choice\E"?(\s+|$)/$1.($2+1)." \"$choice\"".$3/se) { + } + elsif ($params=~/expandable=(\w+)/ + & &IkiWiki::yesno($1)) { + $choice=~s/["\]\n\r]//g; + $params.=" 1 \"$choice\"" + if length $choice; + } if (defined $oldchoice) { $params=~s/(^|\s+)(\d+)\s+"?\Q$oldchoice\E"?(\s+|$)/$1.($2-1 >=0 ? $2-1 : 0)." \"$oldchoice\"".$3/se; } diff --git a/IkiWiki/Plugin/recentchanges.pm b/IkiWiki/Plugin/recentchanges.pm index 8ce9474be..eec9803be 100644 --- a/IkiWiki/Plugin/recentchanges.pm +++ b/IkiWiki/Plugin/recentchanges.pm @@ -165,6 +165,7 @@ sub store ($$$) { # Limit pages to first 10, and add links to the changed pages. my $is_excess = exists $change->{pages}[10]; delete @{$change->{pages}}[10 .. @{$change->{pages}}] if $is_excess; + my $has_diffurl=0; $change->{pages} = [ map { if (length $config{cgiurl}) { @@ -180,6 +181,9 @@ sub store ($$$) { else { $_->{link} = pagetitle($_->{page}); } + if (defined $_->{diffurl} && length($_->{diffurl})) { + $has_diffurl=1; + } $_; } @{$change->{pages}} @@ -227,6 +231,8 @@ sub store ($$$) { wikiname => $config{wikiname}, ); + $template->param(has_diffurl => 1) if $has_diffurl; + $template->param(permalink => urlto($config{recentchangespage})."#change-".titlepage($change->{rev})) if exists $config{url}; diff --git a/IkiWiki/Plugin/recentchangesdiff.pm b/IkiWiki/Plugin/recentchangesdiff.pm index 418822793..eb358be67 100644 --- a/IkiWiki/Plugin/recentchangesdiff.pm +++ b/IkiWiki/Plugin/recentchangesdiff.pm @@ -9,10 +9,12 @@ use HTML::Entities; my $maxlines=200; sub import { + add_underlay("javascript"); hook(type => "getsetup", id => "recentchangesdiff", call => \&getsetup); hook(type => "pagetemplate", id => "recentchangesdiff", call => \&pagetemplate); + hook(type => "format", id => "recentchangesdiff.pm", call => \&format); } sub getsetup () { @@ -55,4 +57,24 @@ sub pagetemplate (@) { } } +sub format (@) { + my %params=@_; + + if (! ($params{content}=~s!^(]*>)!$1.include_javascript($params{page})!em)) { + # no tag, probably in preview mode + $params{content}=include_javascript(undef).$params{content}; + } + return $params{content}; +} + +# taken verbatim from toggle.pm +sub include_javascript ($) { + my $from=shift; + + return ''."\n". + ''; +} + 1 diff --git a/IkiWiki/Plugin/remove.pm b/IkiWiki/Plugin/remove.pm index 14ac01c9b..d48b28f95 100644 --- a/IkiWiki/Plugin/remove.pm +++ b/IkiWiki/Plugin/remove.pm @@ -22,6 +22,13 @@ sub getsetup () { }, } +sub allowed_dirs { + return grep { defined $_ } ( + $config{srcdir}, + $IkiWiki::Plugin::transient::transientdir, + ); +} + sub check_canremove ($$$) { my $page=shift; my $q=shift; @@ -33,12 +40,22 @@ sub check_canremove ($$$) { htmllink("", "", $page, noimageinline => 1))); } - # Must exist on disk, and be a regular file. + # Must exist in either the srcdir or a suitable underlay (e.g. + # transient underlay), and be a regular file. my $file=$pagesources{$page}; - if (! 
-e "$config{srcdir}/$file") { + my $dir; + + foreach my $srcdir (allowed_dirs()) { + if (-e "$srcdir/$file") { + $dir = $srcdir; + last; + } + } + + if (! defined $dir) { error(sprintf(gettext("%s is not in the srcdir, so it cannot be deleted"), $file)); } - elsif (-l "$config{srcdir}/$file" && ! -f _) { + elsif (-l "$dir/$file" && ! -f _) { error(sprintf(gettext("%s is not a file"), $file)); } @@ -46,7 +63,7 @@ sub check_canremove ($$$) { # This is sorta overkill, but better safe than sorry. if (! defined pagetype($pagesources{$page})) { if (IkiWiki::Plugin::attachment->can("check_canattach")) { - IkiWiki::Plugin::attachment::check_canattach($session, $page, "$config{srcdir}/$file"); + IkiWiki::Plugin::attachment::check_canattach($session, $page, "$dir/$file"); } else { error("removal of attachments is not allowed"); @@ -124,7 +141,7 @@ sub removal_confirm ($$@) { my $f=IkiWiki::Plugin::attachment::is_held_attachment($page); if (defined $f) { require IkiWiki::Render; - IkiWiki::prune($f); + IkiWiki::prune($f, "$config{wikistatedir}/attachments"); } } } @@ -223,21 +240,34 @@ sub sessioncgi ($$) { require IkiWiki::Render; if ($config{rcs}) { IkiWiki::disable_commit_hook(); - foreach my $file (@files) { - IkiWiki::rcs_remove($file); + } + my $rcs_removed = 1; + + foreach my $file (@files) { + foreach my $srcdir (allowed_dirs()) { + if (-e "$srcdir/$file") { + if ($srcdir eq $config{srcdir} && $config{rcs}) { + IkiWiki::rcs_remove($file); + $rcs_removed = 1; + } + else { + IkiWiki::prune("$srcdir/$file", $srcdir); + } + } } - IkiWiki::rcs_commit_staged( - message => gettext("removed"), - session => $session, - ); - IkiWiki::enable_commit_hook(); - IkiWiki::rcs_update(); } - else { - foreach my $file (@files) { - IkiWiki::prune("$config{srcdir}/$file"); + + if ($config{rcs}) { + if ($rcs_removed) { + IkiWiki::rcs_commit_staged( + message => gettext("removed"), + session => $session, + ); } + IkiWiki::enable_commit_hook(); + IkiWiki::rcs_update(); } + IkiWiki::refresh(); IkiWiki::saveindex(); diff --git a/IkiWiki/Plugin/rename.pm b/IkiWiki/Plugin/rename.pm index 8e32d41ae..8387a1e32 100644 --- a/IkiWiki/Plugin/rename.pm +++ b/IkiWiki/Plugin/rename.pm @@ -206,14 +206,22 @@ sub rename_start ($$$$) { exit 0; } -sub postrename ($;$$$) { +sub postrename ($$$;$$) { + my $cgi=shift; my $session=shift; my $src=shift; my $dest=shift; my $attachment=shift; - # Load saved form state and return to edit page. - my $postrename=CGI->new($session->param("postrename")); + # Load saved form state and return to edit page, using stored old + # cgi state. Or, if the rename was not started on the edit page, + # return to the renamed page. + my $postrename=$session->param("postrename"); + if (! defined $postrename) { + IkiWiki::redirect($cgi, urlto(defined $dest ? $dest : $src)); + exit; + } + my $oldcgi=CGI->new($postrename); $session->clear("postrename"); IkiWiki::cgi_savesession($session); @@ -222,21 +230,21 @@ sub postrename ($;$$$) { # They renamed the page they were editing. This requires # fixups to the edit form state. # Tweak the edit form to be editing the new page. - $postrename->param("page", $dest); + $oldcgi->param("page", $dest); } # Update edit form content to fix any links present # on it. - $postrename->param("editcontent", + $oldcgi->param("editcontent", renamepage_hook($dest, $src, $dest, - $postrename->param("editcontent"))); + $oldcgi->param("editcontent"))); # Get a new edit token; old was likely invalidated. 
- $postrename->param("rcsinfo", + $oldcgi->param("rcsinfo", IkiWiki::rcs_prepedit($pagesources{$dest})); } - IkiWiki::cgi_editpage($postrename, $session); + IkiWiki::cgi_editpage($oldcgi, $session); } sub formbuilder (@) { @@ -291,16 +299,16 @@ sub sessioncgi ($$) { my $session=shift; my ($form, $buttons)=rename_form($q, $session, Encode::decode_utf8($q->param("page"))); IkiWiki::decode_form_utf8($form); + my $src=$form->field("page"); if ($form->submitted eq 'Cancel') { - postrename($session); + postrename($q, $session, $src); } elsif ($form->submitted eq 'Rename' && $form->validate) { IkiWiki::checksessionexpiry($q, $session, $q->param('sid')); # These untaints are safe because of the checks # performed in check_canrename later. - my $src=$form->field("page"); my $srcfile=IkiWiki::possibly_foolish_untaint($pagesources{$src}) if exists $pagesources{$src}; my $dest=IkiWiki::possibly_foolish_untaint(titlepage($form->field("new_name"))); @@ -324,7 +332,7 @@ sub sessioncgi ($$) { IkiWiki::Plugin::attachment::is_held_attachment($src); if ($held) { rename($held, IkiWiki::Plugin::attachment::attachment_holding_location($dest)); - postrename($session, $src, $dest, $q->param("attachment")) + postrename($q, $session, $src, $dest, $q->param("attachment")) unless defined $srcfile; } @@ -430,7 +438,7 @@ sub sessioncgi ($$) { $renamesummary.=$template->output; } - postrename($session, $src, $dest, $q->param("attachment")); + postrename($q, $session, $src, $dest, $q->param("attachment")); } else { IkiWiki::showform($form, $buttons, $session, $q); diff --git a/IkiWiki/Plugin/rsync.pm b/IkiWiki/Plugin/rsync.pm index e38801e4a..1b85ea000 100644 --- a/IkiWiki/Plugin/rsync.pm +++ b/IkiWiki/Plugin/rsync.pm @@ -7,7 +7,7 @@ use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "rsync", call => \&getsetup); - hook(type => "change", id => "rsync", call => \&postrefresh); + hook(type => "rendered", id => "rsync", call => \&postrefresh); hook(type => "delete", id => "rsync", call => \&postrefresh); } diff --git a/IkiWiki/Plugin/search.pm b/IkiWiki/Plugin/search.pm index 42d2e0d30..24b16fe2a 100644 --- a/IkiWiki/Plugin/search.pm +++ b/IkiWiki/Plugin/search.pm @@ -33,6 +33,13 @@ sub getsetup () { safe => 0, # external program rebuild => 0, }, + google_search => { + type => "booblean", + example => 1, + description => "use google site search rather than internal xapian index?", + safe => 1, + rebuild => 0, + }, } sub checkconfig () { @@ -75,6 +82,8 @@ my $stemmer; sub indexhtml (@) { my %params=@_; + return if $config{google_search}; + setupfiles(); # A unique pageterm is used to identify the document for a page. 
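A hedged illustration of the pageterm idea mentioned in that comment; this is
not ikiwiki's actual encoding, just one way to derive a unique, length-bounded
xapian term per page, hashing overlong names (example_pageterm is a
hypothetical name):

	use Digest::SHA qw(sha1_hex);

	sub example_pageterm {
		my $page = shift;
		my $term = "U:".$page;
		# fall back to a digest when the name would be too long for a term
		return length($term) > 240 ? "U:".sha1_hex($page) : $term;
	}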
@@ -165,6 +174,8 @@ sub indexhtml (@) { } sub delete (@) { + return if $config{google_search}; + my $db=xapiandb(); foreach my $page (@_) { my $pageterm=pageterm(pagename($page)); @@ -176,14 +187,20 @@ sub cgi ($) { my $cgi=shift; if (defined $cgi->param('P')) { - # only works for GET requests - chdir("$config{wikistatedir}/xapian") || error("chdir: $!"); - $ENV{OMEGA_CONFIG_FILE}="./omega.conf"; - $ENV{CGIURL}=IkiWiki::cgiurl(); - IkiWiki::loadindex(); - $ENV{HELPLINK}=htmllink("", "", "ikiwiki/searching", - noimageinline => 1, linktext => "Help"); - exec($config{omega_cgi}) || error("$config{omega_cgi} failed: $!"); + if ($config{google_search}) { + print $cgi->redirect("https://www.google.com/search?sitesearch=$config{url}&q=".$cgi->param('P')); + exit 0; + } + else { + # only works for GET requests + chdir("$config{wikistatedir}/xapian") || error("chdir: $!"); + $ENV{OMEGA_CONFIG_FILE}="./omega.conf"; + $ENV{CGIURL}=IkiWiki::cgiurl(); + IkiWiki::loadindex(); + $ENV{HELPLINK}=htmllink("", "", "ikiwiki/searching", + noimageinline => 1, linktext => "Help"); + exec($config{omega_cgi}) || error("$config{omega_cgi} failed: $!"); + } } } diff --git a/IkiWiki/Plugin/skeleton.pm.example b/IkiWiki/Plugin/skeleton.pm.example index 7974d5e53..f9caef40c 100644 --- a/IkiWiki/Plugin/skeleton.pm.example +++ b/IkiWiki/Plugin/skeleton.pm.example @@ -26,7 +26,8 @@ sub import { hook(type => "templatefile", id => "skeleton", call => \&templatefile); hook(type => "pageactions", id => "skeleton", call => \&pageactions); hook(type => "delete", id => "skeleton", call => \&delete); - hook(type => "change", id => "skeleton", call => \&change); + hook(type => "rendered", id => "skeleton", call => \&rendered); + hook(type => "changes", id => "skeleton", call => \&changes); hook(type => "cgi", id => "skeleton", call => \&cgi); hook(type => "auth", id => "skeleton", call => \&auth); hook(type => "sessioncgi", id => "skeleton", call => \&sessioncgi); @@ -53,7 +54,6 @@ sub getsetup () { plugin => { safe => 1, rebuild => undef, - section => "misc", }, skeleton => { type => "boolean", @@ -167,10 +167,16 @@ sub delete (@) { debug("skeleton plugin told that files were deleted: @files"); } -sub change (@) { +sub rendered (@) { my @files=@_; - debug("skeleton plugin told that changed files were rendered: @files"); + debug("skeleton plugin told that files were rendered: @files"); +} + +sub changes (@) { + my @files=@_; + + debug("skeleton plugin told that files were changed: @files"); } sub cgi ($) { diff --git a/IkiWiki/Plugin/template.pm b/IkiWiki/Plugin/template.pm index 3df06e652..ccc9cb666 100644 --- a/IkiWiki/Plugin/template.pm +++ b/IkiWiki/Plugin/template.pm @@ -41,9 +41,11 @@ sub preprocess (@) { blind_cache => 1); }; if ($@) { + # gettext can clobber $@ + my $error = $@; error sprintf(gettext("failed to process template %s"), htmllink($params{page}, $params{destpage}, - "/templates/$params{id}"))." $@"; + "/templates/$params{id}"))." 
$error"; } $params{basename}=IkiWiki::basename($params{page}); diff --git a/IkiWiki/Plugin/theme.pm b/IkiWiki/Plugin/theme.pm index ee94547e9..9b84ea7f0 100644 --- a/IkiWiki/Plugin/theme.pm +++ b/IkiWiki/Plugin/theme.pm @@ -9,6 +9,7 @@ sub import { hook(type => "getsetup", id => "theme", call => \&getsetup); hook(type => "checkconfig", id => "theme", call => \&checkconfig); hook(type => "needsbuild", id => "theme", call => \&needsbuild); + hook(type => "pagetemplate", id => "theme", call => \&pagetemplate); } sub getsetup () { @@ -63,4 +64,12 @@ sub needsbuild ($) { return $needsbuild; } +sub pagetemplate (@) { + my %params=@_; + my $template=$params{template}; + if (exists $config{theme} && length $config{theme}) { + $template->param("theme_$config{theme}" => 1); + } +} + 1 diff --git a/IkiWiki/Plugin/trail.pm b/IkiWiki/Plugin/trail.pm new file mode 100644 index 000000000..476db4dcb --- /dev/null +++ b/IkiWiki/Plugin/trail.pm @@ -0,0 +1,466 @@ +#!/usr/bin/perl +# Copyright © 2008-2011 Joey Hess +# Copyright © 2009-2012 Simon McVittie +# Licensed under the GNU GPL, version 2, or any later version published by the +# Free Software Foundation +package IkiWiki::Plugin::trail; + +use warnings; +use strict; +use IkiWiki 3.00; + +sub import { + hook(type => "getsetup", id => "trail", call => \&getsetup); + hook(type => "needsbuild", id => "trail", call => \&needsbuild); + hook(type => "preprocess", id => "trailoptions", call => \&preprocess_trailoptions, scan => 1); + hook(type => "preprocess", id => "trailitem", call => \&preprocess_trailitem, scan => 1); + hook(type => "preprocess", id => "trailitems", call => \&preprocess_trailitems, scan => 1); + hook(type => "preprocess", id => "traillink", call => \&preprocess_traillink, scan => 1); + hook(type => "pagetemplate", id => "trail", call => \&pagetemplate); + hook(type => "build_affected", id => "trail", call => \&build_affected); +} + +# Page state +# +# If a page $T is a trail, then it can have +# +# * $pagestate{$T}{trail}{contents} +# Reference to an array of lists each containing either: +# - [pagenames => "page1", "page2"] +# Those literal pages +# - [link => "link"] +# A link specification, pointing to the same page that [[link]] +# would select +# - [pagespec => "posts/*", "age", 0] +# A match by pagespec; the third array element is the sort order +# and the fourth is whether to reverse sorting +# +# * $pagestate{$T}{trail}{sort} +# A sorting order; if absent or undef, the trail is in the order given +# by the links that form it +# +# * $pagestate{$T}{trail}{circular} +# True if this trail is circular (i.e. going "next" from the last item is +# allowed, and takes you back to the first) +# +# * $pagestate{$T}{trail}{reverse} +# True if C is to be reversed. +# +# If a page $M is a member of a trail $T, then it has +# +# * $pagestate{$M}{trail}{item}{$T}[0] +# The page before this one in C<$T> at the last rebuild, or undef. +# +# * $pagestate{$M}{trail}{item}{$T}[1] +# The page after this one in C<$T> at the last refresh, or undef. + +sub getsetup () { + return + plugin => { + safe => 1, + rebuild => undef, + }, +} + +# Cache of pages' old titles, so we can tell whether they changed +my %old_trail_titles; + +sub needsbuild (@) { + my $needsbuild=shift; + + foreach my $page (keys %pagestate) { + if (exists $pagestate{$page}{trail}) { + if (exists $pagesources{$page} && + grep { $_ eq $pagesources{$page} } @$needsbuild) { + # Remember its title, so we can know whether + # it changed. 
+ $old_trail_titles{$page} = title_of($page); + + # Remove state, it will be re-added + # if the preprocessor directive is still + # there during the rebuild. {item} is the + # only thing that's added for items, not + # trails, and it's harmless to delete that - + # the item is being rebuilt anyway. + delete $pagestate{$page}{trail}; + } + } + } + + return $needsbuild; +} + +my $scanned = 0; + +sub preprocess_trailoptions (@) { + my %params = @_; + + if (exists $params{circular}) { + $pagestate{$params{page}}{trail}{circular} = + IkiWiki::yesno($params{circular}); + } + + if (exists $params{sort}) { + $pagestate{$params{page}}{trail}{sort} = $params{sort}; + } + + if (exists $params{reverse}) { + $pagestate{$params{page}}{trail}{reverse} = $params{reverse}; + } + + return ""; +} + +sub preprocess_trailitem (@) { + my $link = shift; + shift; + + # avoid collecting everything in the preprocess stage if we already + # did in the scan stage + if (defined wantarray) { + return "" if $scanned; + } + else { + $scanned = 1; + } + + my %params = @_; + my $trail = $params{page}; + + $link = linkpage($link); + + add_link($params{page}, $link, 'trail'); + push @{$pagestate{$params{page}}{trail}{contents}}, [link => $link]; + + return ""; +} + +sub preprocess_trailitems (@) { + my %params = @_; + + # avoid collecting everything in the preprocess stage if we already + # did in the scan stage + if (defined wantarray) { + return "" if $scanned; + } + else { + $scanned = 1; + } + + # trail members from a pagespec ought to be in some sort of order, + # and path is a nice obvious default + $params{sort} = 'path' unless exists $params{sort}; + $params{reverse} = 'no' unless exists $params{reverse}; + + if (exists $params{pages}) { + push @{$pagestate{$params{page}}{trail}{contents}}, + ["pagespec" => $params{pages}, $params{sort}, + IkiWiki::yesno($params{reverse})]; + } + + if (exists $params{pagenames}) { + push @{$pagestate{$params{page}}{trail}{contents}}, + [pagenames => (split ' ', $params{pagenames})]; + } + + return ""; +} + +sub preprocess_traillink (@) { + my $link = shift; + shift; + + my %params = @_; + my $trail = $params{page}; + + $link =~ qr{ + (?: + ([^\|]+) # 1: link text + \| # followed by | + )? # optional + + (.+) # 2: page to link to + }x; + + my $linktext = $1; + $link = linkpage($2); + + add_link($params{page}, $link, 'trail'); + + # avoid collecting everything in the preprocess stage if we already + # did in the scan stage + my $already; + if (defined wantarray) { + $already = $scanned; + } + else { + $scanned = 1; + } + + push @{$pagestate{$params{page}}{trail}{contents}}, [link => $link] unless $already; + + if (defined $linktext) { + $linktext = pagetitle($linktext); + } + + if (exists $params{text}) { + $linktext = $params{text}; + } + + if (defined $linktext) { + return htmllink($trail, $params{destpage}, + $link, linktext => $linktext); + } + + return htmllink($trail, $params{destpage}, $link); +} + +# trail => [member1, member2] +my %trail_to_members; +# member => { trail => [prev, next] } +# e.g. if %trail_to_members = ( +# trail1 => ["member1", "member2"], +# trail2 => ["member0", "member1"], +# ) +# +# then $member_to_trails{member1} = { +# trail1 => [undef, "member2"], +# trail2 => ["member0", undef], +# } +my %member_to_trails; + +# member => 1 +my %rebuild_trail_members; + +sub trails_differ { + my ($old, $new) = @_; + + foreach my $trail (keys %$old) { + if (! 
exists $new->{$trail}) { + return 1; + } + + if (exists $old_trail_titles{$trail} && + title_of($trail) ne $old_trail_titles{$trail}) { + return 1; + } + + my ($old_p, $old_n) = @{$old->{$trail}}; + my ($new_p, $new_n) = @{$new->{$trail}}; + $old_p = "" unless defined $old_p; + $old_n = "" unless defined $old_n; + $new_p = "" unless defined $new_p; + $new_n = "" unless defined $new_n; + if ($old_p ne $new_p) { + return 1; + } + + if (exists $old_trail_titles{$old_p} && + title_of($old_p) ne $old_trail_titles{$old_p}) { + return 1; + } + + if ($old_n ne $new_n) { + return 1; + } + + if (exists $old_trail_titles{$old_n} && + title_of($old_n) ne $old_trail_titles{$old_n}) { + return 1; + } + } + + foreach my $trail (keys %$new) { + if (! exists $old->{$trail}) { + return 1; + } + } + + return 0; +} + +my $done_prerender = 0; + +sub prerender { + return if $done_prerender; + + %trail_to_members = (); + %member_to_trails = (); + + foreach my $trail (keys %pagestate) { + next unless exists $pagestate{$trail}{trail}{contents}; + + my $members = []; + my @contents = @{$pagestate{$trail}{trail}{contents}}; + + foreach my $c (@contents) { + if ($c->[0] eq 'pagespec') { + push @$members, pagespec_match_list($trail, + $c->[1], sort => $c->[2], + reverse => $c->[3]); + } + elsif ($c->[0] eq 'pagenames') { + my @pagenames = @$c; + shift @pagenames; + foreach my $page (@pagenames) { + if (exists $pagesources{$page}) { + push @$members, $page; + } + else { + # rebuild trail if it turns up + add_depends($trail, $page, deptype("presence")); + } + } + } + elsif ($c->[0] eq 'link') { + my $best = bestlink($trail, $c->[1]); + push @$members, $best if length $best; + } + } + + if (defined $pagestate{$trail}{trail}{sort}) { + @$members = IkiWiki::sort_pages( + $pagestate{$trail}{trail}{sort}, + $members); + } + + if (IkiWiki::yesno $pagestate{$trail}{trail}{reverse}) { + @$members = reverse @$members; + } + + # uniquify + my %seen; + my @tmp; + foreach my $member (@$members) { + push @tmp, $member unless $seen{$member}; + $seen{$member} = 1; + } + $members = [@tmp]; + + for (my $i = 0; $i <= $#$members; $i++) { + my $member = $members->[$i]; + my $prev; + $prev = $members->[$i - 1] if $i > 0; + my $next = $members->[$i + 1]; + + $member_to_trails{$member}{$trail} = [$prev, $next]; + } + + if ((scalar @$members) > 1 && $pagestate{$trail}{trail}{circular}) { + $member_to_trails{$members->[0]}{$trail}[0] = $members->[$#$members]; + $member_to_trails{$members->[$#$members]}{$trail}[1] = $members->[0]; + } + + $trail_to_members{$trail} = $members; + } + + foreach my $member (keys %pagestate) { + if (exists $pagestate{$member}{trail}{item} && + ! exists $member_to_trails{$member}) { + $rebuild_trail_members{$member} = 1; + delete $pagestate{$member}{trail}{item}; + } + } + + foreach my $member (keys %member_to_trails) { + if (! exists $pagestate{$member}{trail}{item}) { + $rebuild_trail_members{$member} = 1; + } + else { + if (trails_differ($pagestate{$member}{trail}{item}, + $member_to_trails{$member})) { + $rebuild_trail_members{$member} = 1; + } + } + + $pagestate{$member}{trail}{item} = $member_to_trails{$member}; + } + + $done_prerender = 1; +} + +sub build_affected { + my %affected; + + # In principle we might not have done this yet, although in practice + # at least the trail itself has probably changed, and its template + # almost certainly contains TRAILS or TRAILLOOP, triggering our + # prerender as a side-effect. 
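# An illustrative return value for this hook (page names hypothetical),
# matching the page => reason map consumed by the run_hooks(build_affected
# => ...) call added to IkiWiki/Render.pm below:
#
#     ("posts/two" => "building posts/two, its previous or next page has changed")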
+ prerender(); + + foreach my $member (keys %rebuild_trail_members) { + $affected{$member} = sprintf(gettext("building %s, its previous or next page has changed"), $member); + } + + return %affected; +} + +sub title_of ($) { + my $page = shift; + if (defined ($pagestate{$page}{meta}{title})) { + return $pagestate{$page}{meta}{title}; + } + return pagetitle(IkiWiki::basename($page)); +} + +my $recursive = 0; + +sub pagetemplate (@) { + my %params = @_; + my $page = $params{page}; + my $template = $params{template}; + + return unless length $page; + + if ($template->query(name => 'trails') && ! $recursive) { + prerender(); + + $recursive = 1; + my $inner = template("trails.tmpl", blind_cache => 1); + IkiWiki::run_hooks(pagetemplate => sub { + shift->(%params, template => $inner) + }); + $template->param(trails => $inner->output); + $recursive = 0; + } + + if ($template->query(name => 'trailloop')) { + prerender(); + + my @trails; + + # sort backlinks by page name to have a consistent order + foreach my $trail (sort keys %{$member_to_trails{$page}}) { + + my $members = $trail_to_members{$trail}; + my ($prev, $next) = @{$member_to_trails{$page}{$trail}}; + my ($prevurl, $nexturl, $prevtitle, $nexttitle); + + if (defined $prev) { + $prevurl = urlto($prev, $page); + $prevtitle = title_of($prev); + } + + if (defined $next) { + $nexturl = urlto($next, $page); + $nexttitle = title_of($next); + } + + push @trails, { + prevpage => $prev, + prevtitle => $prevtitle, + prevurl => $prevurl, + nextpage => $next, + nexttitle => $nexttitle, + nexturl => $nexturl, + trailpage => $trail, + trailtitle => title_of($trail), + trailurl => urlto($trail, $page), + }; + } + + $template->param(trailloop => \@trails); + } +} + +1; diff --git a/IkiWiki/Plugin/transient.pm b/IkiWiki/Plugin/transient.pm index c0ad5fc11..d4eb005ea 100644 --- a/IkiWiki/Plugin/transient.pm +++ b/IkiWiki/Plugin/transient.pm @@ -8,7 +8,7 @@ use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "transient", call => \&getsetup); hook(type => "checkconfig", id => "transient", call => \&checkconfig); - hook(type => "change", id => "transient", call => \&change); + hook(type => "rendered", id => "transient", call => \&rendered); } sub getsetup () { @@ -33,7 +33,7 @@ sub checkconfig () { } } -sub change (@) { +sub rendered (@) { foreach my $file (@_) { # If the corresponding file exists in the transient underlay # and isn't actually being used, we can get rid of it. @@ -43,7 +43,7 @@ sub change (@) { my $casualty = "$transientdir/$file"; if (srcfile($file) ne $casualty && -e $casualty) { debug(sprintf(gettext("removing transient version of %s"), $file)); - IkiWiki::prune($casualty); + IkiWiki::prune($casualty, $transientdir); } } } diff --git a/IkiWiki/Plugin/underlay.pm b/IkiWiki/Plugin/underlay.pm index 3ea19c635..2967761c8 100644 --- a/IkiWiki/Plugin/underlay.pm +++ b/IkiWiki/Plugin/underlay.pm @@ -18,6 +18,7 @@ sub getsetup () { plugin => { safe => 0, rebuild => undef, + section => "special-purpose", }, add_underlays => { type => "string", diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm index 05132a8a8..fa2940b01 100644 --- a/IkiWiki/Render.pm +++ b/IkiWiki/Render.pm @@ -262,12 +262,13 @@ sub render ($$) { } } -sub prune ($) { +sub prune ($;$) { my $file=shift; + my $up_to=shift; unlink($file); my $dir=dirname($file); - while (rmdir($dir)) { + while ((! 
defined $up_to || $dir =~ m{^\Q$up_to\E\/}) && rmdir($dir)) { $dir=dirname($dir); } } @@ -286,9 +287,22 @@ sub srcdir_check () { } -sub find_src_files () { +# Finds all files in the srcdir, and the underlaydirs. +# Returns the files, and their corresponding pages. +# +# When run in only_underlay mode, adds only the underlay files to +# the files and pages passed in. +sub find_src_files (;$$$) { + my $only_underlay=shift; my @files; + if (defined $_[0]) { + @files=@{shift()}; + } my %pages; + if (defined $_[0]) { + %pages=%{shift()}; + } + eval q{use File::Find}; error($@) if $@; @@ -296,6 +310,8 @@ sub find_src_files () { die $@ if $@; my $origdir=getcwd(); my $abssrcdir=Cwd::abs_path($config{srcdir}); + + @IkiWiki::underlayfiles=(); my ($page, $underlay); my $helper=sub { @@ -322,6 +338,7 @@ sub find_src_files () { if (! -l "$abssrcdir/$f" && ! -e _) { if (! $pages{$page}) { push @files, $f; + push @IkiWiki::underlayfiles, $f; $pages{$page}=1; } } @@ -335,12 +352,14 @@ sub find_src_files () { } }; - chdir($config{srcdir}) || die "chdir $config{srcdir}: $!"; - find({ - no_chdir => 1, - wanted => $helper, - }, '.'); - chdir($origdir) || die "chdir $origdir: $!"; + unless ($only_underlay) { + chdir($config{srcdir}) || die "chdir $config{srcdir}: $!"; + find({ + no_chdir => 1, + wanted => $helper, + }, '.'); + chdir($origdir) || die "chdir $origdir: $!"; + } $underlay=1; foreach (@{$config{underlaydirs}}, $config{underlaydir}) { @@ -356,6 +375,50 @@ sub find_src_files () { return \@files, \%pages; } +# Given a hash of files that have changed, and a hash of files that were +# deleted, should return the same results as find_src_files, with the same +# sanity checks. But a lot faster! +sub process_changed_files ($$) { + my $changed_raw=shift; + my $deleted_raw=shift; + + my @files; + my %pages; + + foreach my $file (keys %$changed_raw) { + my $page = pagename($file); + next if ! exists $pagesources{$page} && file_pruned($file); + my ($f) = $file =~ /$config{wiki_file_regexp}/; # untaint + if (! defined $f) { + warn(sprintf(gettext("skipping bad filename %s"), $file)."\n"); + next; + } + push @files, $f; + if ($pages{$page}) { + debug(sprintf(gettext("%s has multiple possible source pages"), $page)); + } + $pages{$page}=1; + } + + # So far, we only have the changed files. Now add in all the old + # files that were not changed or deleted, excluding ones that came + # from the underlay. + my %old_underlay; + foreach my $f (@IkiWiki::underlayfiles) { + $old_underlay{$f}=1; + } + foreach my $page (keys %pagesources) { + my $f=$pagesources{$page}; + unless ($old_underlay{$f} || exists $pages{$page} || exists $deleted_raw->{$f}) { + $pages{$page}=1; + push @files, $f; + } + } + + # add in the underlay + find_src_files(1, \@files, \%pages); +} + sub find_new_files ($) { my $files=shift; my @new; @@ -447,7 +510,7 @@ sub remove_del (@) { } foreach my $old (@{$oldrenderedfiles{$page}}) { - prune($config{destdir}."/".$old); + prune($config{destdir}."/".$old, $config{destdir}); } foreach my $source (keys %destsources) { @@ -537,7 +600,7 @@ sub remove_unrendered () { foreach my $file (@{$oldrenderedfiles{$page}}) { if (! 
grep { $_ eq $file } @{$renderedfiles{$page}}) { debug(sprintf(gettext("removing %s, no longer built by %s"), $file, $page)); - prune($config{destdir}."/".$file); + prune($config{destdir}."/".$file, $config{destdir}); } } } @@ -761,17 +824,40 @@ sub gen_autofile ($$$) { return 1; } - sub refresh () { srcdir_check(); run_hooks(refresh => sub { shift->() }); - my ($files, $pages)=find_src_files(); - my ($new, $internal_new)=find_new_files($files); - my ($del, $internal_del)=find_del_files($pages); - my ($changed, $internal_changed)=find_changed($files); + my ($files, $pages, $new, $internal_new, $del, $internal_del, $changed, $internal_changed); + my $want_find_changes=$config{only_committed_changes} && + exists $IkiWiki::hooks{rcs}{rcs_find_changes} && + exists $IkiWiki::hooks{rcs}{rcs_get_current_rev}; + if (! $config{rebuild} && $want_find_changes && defined $IkiWiki::lastrev && length $IkiWiki::lastrev) { + my ($changed_raw, $del_raw); + ($changed_raw, $del_raw, $IkiWiki::lastrev) = $IkiWiki::hooks{rcs}{rcs_find_changes}{call}->($IkiWiki::lastrev); + ($files, $pages)=process_changed_files($changed_raw, $del_raw); + } + else { + ($files, $pages)=find_src_files(); + } + if ($want_find_changes) { + if (! defined($IkiWiki::lastrev) || ! length $IkiWiki::lastrev) { + $IkiWiki::lastrev=$IkiWiki::hooks{rcs}{rcs_get_current_rev}{call}->(); + } + } + ($new, $internal_new)=find_new_files($files); + ($del, $internal_del)=find_del_files($pages); + ($changed, $internal_changed)=find_changed($files); + my %existingfiles; run_hooks(needsbuild => sub { my $ret=shift->($changed, [@$del, @$internal_del]); - $changed=$ret if ref $ret eq 'ARRAY'; + if (ref $ret eq 'ARRAY' && $ret != $changed) { + if (! %existingfiles) { + foreach my $f (@$files) { + $existingfiles{$f}=1; + } + } + @$changed=grep $existingfiles{$_}, @$ret; + } }); my $oldlink_targets=calculate_old_links($changed, $del); @@ -800,6 +886,14 @@ sub refresh () { derender_internal($file); } + run_hooks(build_affected => sub { + my %affected = shift->(); + while (my ($page, $message) = each %affected) { + next unless exists $pagesources{$page}; + render($pagesources{$page}, $message); + } + }); + my ($backlinkchanged, $linkchangers)=calculate_changed_links($changed, $del, $oldlink_targets); @@ -821,8 +915,13 @@ sub refresh () { run_hooks(delete => sub { shift->(@$del, @$internal_del) }); } if (%rendered) { - run_hooks(change => sub { shift->(keys %rendered) }); + run_hooks(rendered => sub { shift->(keys %rendered) }); + run_hooks(change => sub { shift->(keys %rendered) }); # back-compat } + my %all_changed = map { $_ => 1 } + @$new, @$changed, @$del, + @$internal_new, @$internal_changed, @$internal_del; + run_hooks(changes => sub { shift->(keys %all_changed) }); } sub clean_rendered { @@ -831,7 +930,7 @@ sub clean_rendered { remove_unrendered(); foreach my $page (keys %oldrenderedfiles) { foreach my $file (@{$oldrenderedfiles{$page}}) { - prune($config{destdir}."/".$file); + prune($config{destdir}."/".$file, $config{destdir}); } } } diff --git a/IkiWiki/Setup.pm b/IkiWiki/Setup.pm index 48f3d4634..453d19670 100644 --- a/IkiWiki/Setup.pm +++ b/IkiWiki/Setup.pm @@ -172,6 +172,11 @@ sub getsetup () { my @s=eval { $IkiWiki::hooks{getsetup}{$plugin}{call}->() }; next unless @s; + if (scalar(@s) % 2 != 0) { + print STDERR "warning: plugin $plugin has a broken getsetup; ignoring\n"; + next; + } + # set default section value (note use of shared # hashref between array and hash) my %s=@s; @@ -223,6 +228,10 @@ sub commented_dump ($$) { my $setup=$pair->[1]; 
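# (For reference, a minimal well-formed getsetup return is an even
# key/value list, which the sanity check added above enforces:
#
#     return plugin => { safe => 1, rebuild => undef };
#
# an odd-length list now triggers the "broken getsetup" warning instead.)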
my %s=@{$setup}; my $section=$s{plugin}->{section};
+ if (! defined $section) {
+ print STDERR "warning: missing section in $plugin\n";
+ $section="other";
+ }
 push @{$section_plugins{$section}}, $plugin; if (@{$section_plugins{$section}} == 1) { push @ret, "", $indent.("#" x 70), "$indent# $section plugins",
diff --git a/IkiWiki/Wrapper.pm b/IkiWiki/Wrapper.pm index c39aa2ef7..b46bc6aa9 100644 --- a/IkiWiki/Wrapper.pm +++ b/IkiWiki/Wrapper.pm @@ -28,10 +28,11 @@ sub gen_wrappers () { %config=(%origconfig); }
+our $program_to_wrap = $0;
 sub gen_wrapper () { $config{srcdir}=File::Spec->rel2abs($config{srcdir}); $config{destdir}=File::Spec->rel2abs($config{destdir});
- my $this=File::Spec->rel2abs($0);
+ my $this=File::Spec->rel2abs($program_to_wrap);
 if (! -x $this) { error(sprintf(gettext("%s doesn't seem to be executable"), $this)); } @@ -93,12 +94,53 @@ EOF # memory, a pile up of processes could cause thrashing # otherwise. The fd of the lock is stored in # IKIWIKI_CGILOCK_FD so unlockwiki can close it.
- $pre_exec=<<"EOF";
+ #
+ # A lot of cgi wrapper processes can potentially build
+ # up and clog an otherwise unloaded web server. To
+ # partially avoid this, when a GET comes in and the lock
+ # is already held, rather than blocking, an html page is
+ # constructed that retries. This is enabled by setting
+ # cgi_overload_delay.
+ if (defined $config{cgi_overload_delay} &&
+ $config{cgi_overload_delay} =~/^[0-9]+/) {
+ my $i=int($config{cgi_overload_delay});
+ $pre_exec.="#define CGI_OVERLOAD_DELAY $i\n"
+ if $i > 0;
+ my $msg=gettext("Please wait");
+ $msg=~s/"/\\"/g;
+ $pre_exec.='#define CGI_PLEASE_WAIT_TITLE "'.$msg."\"\n";
+ if (defined $config{cgi_overload_message} && length $config{cgi_overload_message}) {
+ $msg=$config{cgi_overload_message};
+ $msg=~s/"/\\"/g;
+ }
+ $pre_exec.='#define CGI_PLEASE_WAIT_BODY "'.$msg."\"\n";
+ }
+ $pre_exec.=<<"EOF";
 lockfd=open("$config{wikistatedir}/cgilock", O_CREAT | O_RDWR, 0666);
- if (lockfd != -1 && lockf(lockfd, F_LOCK, 0) == 0) {
- char *fd_s=malloc(8);
- sprintf(fd_s, "%i", lockfd);
- setenv("IKIWIKI_CGILOCK_FD", fd_s, 1);
+ if (lockfd != -1) {
+#ifdef CGI_OVERLOAD_DELAY
+ char *request_method = getenv("REQUEST_METHOD");
+ if (request_method && strcmp(request_method, "GET") == 0) {
+ if (lockf(lockfd, F_TLOCK, 0) == 0) {
+ set_cgilock_fd(lockfd);
+ }
+ else {
+ printf("Content-Type: text/html\\nRefresh: %i; URL=%s\\n\\n%s

    %s

    ", + CGI_OVERLOAD_DELAY, + getenv("REQUEST_URI"), + CGI_PLEASE_WAIT_TITLE, + CGI_PLEASE_WAIT_BODY); + exit(0); + } + } + else if (lockf(lockfd, F_LOCK, 0) == 0) { + set_cgilock_fd(lockfd); + } +#else + if (lockf(lockfd, F_LOCK, 0) == 0) { + set_cgilock_fd(lockfd); + } +#endif } EOF } @@ -140,6 +182,12 @@ void addenv(char *var, char *val) { newenviron[i++]=s; } +void set_cgilock_fd (int lockfd) { + char *fd_s=malloc(8); + sprintf(fd_s, "%i", lockfd); + setenv("IKIWIKI_CGILOCK_FD", fd_s, 1); +} + int main (int argc, char **argv) { int lockfd=-1; char *s; @@ -214,7 +262,7 @@ $set_background_command EOF my @cc=exists $ENV{CC} ? possibly_foolish_untaint($ENV{CC}) : 'cc'; - push @cc, possibly_foolish_untaint($ENV{CFLAGS}) if exists $ENV{CFLAGS}; + push @cc, split(' ', possibly_foolish_untaint($ENV{CFLAGS})) if exists $ENV{CFLAGS}; if (system(@cc, "$wrapper.c", "-o", "$wrapper.new") != 0) { #translators: The parameter is a C filename. error(sprintf(gettext("failed to compile %s"), "$wrapper.c")); diff --git a/Makefile.PL b/Makefile.PL index ef29a950c..ad3e4623c 100755 --- a/Makefile.PL +++ b/Makefile.PL @@ -75,7 +75,7 @@ underlay_install: install -d $(DESTDIR)$(PREFIX)/share/ikiwiki for dir in `cd underlays && $(FIND) . -follow -type d`; do \ install -d $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir; \ - for file in `$(FIND) underlays/$$dir -follow -maxdepth 1 -type f ! -name \\*.full.js ! -name \\*.full.css`; do \ + for file in `$(FIND) underlays/$$dir -follow -maxdepth 1 -type f ! -name jquery.js ! -name jquery-ui.css ! -name jquery-ui.js ! -name jquery.tmpl.js`; do \ cp -pRL $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir 2>/dev/null || \ install -m 644 $$file $(DESTDIR)$(PREFIX)/share/ikiwiki/$$dir; \ done; \ @@ -96,7 +96,7 @@ underlay_install: install -d $(DESTDIR)$(PREFIX)/share/ikiwiki/$$theme; \ for file in $$theme/*; do \ if echo "$$file" | grep -q style.css; then \ - (cat doc/style.css; cat $$theme/base.css 2>/dev/null; cat $$file) >> $(DESTDIR)$(PREFIX)/share/ikiwiki/$$theme/style.css; \ + (cat doc/style.css; cat $$theme/base.css 2>/dev/null; cat $$file) > $(DESTDIR)$(PREFIX)/share/ikiwiki/$$theme/style.css; \ elif echo "$$file" | grep -q base.css; then \ :; \ elif [ -f "$$file" ]; then \ @@ -187,5 +187,6 @@ WriteMakefile( 'HTML::Parser' => "0", 'URI' => "0", 'Data::Dumper' => "2.11", + 'YAML::XS' => "0", }, ); diff --git a/auto-blog.setup b/auto-blog.setup index 0eb83ded6..5617daf9e 100644 --- a/auto-blog.setup +++ b/auto-blog.setup @@ -36,7 +36,7 @@ IkiWiki::Setup::Automator->import( cgiurl => "http://$domain/~$ENV{USER}/$wikiname_short/ikiwiki.cgi", cgi_wrapper => "$ENV{HOME}/public_html/$wikiname_short/ikiwiki.cgi", adminemail => "$ENV{USER}\@$domain", - add_plugins => [qw{goodstuff websetup comments blogspam calendar sidebar}], + add_plugins => [qw{goodstuff websetup comments blogspam calendar sidebar trail}], disable_plugins => [qw{}], libdir => "$ENV{HOME}/.ikiwiki", rss => 1, diff --git a/debian/changelog b/debian/changelog index d6b71eaa3..e2b5ac523 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,20 +1,333 @@ -ikiwiki (3.20120203) UNRELEASED; urgency=low +ikiwiki (3.20140912) UNRELEASED; urgency=medium + + * Don't double-decode CGI submissions with Encode.pm >= 2.53, + fixing "Error: Cannot decode string with wide characters". + Thanks, Antoine Beaupré + * Avoid making trails depend on everything in the wiki by giving them + a better way to sort the pages + * Don't let users post comments that won't be displayed + * Fix encoding of Unicode strings in Python plugins. 
+ Thanks, chrysn
+ * Improve performance and correctness of the [[!if]] directive
+ * Let [[!inline rootpage=foo postform=no]] disable the posting form
+ * Switch default [[!man]] shortcut to manpages.debian.org. Closes: #700322
+
+ -- Simon McVittie Fri, 12 Sep 2014 21:23:58 +0100
+
+ikiwiki (3.20140831) unstable; urgency=medium
+
+ * Make --no-gettime work in initial build. Closes: #755075
+
+ -- Joey Hess Sun, 31 Aug 2014 14:17:24 -0700
+
+ikiwiki (3.20140815) unstable; urgency=medium
+
+ * Add google back to openid selector. Apparently this has gotten a stay
+ of execution until April 2015. (It may continue to work until 2017.)
+ * highlight: Add compatibility with highlight 3.18, while still supporting
+ 3.9+. Closes: #757679
+ Thanks, David Bremner
+ * highlight: Add support for multiple language definition directories
+ Closes: #757680
+ Thanks, David Bremner
+
+ -- Joey Hess Fri, 15 Aug 2014 12:58:08 -0400
+
+ikiwiki (3.20140613) unstable; urgency=medium
+
+ * only_committed_changes could fail in a git repository merged
+ with git merge -s ours.
+ * Remove google from openid selector, per http://xkcd.com/1361/
+
+ -- Joey Hess Fri, 13 Jun 2014 10:09:10 -0400
+
+ikiwiki (3.20140227) unstable; urgency=medium
+
+ * Added useragent config setting. Closes: #737121
+ Thanks, Tuomas Jormola
+ * po: Add html_lang_code and html_lang_dir template variables
+ for the language code and direction of text.
+ Thanks, Mesar Hameed
+ * Allow up to 8 levels of nested directives, rather than previous 3
+ in directive infinite loop guard.
+ * git diffurl: Do not escape / in paths to changed files, in order to
+ interoperate with cgit (gitweb works either way)
+ Thanks, intrigeri.
+ * git: Explicitly push master branch, as will be needed by git 2.0's
+ change to push.default=matching by default.
+ Thanks, smcv
+ * Deal with nasty issue with gettext clobbering $@ while printing
+ error message containing it.
+ Thanks, smcv
+ * Cleanup of the openid login widget, including replacing of hotlinked
+ images from openid providers with embedded, freely licensed artwork.
+ Thanks, smcv
+ * Improve templates testing.
+ Thanks, smcv
+ * python proxy: Avoid utf-8 related crash.
+ Thanks, Antoine Beaupré
+ * Special thanks to Simon McVittie for being the patchmeister for this
+ release.
+
+ -- Joey Hess Thu, 27 Feb 2014 11:55:35 -0400
+
+ikiwiki (3.20140125) unstable; urgency=medium
+
+ * inline: Allow overriding the title of the feed. Closes: #735123
+ Thanks, Christophe Rhodes
+ * osm: Escape name parameter. Closes: #731797
+
+ -- Joey Hess Sat, 25 Jan 2014 16:40:32 -0400
+
+ikiwiki (3.20140102) unstable; urgency=low
+
+ * aggregate: Improve display of post author.
+ * poll: Fix behavior of poll buttons when inlined.
+ * Fixed unnecessary tight loop hash copy in saveindex where a pointer
+ can be used instead. Can speed up refreshes by nearly 50% in some
+ circumstances.
+ * Optimized loadindex by caching the page name in the index.
+ * Added only_committed_changes config setting, which speeds up wiki
+ refresh by querying git to find the files that were changed, rather
+ than looking at the work tree. Not enabled by default as it can
+ break some setups where not all files get committed to git.
+ * comments: Write pending moderation comments to the transient underlay
+ to avoid conflict with only_committed_changes.
+ * search: Added google_search option, which makes it search google
+ rather than using the internal xapian database. 
+ (googlesearch plugin is too hard to turn on when xapain databases + corrupt themselves, which happens all too frequently). + * osm: Remove invalid use of charset on embedded javascript tags. + Closes: #731197 + * style.css: Add compatibility definitions for more block-level + html5 elements. Closes: #731199 + * aggregrate: Fix several bugs in handling of empty and colliding + titles when generating filenames. + + -- Joey Hess Thu, 02 Jan 2014 12:22:22 -0400 + +ikiwiki (3.20130904.1) unstable; urgency=low + + * Fix cookiejar default setting. + + -- Joey Hess Wed, 04 Sep 2013 10:15:37 -0400 + +ikiwiki (3.20130904) unstable; urgency=low + + * calendar: Display the popup mouseover when there is only 1 page for a + given day, for better UI consistency. + * meta: Can now be used to add an enclosure to a page, which is a fancier + way to do podcasting than just inlining the media files directly; + this way you can write a post about the podcast episode with show notes, + author information, etc. + (schmonz) + * aggregate: Show author in addition to feedname, if different. + (schmonz) + * Consistently configure LWP::UserAgent to allow use of http_proxy + and no_proxy environment variables, as well as ~/.ikiwiki/cookies + (schmonz) + * Fix test suite to work with perl 5.18. Closes: #719969 + + -- Joey Hess Wed, 04 Sep 2013 08:54:31 -0400 + +ikiwiki (3.20130711) unstable; urgency=low + + * Deal with git behavior change in 1.7.2 and newer that broke support + for commits with an empty commit message. + * Pass --no-edit when used with git 1.7.8 and newer. + + -- Joey Hess Wed, 10 Jul 2013 21:49:23 -0400 + +ikiwiki (3.20130710) unstable; urgency=low + + * blogspam: Fix encoding issue in RPC::XML call. + Thanks, Changaco + * comments: The formats allowed to be used in comments can be configured + using comments_allowformats. + Thanks, Michal Sojka + * calendar: When there are multiple pages for a given day, they're + displayed in a popup on mouseover. + Thanks, Louis + * osm: Remove trailing slash from KML maps icon. + * page.tmpl: omit searchform, trails, sidebar and most metadata in CGI + (smcv) + * openid: Automatically upgrade openid_realm to https when + accessed via https. + * The ip() pagespec can now contain glob characters to match eg, a subnet + full of spammers. + * Fix crash that could occur when a needsbuild hook returned a file + that does not exist. + * Fix python proxy to not crash when fed unicode data in getstate + and setstate. + Thanks, chrysn + * Fix committing attachments when using svn. + + -- Joey Hess Wed, 10 Jul 2013 17:45:40 -0400 + +ikiwiki (3.20130518) unstable; urgency=low + + * Fix test suite to not fail when XML::Twig is not installed. + Closes: #707436 + * theme: Now can be used in all templates when + a theme is enabled. + * notifyemail: Fix bug that caused duplicate emails to be sent when + site was rebuilt. + * bzr: bzr rm no longer has a --force option, remove + + -- Joey Hess Sat, 18 May 2013 16:28:21 -0400 + +ikiwiki (3.20130504) unstable; urgency=low + + * Allow dots in directive parameter names. (tango) + * Add missing plugin section, and deal with missing sections with a warning. + * Detect plugins with a broken getsetup and warn. + * map: Correct reversion introduced in version 3.20110225 that could + generate invalid html. (smcv) + * Makefile.PL: overwrite theme style.css instead of appending + (Thanks, Mikko Rapeli) + * meta: Fix anchors used to link to the page's license and copyright. 
+ Closes: #706437 + + -- Joey Hess Sat, 04 May 2013 23:47:21 -0400 + +ikiwiki (3.20130212) unstable; urgency=low + + * htmlscrubber: Allow the bitcoin URI scheme. + * htmlscrubber: Allow the URI schemes of major VCS's. + * aggregate: When run with --aggregate, if an aggregation is already + running, don't go on and --refresh. + * trail: Avoid excess dependencies between pages in the trail + and the page defining the trail. Thanks, smcv. + * opendiscussion: Don't allow editing discussion pages if discussion pages + are disabled. (smcv) + * poll: Add expandable option to allow users to easily add new choices to + a poll. + * trail: Avoid massive slowdown caused by pagetemplate hook when displaying + dynamic cgi pages, which cannot use trail anyway. + * Deal with empty diffurl in configuration. + * cvs: Various fixes. (schmonz) + * highlight: Now adds a span with class highlight- around + highlighted content, allowing for language-specific css styling. + + -- Joey Hess Tue, 12 Feb 2013 21:48:02 -0400 + +ikiwiki (3.20121212) unstable; urgency=low + + * filecheck: Fix bug that prevented File::MimeInfo::Magic from ever + being used. + * openid: Display openid in Preferences page as a comment, so it can be + selected in all browsers. + + -- Joey Hess Tue, 11 Dec 2012 12:12:12 -0400 + +ikiwiki (3.20121017) unstable; urgency=low + + * recentchangesdiff: fix further breakage to the template from 3.20120725 + + -- Joey Hess Tue, 16 Oct 2012 20:49:27 -0400 + +ikiwiki (3.20121016) unstable; urgency=low + + * monochrome: New theme, contributed by Jon Dowland. + * rst: Ported to python 3, while still also being valid python 2. + Thanks, W. Trevor King + * Try to avoid a situation in which so many ikiwiki cgi wrapper programs + are running, all waiting on some long-running thing like a site rebuild, + that it prevents the web server from doing anything else. The current + approach only avoids this problem for GET requests; if multiple cgi's + run GETs on a site at the same time, one will display a "please wait" + page for a configurable number of seconds, which then redirects to retry. + To enable this protection, set cgi_overload_delay to the number of + seconds to wait. This is not enabled by default. + * Add back a 1em margin between archivepage divs. + * recentchangesdiff: Correct broken template that resulted in duplicate + diff icons being displayed, and bloated the recentchanges page with + inline diffs when the configuration should have not allowed them. + + -- Joey Hess Tue, 16 Oct 2012 15:14:19 -0400 + +ikiwiki (3.20120725) unstable; urgency=low + + * recentchangesdiff: When diffurl is not set, provide inline diffs + in the recentchanges page, with visibility toggleable via javascript. + Thanks, Antoine Beaupré + * Split CFLAGS into words when building wrapper. Closes: #682237 + * osm: Avoid calling urlto before generated files are registered. + Thanks, Philippe Gauthier and Antoine Beaupré + * osm: Add osm_openlayers_url configuration setting. + Thanks, Genevieve + * osm: osm_layers can be used to configured the layers displayed on the map. + Thanks, Antoine Beaupré + * comments: Remove ipv6 address specific code. + + -- Joey Hess Sat, 25 Aug 2012 10:58:42 -0400 + +ikiwiki (3.20120629) unstable; urgency=low + + * mirrorlist: Add mirrorlist_use_cgi setting that avoids usedirs or + other config differences by linking to the mirror's CGI. 
(intrigeri) + + -- Joey Hess Fri, 29 Jun 2012 10:16:08 -0400 + +ikiwiki (3.20120516) unstable; urgency=high + + * meta: Security fix; add missing sanitization of author and authorurl. + CVE-2012-0220 Thanks, Raúl Benencia + + -- Joey Hess Wed, 16 May 2012 19:51:27 -0400 + +ikiwiki (3.20120419) unstable; urgency=low + + * Remove dead link from plugins/teximg. Closes: #664885 + * inline: When the pagenames list includes pages that do not exist, skip + them. + * meta: Export author information in html tag. Closes: #664779 + Thanks, Martin Michlmayr + * notifyemail: New plugin, sends email notifications about new and + changed pages, and allows subscribing to comments. + * Added a "changes" hook. Renamed the "change" hook to "rendered", but + the old hook name is called for now for back-compat. + * meta: Support keywords header. Closes: #664780 + Thanks, Martin Michlmayr + * passwordauth: Fix url in password recovery email to be absolute. + * httpauth: When it's the only auth method, avoid a pointless and + confusing signin form, and go right to the httpauthurl. + * rename: Allow rename to be started not from the edit page; return to + the renamed page in this case. + * remove: Support removing of pages in the transient underlay. (smcv) + * inline, trail: The pagenames parameter is now a list of absolute + pagenames, not relative wikilink type names. This is necessary to fix + a bug, and makes pagenames more consistent with the pagespec used + in the pages parameter. (smcv) + * link: Fix renaming wikilinks that contain embedded urls. + * graphviz: Handle self-links. + * trail: Improve CSS, also display trail links at bottom of page, + and a bug fix. (smcv) + + -- Joey Hess Thu, 19 Apr 2012 15:32:07 -0400 + +ikiwiki (3.20120319) unstable; urgency=low + + * osm: New plugin to embed an OpenStreetMap into a wiki page. + Supports waypoints, tags, and can even draw paths matching + wikilinks between pages containing waypoints. + Thanks to Blars Blarson and Antoine Beaupré, as well as the worldwide + OpenStreetMap community for this utter awesomeness. + * trail: New plugin to add navigation trails through pages via Next and + Previous links. Trails can easily be added to existing inlines by setting + trail=yes in the inline. + Thanks to Simon McVittie for his persistance developing this feature. * Fix a snail mail address. Closes: #659158 * openid-jquery.js: Update URL of Wordpress favicon. Closes: #660549 * Drop the version attribute on the generator tag in Atom feeds to make builds more reproducible. Closes: #661569 (Paul Wise) * shortcut: Support Wikipedia's form of url-encoding for unicode characters, which involves mojibake. Closes: #661198 - * osm: New plugin to embed an OpenStreetMap into a wiki page. - Supports waypoints, tags, and can even draw paths matching - wikilinks between pages containing waypoints. - Thanks to Blars Blarson and Antoine Beaupré, as well as the worldwide - OpenStreetMap community for this utter awesomeness. * Add a few missing jquery UI icons to attachment upload widget underlay. * URI escape filename when generating the diffurl. + * Add build-affected hook. Used by trail. 
- -- Joey Hess Wed, 08 Feb 2012 16:07:00 -0400 + -- Joey Hess Mon, 19 Mar 2012 14:24:43 -0400 ikiwiki (3.20120202) unstable; urgency=low diff --git a/debian/compat b/debian/compat index 7f8f011eb..ec635144f 100644 --- a/debian/compat +++ b/debian/compat @@ -1 +1 @@ -7 +9 diff --git a/debian/control b/debian/control index 54cbd0f7b..2ab6207e0 100644 --- a/debian/control +++ b/debian/control @@ -1,22 +1,23 @@ Source: ikiwiki Section: web Priority: optional -Build-Depends: perl, debhelper (>= 7.0.50) +Build-Depends: perl, debhelper (>= 9) Build-Depends-Indep: dpkg-dev (>= 1.9.0), libxml-simple-perl, libtext-markdown-discount-perl, libtimedate-perl, libhtml-template-perl, libhtml-scrubber-perl, wdg-html-validator, libhtml-parser-perl, liburi-perl (>= 1.36), perlmagick, po4a (>= 0.34), - libfile-chdir-perl, libyaml-libyaml-perl, python-support + libfile-chdir-perl, libyaml-libyaml-perl, python-support, librpc-xml-perl, + libcgi-session-perl Maintainer: Joey Hess Uploaders: Josh Triplett -Standards-Version: 3.9.2 +Standards-Version: 3.9.5 Homepage: http://ikiwiki.info/ Vcs-Git: git://git.ikiwiki.info/ Package: ikiwiki Architecture: all -Depends: ${misc:Depends}, ${perl:Depends}, ${python:Depends}, +Depends: ${misc:Depends}, ${perl:Depends}, libtext-markdown-discount-perl, libhtml-scrubber-perl, libhtml-template-perl, libhtml-parser-perl, liburi-perl (>= 1.36), libyaml-libyaml-perl, libjson-perl @@ -45,8 +46,8 @@ Provides: ikiwiki-plugin-table Description: a wiki compiler Ikiwiki converts a directory full of wiki pages into HTML pages suitable for publishing on a website. Unlike many wikis, ikiwiki does not have its - own ad-hoc means of storing page history, and instead uses a revision control - system such as Subversion or Git. + own ad-hoc means of storing page history, and instead uses a revision + control system such as Subversion or Git. . Ikiwiki implements all of the other standard features of a wiki, including web-based page editing, user registration and logins, a RecentChanges diff --git a/debian/copyright b/debian/copyright index 8fddb682b..f3ea9e740 100644 --- a/debian/copyright +++ b/debian/copyright @@ -157,6 +157,10 @@ Files: IkiWiki/Plugin/osm.pm Copyright: © 2011 Blars Blarson, Antoine Beaupré License: GPL-2 +Files: IkiWiki/Plugin/trail.pm +Copyright: 2009-2012 Simon McVittie +License: GPL-2+ + Files: doc/logo/* Copyright: © 2006 Recai Oktaş License: GPL-2+ @@ -207,6 +211,34 @@ Comment: From http://code.google.com/p/openid-selector/ License: BSD-2-clause +Files: underlays/openid-selector/ikiwiki/openid/goa-* +Copyright: + © 2011 Red Hat, Inc. +License: LGPL-2.1+ +Comment: + taken from data/icons/16x16/ in gnome-online-accounts git + +Files: underlays/openid-selector/ikiwiki/openid/wordpress.png +Copyright: + © 2003-2013 "the contributors" +License: GPL-2+ +Comment: + taken from wp-admin/images/w-logo-blue.png in wordpress/3.8.1+dfsg1-1 + and trivially modified (resized to 16x16, placed on a white background) + +Files: + icons/aol.svg + icons/livejournal.svg + icons/verisign.svg + underlays/openid-selector/ikiwiki/openid/aol.png + underlays/openid-selector/ikiwiki/openid/livejournal.png + underlays/openid-selector/ikiwiki/openid/verisign.png +Copyright: + © 2014 Simon McVittie +License: other + Redistribution and use in source and compiled forms, with or without + modification, are permitted under any circumstances. No warranty. 
+
 Files: underlays/jquery/*
 Copyright: © 2005-2011 by John Resig, Branden Aaron & Jörn Zaefferer
  © 2011 The Dojo Foundation
@@ -244,6 +276,10 @@ Files: underlays/themes/goldtype/*
 Copyright: © Lars Wirzenius
 License: GPL-2+
 
+Files: underlays/themes/monochrome/*
+Copyright: © 2012 Jon Dowland
+License: GPL-2+
+
 License: BSD-2-clause
  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
@@ -293,3 +329,20 @@ License: Expat
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  DEALINGS IN THE SOFTWARE.
+
+License: LGPL-2.1+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+ .
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+ .
+ You should have received a copy of the GNU Lesser General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+ .
+ On Debian systems, the complete text of the GNU Lesser General
+ Public License can be found in `/usr/share/common-licenses/LGPL-2.1'.
diff --git a/doc/anchor.mdwn b/doc/anchor.mdwn
index 012e52fa0..12d193fe9 100644
--- a/doc/anchor.mdwn
+++ b/doc/anchor.mdwn
@@ -1,3 +1,11 @@
 ikiwiki works with anchors in various situations.
 
+You can insert anchors directly in the body of a page and they will be used in the resulting HTML, for example:
+
+    <a name="anchor"></a>
+
+... will make the link [[anchor#anchor]] work.
+
+
 This page accumulates links to the concept of anchors.
diff --git a/doc/basewiki/sandbox.mdwn b/doc/basewiki/sandbox.mdwn
index e76bdb8d1..c66534fc2 100644
--- a/doc/basewiki/sandbox.mdwn
+++ b/doc/basewiki/sandbox.mdwn
@@ -30,5 +30,3 @@ Bulleted list
 * item
 
 [[ikiwiki/WikiLink]]
-
-[[!calendar type="month" pages="blog/*"]]
diff --git a/doc/branches.mdwn b/doc/branches.mdwn
index b7b9164ac..232f2ce6a 100644
--- a/doc/branches.mdwn
+++ b/doc/branches.mdwn
@@ -20,6 +20,6 @@ Long-lived branches in the main git repository:
 * `ignore` gets various branches merged to it that [[Joey]] wishes
   to ignore when looking at everyone's unmerged changes.
 * `pristine-tar` contains deltas that
-  [pristine-tar](http://kitenet.net/~joey/code/pristine-tar)
+  [pristine-tar](http://joeyh.name/code/pristine-tar)
   can use to recreate released tarballs of ikiwiki
 * `setup` contains the ikiwiki.setup file for this site
diff --git a/doc/bugs/Attachment_plug-in_not_committing_files.mdwn b/doc/bugs/Attachment_plug-in_not_committing_files.mdwn
new file mode 100644
index 000000000..aaba13326
--- /dev/null
+++ b/doc/bugs/Attachment_plug-in_not_committing_files.mdwn
@@ -0,0 +1,18 @@
+I've added the attachment plug-in to our wiki. I am able to add files to the working copy of the website on the server, but none of the files are being checked into the SVN repository. Using logging I've tracked the problem to line 293 of attachment.pm:
+
+    IkiWiki::rcs_add($_) foreach @attachments;
+
+Here it is trying to add an absolute path to the file when rcs_add is expecting a path relative to the SVN root.
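+
+(To make the mismatch concrete, a hypothetical example; both paths here are made up:)
+
+    IkiWiki::rcs_add("/srv/wiki/src/attachments/photo.png");  # what line 293 effectively passes: absolute
+    IkiWiki::rcs_add("attachments/photo.png");                # what the svn backend expects: relative to the SVN root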
+
+From this code it looks like $dest needs to be absolute and that a relative path needs to be pushed to @attachments:
+
+    rename($filename, $dest);
+    push @attachments, $dest;
+
+I'm using ikiwiki version 3.20120202ubuntu1.
+
+> I don't think this affects git, just because it happens to
+> allow adding with an absolute path.
+>
+> So, this is an interesting way svn support can bit rot if nothing
+> is testing it! [[fixed|done]] --[[Joey]]
diff --git a/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn b/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn
new file mode 100644
index 000000000..81a5abf28
--- /dev/null
+++ b/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn
@@ -0,0 +1,28 @@
+If you wish to install ikiwiki in your home directory (for example because you don't have root access), you need to set environment variables (such as PATH and PERL5LIB) to point to the directories that contain your personal copy of IkiWiki.
+
+The CGI wrapper remembers PATH, but not the environment variable PERL5LIB. Consequently, it will look for plugins and so on in the usual system directories, not in your personal copy. This is particularly insidious if you have a system copy of a different version installed, as your CGI wrapper may then load in code from this version.
+
+I think the CGI wrapper should remember PERL5LIB too.
+
+-- Martin
+
+Thanks a lot for pointing me to this location in the code. I had been looking for it for some time.
+
+This brutal patch implements your solution as a temporary fix.
+
+    *** Wrapper.pm.old      2012-08-25 16:41:41.000000000 +0200
+    --- Wrapper.pm  2012-10-01 17:33:17.582956524 +0200
+    ***************
+    *** 149,154 ****
+    --- 149,155 ----
+              $envsave
+              newenviron[i++]="HOME=$ENV{HOME}";
+              newenviron[i++]="PATH=$ENV{PATH}";
+    +         newenviron[i++]="PERL5LIB=$ENV{PERL5LIB}";
+              newenviron[i++]="WRAPPED_OPTIONS=$configstring";
+      
+      #ifdef __TINYC__
+
+As I am not sure that remembering `PERL5LIB` is a good idea, I think that a prettier solution would be to add a config variable (say `cgi_wrapper_perllib`) which, if set, contains the `PERL5LIB` value to include in the wrapper, or another (say `cgi_wrapper_remember_libdir`) which, if set, remembers the current `PERL5LIB`.
+
+-- Bruno
diff --git a/doc/bugs/CamelCase_and_Recent_Changes_create_spurious_Links.mdwn b/doc/bugs/CamelCase_and_Recent_Changes_create_spurious_Links.mdwn
new file mode 100644
index 000000000..de95fb7d3
--- /dev/null
+++ b/doc/bugs/CamelCase_and_Recent_Changes_create_spurious_Links.mdwn
@@ -0,0 +1,11 @@
+Hi folks,
+
+This is a fairly fresh wiki. I recently noticed the Links: section at the bottom looked like this:
+
+Links: index recentchanges/change 0b2f03d3d21a3bb21f6de75d8711c73df227e17c recentchanges/change 1c5b830b15c4f2f0cc97ecc0adfd60a1f1578918 recentchanges/change 20b20b91b90b28cdf2563eb959a733c6dfebea7a recentchanges/change 3377cedd66380ed416f59076d69f546bf12ae1e4 recentchanges/change 4c53d778870ea368931e7df2a40ea67d00130202 recentchanges/change 7a9f3c441a9ec7e189c9df322851afa21fd8b00c recentchanges/change 7dcaea1be47308ee27a18f893ff232a8370e348a recentchanges/change 963245d4e127159e12da436dea30941ec371c6be recentchanges/change cd489ff4abde8dd611f7e42596b93953b38b9e1c ...
+
+All of those "recentchanges/ change xxxxxxx" links are clickable, but all yield 404 when clicked.
+
+When I disable the CamelCase plugin and rebuild the wiki, all the Links other than index disappear, as they should.
Re-enable CamelCase, and they're back. + +This is a very simple wiki. Just fresh, only one page other than index (this one), and nothing at all fancy/weird about it. diff --git a/doc/bugs/Error:_OpenID_failure:_time_bad_sig:.mdwn b/doc/bugs/Error:_OpenID_failure:_time_bad_sig:.mdwn index 2fa4a4759..bc46302b0 100644 --- a/doc/bugs/Error:_OpenID_failure:_time_bad_sig:.mdwn +++ b/doc/bugs/Error:_OpenID_failure:_time_bad_sig:.mdwn @@ -81,3 +81,7 @@ Please tell me if you need more info. The same openid worked fine to login to * >>>>> Investigation revealed it was a bug in the freebsd patch, which I >>>>> understand is going to be dealt with. [[done]] --[[Joey]] + +I am getting the same error here with ikiwiki 3.20120629 (wheezy). I had trouble with ikiwiki-hosting configurations of OpenID, basically related to the `openid_realm` parameter - which I had to comment out. But now it seems to fail regardless. --[[anarcat]] + +> Nevermind, this was because I was blocking cookie on the CGI (!!). Message *could* be improved though, it's not the first time i stumble upon this... --[[anarcat]] diff --git a/doc/bugs/Error:_no_text_was_copied_in_this_page_--_missing_page_dependencies.mdwn b/doc/bugs/Error:_no_text_was_copied_in_this_page_--_missing_page_dependencies.mdwn index 0082eed4d..0bbf6096f 100644 --- a/doc/bugs/Error:_no_text_was_copied_in_this_page_--_missing_page_dependencies.mdwn +++ b/doc/bugs/Error:_no_text_was_copied_in_this_page_--_missing_page_dependencies.mdwn @@ -2,7 +2,7 @@ That one has bitten me for some time; here is the minimal testcase. There is also an equivalent (I suppose) problem when using another plugin, but I hope it's enough to track it down for this one. - $ tar -xj < [bug-dep_order.tar.bz2](http://schwinge.homeip.net/~thomas/tmp/bug-dep_order.tar.bz2) + $ tar -xj < [bug-dep_order.tar.bz2](http://nic-nac-project.de/~schwinge/ikiwiki/bug-dep_order.tar.bz2) $ cd bug-dep_order/ $ ./render_locally [...] diff --git a/doc/bugs/Existing_Discussion_pages_appear_as_non-existing.mdwn b/doc/bugs/Existing_Discussion_pages_appear_as_non-existing.mdwn new file mode 100644 index 000000000..9ba4ede6e --- /dev/null +++ b/doc/bugs/Existing_Discussion_pages_appear_as_non-existing.mdwn @@ -0,0 +1,5 @@ +If you look at [[todo/org mode]], the link to the Discussion page is not there (has a question mark), as if it didn't exist. But--through the search--I discovered that the Discussion page does exist actually: [[todo/org mode/Discussion]]. + +So, there is a bug that prevents a link to the existing Discussion page from appearing in the correct way on the corresponding main page. --Ivan Z. + +Perhaps, this has something to do with the same piece of code/logic (concerning case-sensitivity) as the fixed [[bugs/unwanted discussion links on discussion pages]]? --Ivan Z. diff --git a/doc/bugs/FormattingHelp_links_to_MarkDown_help_page_regardless_of_page_format.mdwn b/doc/bugs/FormattingHelp_links_to_MarkDown_help_page_regardless_of_page_format.mdwn new file mode 100644 index 000000000..04e7e9078 --- /dev/null +++ b/doc/bugs/FormattingHelp_links_to_MarkDown_help_page_regardless_of_page_format.mdwn @@ -0,0 +1,3 @@ +The `FormattingHelp` link in the edit form of any page points to the same [ikiwiki/formatting](/ikiwiki/formatting) help text for Markdown, regardless of page type (which could be HTML, reStructuredText, etc.) On the wiki I run, this is confusing users. 
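+
+(For illustration, the edit form could pick the help link from the page type; a hypothetical Perl sketch, not existing ikiwiki code, and the per-format help pages named here are invented:)
+
+    # map page types to per-format help pages ($page is the page being edited)
+    my %formatting_help=(
+        mdwn => "ikiwiki/formatting",
+        rst  => "ikiwiki/formatting/rst",
+        html => "ikiwiki/formatting/html",
+    );
+    my $type=pagetype($pagesources{$page});
+    my $help=(defined $type && exists $formatting_help{$type})
+        ? $formatting_help{$type} : "ikiwiki/formatting";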
+
+What I would like is that either the `FormattingHelp` link changes with page type (requires Javascript, if one is going to change the page type for new pages), or that the [ikiwiki/formatting](/ikiwiki/formatting) page is an index of supported page types with a further link to help text for each one (less user-friendly but likely easier to implement).
diff --git a/doc/bugs/Inlining_adds_newlines_which_can_break_markdown.mdwn b/doc/bugs/Inlining_adds_newlines_which_can_break_markdown.mdwn
new file mode 100644
index 000000000..eb71994e5
--- /dev/null
+++ b/doc/bugs/Inlining_adds_newlines_which_can_break_markdown.mdwn
@@ -0,0 +1,43 @@
+I'm trying to put a list of tags in a table, so I carefully make a newline-free taglist.tmpl and then do:
+
+    | \[[!inline pages="link(/category/env)" feeds=no archive=yes sort=title template=taglist]] |
+
+but there's a line in `inline.pm` that does:
+
+    return "<div class=\"inline\" id=\"$#inline\"></div>\n\n";
+
+And the extra newlines break the table. Can they be safely removed?
+
+> If you want an HTML table, I would suggest using an HTML table, which
+> should pass through Markdown without being interpreted further:
+>
+>     <table><tr>
+>     \[[!inline pages="link(/category/env)" feeds=no archive=yes sort=title template=tagtd]]
+>     </tr></table>
+>
+> where tagtd.tmpl is of the form `<td>your markup here</td>`; or even just
+>
+>     \[[!inline pages="link(/category/env)" feeds=no archive=yes sort=title template=tagtable]]
+>
+> where tagtable.tmpl looks like
+>
+>     <table>
+>     <tr>
+>     <td>
+>     your tag here
+>     </td>
+>     </tr>
+>     </table>
    +> +> I don't think you're deriving much benefit from Markdown's table syntax +> if you have to mix it with HTML::Template and ikiwiki directives, +> and be pathologically careful with whitespace. "Right tool for the job" +> and all that :-) +> +> When I edited this page I was amused to find that you used HTML, +> not Markdown, as its format. It seems oddly appropriate to my answer, but +> I've converted it to Markdown and adjusted the formatting, for easier +> commenting. +> --[[smcv]] diff --git a/doc/bugs/Linkmap_doesn__39__t_support_multiple_linkmaps_on_a_single_page.mdwn b/doc/bugs/Linkmap_doesn__39__t_support_multiple_linkmaps_on_a_single_page.mdwn new file mode 100644 index 000000000..a0645477e --- /dev/null +++ b/doc/bugs/Linkmap_doesn__39__t_support_multiple_linkmaps_on_a_single_page.mdwn @@ -0,0 +1,3 @@ +If I use the linkmap directive twice on a single page, I get the same image appearing in both locations, even though the parameters for the two directives may have been different. + +-- Martin diff --git a/doc/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting.mdwn b/doc/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting.mdwn new file mode 100644 index 000000000..4c7b12e8c --- /dev/null +++ b/doc/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting.mdwn @@ -0,0 +1,11 @@ +Say you are commenting on this report. The Navbar on top will look like + +[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ commenting on Navbar does not link to page being commented on while commenting + +while either of those two options would be better: + +[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ commenting on [Navbar does not link to page being commented on while commenting](http://ikiwiki.info/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting/) + +[ikiwiki](http://ikiwiki.info/)/ [bugs](http://ikiwiki.info/bugs/)/ [Navbar does not link to page being commented on while commenting](http://ikiwiki.info/bugs/Navbar_does_not_link_to_page_being_commented_on_while_commenting/) / New comment + +-- RichiH diff --git a/doc/bugs/Please_update_highlight_plugin_for_highlight_3.18.mdwn b/doc/bugs/Please_update_highlight_plugin_for_highlight_3.18.mdwn new file mode 100644 index 000000000..e98f66881 --- /dev/null +++ b/doc/bugs/Please_update_highlight_plugin_for_highlight_3.18.mdwn @@ -0,0 +1,12 @@ +I have put two patches + + git://pivot.cs.unb.ca/ikiwiki.git -b master + +The first works around a highlight API change, and the second supports the new(ish) +feature of having multiple directories with language defintions for highlight. + +The corresponding version of libhighlight-perl is in Debian experimental if you want to test. + +[[!tag patch]] + +> [[done]] thanks --[[Joey]] diff --git a/doc/bugs/Remove_redirect_pages_from_inline_pages.mdwn b/doc/bugs/Remove_redirect_pages_from_inline_pages.mdwn new file mode 100644 index 000000000..a43bd408f --- /dev/null +++ b/doc/bugs/Remove_redirect_pages_from_inline_pages.mdwn @@ -0,0 +1,15 @@ +[[!tag bugs wishlist]] + + +I accidentally made a typo spelling "surprises" and changed my URL from + + +to + + +Using the meta redir. However the meta redir now appears in the index of + +Any ideas how to handle this situation? + +> Well, you can adjust the inline's pagespec to exclude it, or even tag it +> with a tag that the pagespec is adjusted to exclude. 
--[[Joey]]
diff --git a/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__.mdwn b/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__.mdwn
new file mode 100644
index 000000000..e93f4e546
--- /dev/null
+++ b/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__.mdwn
@@ -0,0 +1,47 @@
+Saving a wiki page in ikiwiki, or running
+ikiwiki --setup wiki.setup --rebuild, takes a **dozen minutes** on a tiny tiny wiki (10 user-added pages)!
+
+I profiled ikiwiki with [[!cpan Devel::SmallProf]]: see [[users/mathdesc]] for details.
+
+And I came to the conclusion that [[plugins/filecheck]] on attachments was the only cause.
+It always ends up in the fallback code that shells out to the time-consuming `file`
+command, even though that does not look successful either.
+
+
    + # Get the mime type.
    +        #
    +        # First, try File::Mimeinfo. This is fast, but doesn't recognise
    +        # all files.
    +        eval q{use File::MimeInfo::Magic};                    
    +        my $mimeinfo_ok=! $@;                                     
    +        my $mimetype;
    +        if ($mimeinfo_ok) {
    +                my $mimetype=File::MimeInfo::Magic::magic($file);
    +        }                                                         
    +        
    +        # Fall back to using file, which has a more complete
    +        # magic database.
    +        if (! defined $mimetype) {
    +                open(my $file_h, "-|", "file", "-bi", $file); 
    +                $mimetype=<$file_h>;                                 
    +                chomp $mimetype;                            
    +                close $file_h;                   
    +        }
    +        if (! defined $mimetype || $mimetype !~s /;.*//) {
    +                # Fall back to default value.
    +                $mimetype=File::MimeInfo::Magic::default($file)
    +                        if $mimeinfo_ok; 
    +                if (! defined $mimetype) {
    +                        $mimetype="unknown";
    +                }                                                  
    +        }        
    +
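+
+(Aside: the stray `my` on the inner assignment in the `if ($mimeinfo_ok)` block above declares
+a new, block-scoped `$mimetype`, so the outer `$mimetype` stays undef and the expensive fallback
+always runs. A minimal self-contained illustration of that scoping trap, using a made-up value:)
+
+    #!/usr/bin/perl
+    use strict;
+    use warnings;
+    my $mimetype;
+    if (1) {
+        # "my" here creates a *new* lexical that shadows the outer one
+        my $mimetype = "image/png";
+    }
+    # the outer variable was never assigned
+    print defined $mimetype ? $mimetype : "undef", "\n";    # prints "undef"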
+
+I found on [[plugins/filecheck/discussion/]] what [[users/DavidBremner/]] described as:
+> no way to detect text/plain using File::MimeInfo::Magic::magic()
+But I can't figure out whether my issue is broader and includes this or not.
+
+Any ideas or solutions are more than welcome :)
+
+> [[done]], as isbear noted in [[discussion]], there was a bug that
+> prevented File::MimeInfo::Magic from ever being used. --[[Joey]]
diff --git a/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__/discussion.mdwn b/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__/discussion.mdwn
new file mode 100644
index 000000000..629aba71e
--- /dev/null
+++ b/doc/bugs/Slow_Filecheck_attachments___34__snails_it_all__34__/discussion.mdwn
@@ -0,0 +1,141 @@
+## Foreword:
+Disabling filecheck is not actually possible, because doing so makes attachment.pm malfunction,
+along with any pagespec that contains a *mimetype* condition.
+
+attachment.pm imports filecheck "statically", so actually disabling it should be prevented.
+
    +sub import {
    +        add_underlay("attachment");
    +        add_underlay("javascript");
    +        add_underlay("jquery");
    +        hook(type => "getsetup", id => "attachment", call => \&getsetup);
    +        hook(type => "checkconfig", id => "attachment", call => \&checkconfig);
    +        hook(type => "formbuilder_setup", id => "attachment", call => \&formbuilder_setup);
    +        hook(type => "formbuilder", id => "attachment", call => \&formbuilder, last => 1);
    +        IkiWiki::loadplugin("filecheck");
    +}
    +
    + +---- + +## How bad is it ? + +So I tried on three pages to inline !mimetype(image/*) while I allowed attachment of mimetype(image/*) + +My profiling tests in the bug report shows that most of the time is spend in the "Fallback using file" block code, +I tried to comment that block and see how it'll perform. Obviously this is much much faster ... but is the mimetype +discovered using only *File::MimeInfo* ? + + +Dumping some strings before return to STDERR, rebuilding . This is just a [[!toggle id="code-test" text="dumpdebug adding"]] + +[[!toggleable id="code-test" text=""" +
    +sub match_mimetype ($$;@) {
    +        my $page=shift;
    +        my $wanted=shift;
    +
    +        my %params=@_;
    +        my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page});
    +        if (! defined $file) {
    +                return IkiWiki::ErrorReason->new("file does not exist");
    +        }
    +
    +        # Get the mime type.
    +        #
    +        # First, try File::Mimeinfo. This is fast, but doesn't recognise
    +        # all files.
    +        eval q{use File::MimeInfo::Magic};
    +        my $mimeinfo_ok=! $@;
    +        my $mimetype;
    +        print STDERR " --- match_mimetype (".$file.")\n";
    +        if ($mimeinfo_ok) {
    +                my $mimetype=File::MimeInfo::Magic::magic($file);
    +        }
    +
    +        # Fall back to using file, which has a more complete
    +        # magic database.
    +        #if (! defined $mimetype) {
    +        #       open(my $file_h, "-|", "file", "-bi", $file);
    +        #       $mimetype=<$file_h>;
    +        #       chomp $mimetype;
    +        #       close $file_h;
    +        #}
    +
    +        if (! defined $mimetype || $mimetype !~s /;.*//) {
    +                # Fall back to default value.
    +                $mimetype=File::MimeInfo::Magic::default($file)
    +                        if $mimeinfo_ok;
    +                if (! defined $mimetype) {
    +                        $mimetype="unknown";
    +                }
    +        }
    +
    +        my $regexp=IkiWiki::glob2re($wanted);
    +        if ($mimetype!~$regexp) {
    +                 print STDERR " xxx MIME unknown ($mimetype - $wanted - $regexp ) \n";
    +                return IkiWiki::FailReason->new("file MIME type is $mimetype, not $wanted");
    +        }
    +        else {
    +                print STDERR " vvv MIME found\n";
    +                return IkiWiki::SuccessReason->new("file MIME type is $mimetype");
    +        }
    +}
    +
    +"""]] + +The results dump to stderr (or a file called... 'say *mime*) looks like this : +
    +--- match_mimetype (/usr/share/ikiwiki/attachment/ikiwiki/jquery.fileupload-ui.js)
    + xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$) )
    + --- match_mimetype (/usr/share/ikiwiki/locale/fr/directives/ikiwiki/directive/fortune.mdwn)
    + xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$) )
    + --- match_mimetype (/usr/share/ikiwiki/locale/fr/basewiki/shortcuts.mdwn)
+ xxx MIME unknown (text/plain - image/* - (?i-xsm:^image\/.*$) )
    + --- match_mimetype (/usr/share/ikiwiki/smiley/smileys/alert.png)
    + xxx MIME unknown (application/octet-stream - image/* - (?i-xsm:^image\/.*$) )
    + --- match_mimetype (/usr/share/ikiwiki/attachment/ikiwiki/images/ui-bg_flat_75_ffffff_40x100.png)
+ xxx MIME unknown (application/octet-stream - image/* - (?i-xsm:^image\/.*$) )
    +
+
+--- prefix marks the file under analysis
+xxx prefix marks a failed return: the MIME type is unknown, so the match fails
+vvv prefix marks a successful return.
+
+
+These are nasty, scary results! Did I miss something, or is this mime-filecheck plain nuts?
+
+*Question 1*: How many files have been analysed? **3055** (yet on a tiny tiny wiki)
+
+grep "^ --- " mime | wc -l
    +3055
    +
+
+*Question 2*: How many times does it fail? *All the time*:
+
    + grep "^ xxx " mime | wc -l
    +3055
    +
+
+*Question 1bis*: And by the way, how many files have been re-analysed? **2835** OMG!!
+
+grep "^ --- " mime | sort -u | wc -l
    +220
    +
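+
+(Putting the two counts together: 3055 analyses over only 220 distinct files means that
+3055 - 220 = 2835 of the runs re-analysed a file that had already been checked.)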
    + +## Conclusion + +- Only the system command *file -bi* works. While it is **should** be easy on the cpu , it's also hard on the I/O -> VM :( +- Something nasty with the mime implementation and/or my system configuration -> Hints ? :D +- Need to cache during the rebuild : a same page needs not being rechecked for its mime while it's locked ! + + +--mathdesc + +> > if ($mimeinfo_ok) { +> > my $mimetype=File::MimeInfo::Magic::magic($file); +> > } +> +> That seems strange to me, `my` restricts scope of $mimetype to enclosing if block, thus, assigned value will be dropped - I think, it is the problem. +> Try removing that stray `my`. +> +> --isbear diff --git a/doc/bugs/Spurious___60__p__62___elements_added_to_tags_in_inliine_pages.mdwn b/doc/bugs/Spurious___60__p__62___elements_added_to_tags_in_inliine_pages.mdwn index e3b1d858d..15f74c497 100644 --- a/doc/bugs/Spurious___60__p__62___elements_added_to_tags_in_inliine_pages.mdwn +++ b/doc/bugs/Spurious___60__p__62___elements_added_to_tags_in_inliine_pages.mdwn @@ -41,3 +41,19 @@ A fix is to change inlinepage.tmpl to remove new lines around tag links, as foll > > I don't have the prerequisites for the syntax plugin installed here > to debug it myself. --[[Joey]] + +> I don't think that this is specific to the [[syntax_(3rd_party)_plugin|plugins/contrib/syntax]]. +> It's happening on my pages that just use ordinary templates. +> I've documented my versions below. --[[daveloyall]] +> +> ikiwiki: 3.20140125 +> libtext-markdown-discount-perl: 0.11-1 +> libtext-multimarkdown-perl: 1.000034-1 +> libhtml-template-perl: 2.95-1 + +>> Can you show us the source code and output for a page that has this +>> bug? +>> +>> If you enable [[plugins/htmlbalance]], does the problem go away? +>> (If it does, then I think I might know what the bug is.) +>> --[[smcv]] diff --git a/doc/bugs/Underscores_in_links_don__39__t_appear.mdwn b/doc/bugs/Underscores_in_links_don__39__t_appear.mdwn new file mode 100644 index 000000000..b3cacdb6e --- /dev/null +++ b/doc/bugs/Underscores_in_links_don__39__t_appear.mdwn @@ -0,0 +1,20 @@ +Observed behavior: + +When I create a link like \[[cmd_test]] , the link appears as 'cmd test'. + +Expected behavior: + +I would like to be able to create links with underscores. I realize this is a feature, and I searched for ways to escape the underscore so it would appear, but I didn't find any. + +> as a workaround, you can use \[[cmd\_\_95\_\_test|cmd_test]] (which will link to a page named "cmd test" at the url location "cmd\_test") or \[[cmd\_\_95\_\_test]] (which will link to a page named "cmd\_test" at the url location "cmd\_\_95\_\_test"). i would, from my limited understanding of ikiwiki internals, consider the bug valid, and suggest that +> +> * explicit link text be not subject to de-escaping (why should it; this would be the short term solution) +> * escaped page names never be used in user visible parts of ikiwiki (in my opinion, a user should not need to know about those internals, especially as they are configuration dependant (wiki_file_regexp)) +> +> note that in [[ikiwiki/wikilink]], that very behavior is documented; it says that "\[[foo\_bar|Sandbox]]" will show as "foo bar". (although you can't tell that apart from "foo\_bar" easily because it's a hyperlink). +> +> i assume that this behavior stems from times when wikilinks and [[ikiwiki/directive]]s were not distinguished by \[[ vs \[[! but by the use of whitespace in directives, so whitespace had to be avoided in wikilinks. 
+> +> --[[chrysn]] + +> having hacked around in the [[plugins/link]] plugin, i can confirm that the link texts are explicitly de-escaped, and that when no pipe is inside the link (ie links like `\[[cmd_test]]`), the string `"cmd_test"` is regarded as a link (that will subsequently be converted to a readable text) rather than as a readable text (for which a suitable link target is found automatically). --[[chrysn]] diff --git a/doc/bugs/W3MMode_still_uses_http:__47____47__localhost__63__.mdwn b/doc/bugs/W3MMode_still_uses_http:__47____47__localhost__63__.mdwn index 3c28e379b..34eecef8c 100644 --- a/doc/bugs/W3MMode_still_uses_http:__47____47__localhost__63__.mdwn +++ b/doc/bugs/W3MMode_still_uses_http:__47____47__localhost__63__.mdwn @@ -22,3 +22,13 @@ Of course, the next time I rerun ikiwiki --setup, it will overwrite my wrapper-w I made a logfile of all the args, env, and stdin/stdout to/from my wrapper. If you're interested, I'll email it to you. I wasn't able to attach it here. -- [[terry|tjgolubi]] + +I confirm that the supplied w3mmode setup appears not to work. When I try to edit a page and save it, w3m tries to access an URL beginning http://localhost/ . The HTML source of the edit page contains a BASE URL beginning with http://localhost. It should not. Maybe this is a result of changes a while back, where use of absolute URLs was enforced in various places in Ikiwiki. + +-- Martin + +The problem is that IkiWiki::CGI::cgitemplate() and IkiWiki::CGI::redirect() use Perl's CGI::url() to determine the absolute URL of the CGI script when it is being executed. url() generates an URL beginning http://localhost. As w3m's serverless CGI mode is rather unusual, presumably there's no provision for the URL of a CGI script beginning file:///, even if there's a way to specify that. + +A quick workaround might be to force the use of $config{url} instead of $cgi->url as a base for URLs when w3mmode is set. + +-- Martin diff --git a/doc/bugs/Webedits_without_comment_don__39__t_make_it_through_git.mdwn b/doc/bugs/Webedits_without_comment_don__39__t_make_it_through_git.mdwn new file mode 100644 index 000000000..f0fc04775 --- /dev/null +++ b/doc/bugs/Webedits_without_comment_don__39__t_make_it_through_git.mdwn @@ -0,0 +1,53 @@ +If you edit via web, and don't enter a comment, the commit message for the ensuing Git commit is empty. Git by default will not commit with a blank commit message, so the edited file is still there in the working files for Ikiwiki but not committed into Git. + +A subsequent commit (including another web page edit with comments) will pull this change in with any new editing. We found this by having spam edits suddenly appear on various pages with no corresponding commits to match. + +IkiWiki/plugin/git.pm checks for a version of git greater than 1.5.4, and if greater, commits with a blank message and '--cleanup=verbatim'. The cleanup option doesn't let the message get committed. Relatively new versions of git support '--allow-empty-message' but I haven't been able to identify when that feature was added. Instead I opted for a default message. + + 544,545d543 + < # git will not commit with a blank comment, though this + < # can be overridden in later versions. + 547c545,553 + < $params{message}.="No commit message specified."; + --- + > # Force git to allow empty commit messages. + > # (If this version of git supports it.) 
+ > my ($version)=`git --version` =~ /git version (.*)/; + > if ($version ge "1.5.4") { + > push @opts, '--cleanup=verbatim'; + > } + > else { + > $params{message}.="."; + > } + +The other option would be to change only line 549: + + push @opts, '--cleanup=verbatim'; + +to + + push @opts, '--allow-empty-message'; + +[[!tag bugs patch]] + +> This is already [[fixed|done]] since 3.20130711. git versions since 1.7.2 +> are affected. Here's the commit if you want to backport it: +> [[b162563|http://source.ikiwiki.branchable.com/?p=source.git;a=commitdiff;h=b162563dc1c6126953e66cdcc508f389b9d39d8e]]. +> +> As a general comment on synthesizing commit messages, I personally don't +> think ikiwiki should invent an untranslated English commit message +> if the user didn't provide one - using an obviously trivial commit message, +> ".", seems more honest. OTOH, the `bzr` and `mercurial` plugins both use +> an untranslated "no message given", and `darcs` uses "empty message". +> It should either consistently use ".", or consistently use gettext(x) +> for some standardized value of x, perhaps "no message given". Joey, +> any preference? +> +> The other RCS plugins (`cvs`, `svn`, `tla`) never need to deal with an +> empty commit message because they prepend something like "web commit +> from smcv", so the message can never be empty. +> +> (Patches are usually easier to read/apply if you use "unified diff" +> (`diff -u` or `git diff`), by the way.) +> +> --[[smcv]] diff --git a/doc/bugs/__91____91____33__inline_postform__61__no__93____93___doesn__39__t_disable_it.mdwn b/doc/bugs/__91____91____33__inline_postform__61__no__93____93___doesn__39__t_disable_it.mdwn new file mode 100644 index 000000000..7b97b40b3 --- /dev/null +++ b/doc/bugs/__91____91____33__inline_postform__61__no__93____93___doesn__39__t_disable_it.mdwn @@ -0,0 +1,24 @@ +[[!tag patch users/smcv/ready]] +[[!template id=gitbranch branch=smcv/ready/postform-no +author="[[Simon McVittie|smcv]]" +browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/postform-no]] + +The [[ikiwiki/directive/inline]] directive generates a form if +it has either rootpage, or postform with a "yes-like" value. This +means that + + \[[!inline pages=... rootpage=sandbox postform=no]] + +does have a form. I would expect it not to (although +mentioning rootpage there is useless). + +See also [[forum/How_to_disable_"Add_a_new_post_titled:"_submission_form?]]. + +My `ready/postform-no` branch also contains a trivial regression test for +`inline`. So far the only thing it really tests is that this bug was fixed, +not the actual inlining of pages, but it's a start. + +--[[smcv]] + +>> this looks simple, straightforward and good to me --[[chrysn]] +>>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/aggregate_plugin_should_honour_a_post__39__s_mctime.mdwn b/doc/bugs/aggregate_plugin_should_honour_a_post__39__s_mctime.mdwn index 865637ea4..0bfbad5ce 100644 --- a/doc/bugs/aggregate_plugin_should_honour_a_post__39__s_mctime.mdwn +++ b/doc/bugs/aggregate_plugin_should_honour_a_post__39__s_mctime.mdwn @@ -13,3 +13,5 @@ appropriately, so that ikiwiki reflects the actual time of the post via the >> I'll have to debug this, it's not working here... and this is an ikiwiki aggregator scraping another ikiwiki site. >>> Any news about this? --[[Joey]] + +>>>> That would be useful to avoid "flooding" with old content when something new is added with aggregate and then listed with the inline directive. 
-- [hugo](https://hroy.eu/hugo) diff --git a/doc/bugs/assumes___34__git_push_origin__34___is_sufficient.mdwn b/doc/bugs/assumes___34__git_push_origin__34___is_sufficient.mdwn new file mode 100644 index 000000000..369be8277 --- /dev/null +++ b/doc/bugs/assumes___34__git_push_origin__34___is_sufficient.mdwn @@ -0,0 +1,18 @@ +[[!template id=gitbranch branch=smcv/ready/git-push-origin-master + browse="http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/git-push-origin-master" + author="[[smcv]]"]] +[[!tag patch]] + +git's behaviour when doing "git push origin" is configurable, and the +default is going to change in 2.0. In particular, if you've set +push.default to "nothing" (the "explicit is better than implicit" option), +the regression test will warn: + + fatal: You didn't specify any refspecs to push, and push.default + is "nothing". + 'git push origin' failed: at .../lib/IkiWiki/Plugin/git.pm line 220. + +The solution is to do "git push origin master" instead (but with the +configured remote and branch names). --[[smcv]] + +> [[fixed|done]] --[[Joey]] diff --git a/doc/bugs/blogspam_marks_me_as_spam_on_ipv6.mdwn b/doc/bugs/blogspam_marks_me_as_spam_on_ipv6.mdwn new file mode 100644 index 000000000..9b415a84a --- /dev/null +++ b/doc/bugs/blogspam_marks_me_as_spam_on_ipv6.mdwn @@ -0,0 +1,8 @@ +I just got this message trying to post to this wiki: + + Error: Sorry, but that looks like spam to blogspam: No reverse DNS entry for 2001:1928:1:9::1 + +So yeah, it seems I have no reverse DNS for my IPv6 address, which may +be quite common for emerging IPv6 deployments... + +This may be related to [[blogspam_options whitelist vs. IPv6?]]. diff --git a/doc/bugs/bug_in_cgiurl_port.mdwn b/doc/bugs/bug_in_cgiurl_port.mdwn new file mode 100644 index 000000000..373657814 --- /dev/null +++ b/doc/bugs/bug_in_cgiurl_port.mdwn @@ -0,0 +1,15 @@ +I think there's a bug in the code that determines if the cgiurl is relative +to the url. If one has a different port than the other, they're not +relative, and I hear Fil encountered an issue where the wrong port was then +used. --[[Joey]] + +> I tested, setting cgiurl to a nonstandard port. After rebuilding, +> pages used the full url. So I don't see a bug here, or am missing +> something from my memory of the report (which was done the bad way, on +> IRC). [[done]] --[[Joey]] + +> > Sorry about wittering on IRC instead of reporting proper bugs. +> > +> > The setup I have is nginx in front of apache, so that nginx is listening on port 80, apache is on port 81, and ikiwiki is being served by apache. After upgrading to 3.20120203 (backported to squeeze) I found that the URLs in the edit page all have the port set as :81 ... but now that I look at it more closely, that is the case for several ikiwiki-hosting controlled sites, but not for a few other sites that are also on the same machine, so it must be some difference between the settings for the sites, either in ikiwiki, or apache, or perhaps even nginx. Anyway, on the affected sites, explicitly including a port :80 in the cgiurl fixes the problem. 
+ +> > So, for the moment, this bug report is a bit useless, until I find out what is causing the ikiwiki-hosting sites to be beffuddled, so it should probably stay closed -[[fil]] diff --git a/doc/bugs/can__39__t_upload_a_simple_png_image:_prohibited_by_allowed__95__attachments___40__file_MIME_type_is_application__47__octet-stream....mdwn b/doc/bugs/can__39__t_upload_a_simple_png_image:_prohibited_by_allowed__95__attachments___40__file_MIME_type_is_application__47__octet-stream....mdwn new file mode 100644 index 000000000..627b2c827 --- /dev/null +++ b/doc/bugs/can__39__t_upload_a_simple_png_image:_prohibited_by_allowed__95__attachments___40__file_MIME_type_is_application__47__octet-stream....mdwn @@ -0,0 +1,91 @@ +When uploading a PNG file on the wiki, through the webinterface or anonymous git, i get: + + icon.png prohibited by allowed_attachments (file MIME type is application/octet-stream, not application/vnd.oasis.opendocument.*) + +`attachment_allowed_attachments` is set to: + + virusfree() and (mimetype(image/*) or mimetype(text/*) or mimetype(application/x-gzip) or mimetype(application/vnd.oasis.opendocument.*)) and maxsize(2048kb) + +Maybe a bug in the [[plugins/filecheck]] plugin? + +This is ikiwiki 3.20130904.1~bpo70+1 on Debian wheezy, with some patches applied, namely: + + * [[todo/option_to_send_only_the_diff_in_notifyemail]] + * [[bugs/syslog_fails_with_non-ASCII_wikinames]] + * [[bugs/notifyemail_fails_with_some_openid_providers]] + * [[bugs/crashes_in_the_python_proxy_even_if_disabled]] + +Weird... --[[anarcat]] + +> Well, the pagespec seems to be matching correctly, given that it thinks the mime type is application/octet-stream. +> If File::MimeInfo::Magic is installed, ikiwiki uses it. If not, or if it fails to find any mime type, it falls back to using `file -bi`, +> and if that fails, it falls back to a default of application/octet-stream. --[[Joey]] + +> > File::MimeInfo::Magic is installed: +> > +> > ii libfile-mimeinfo-perl 0.16-1 all Perl module to determine file types +> > +> > it turns out there's (still) a problem with the way we use the module. This test code: +> > +> > #!/usr/bin/perl -w +> > my $file='icon.png'; +> > use File::MimeInfo::Magic; +> > print "mime::magic: " . File::MimeInfo::Magic::magic($file) . "\n"; +> > print "mime::default: " . File::MimeInfo::Magic::default($file) . "\n"; +> > +> > ...returns: +> > +> > mime::magic: image/png +> > mime::default: application/octet-stream +> > +> > `file -ib` returns the right thing (`image/png; charset=binary`). +> > +> > So it *should* work: it seems that the `::default` code kicks in even if the `::magic` one actually works. +> > +> > I have traced down the problem to this block of code: +> > +> > if (! defined $mimetype || $mimetype !~s /;.*//) { +> > # Fall back to default value. +> > $mimetype=File::MimeInfo::Magic::default($file) +> > +> > If you take a look deeply, this will fire up the default if there's no semicolon in the mimetype, which is expected for `file` calls, but not for `::magic()` calls. So `::magic()` works, but then the `::default` kicks in anyways. +> > +> > [[!template id=gitbranch branch=anarcat/dev/magic-fails author="[[anarcat]]"]] +> > +> > I have a stupid [[patch]] in my git repo which just appends a semicolon to the `::magic()` output, but maybe this should be done in another way... 
+> > +> > --[[anarcat]] + +> > > [[!template id=gitbranch branch=ready/more-magic author="[[smcv]]" browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/ready/more-magic]] +> > > If the regex match isn't necessary and it's just about deleting the +> > > parameters, I think I'd prefer +> > > +> > > if (! defined $mimetype) { +> > > ... +> > > } +> > > $mimetype =~ s/;.*//; +> > > +> > > as done in my `ready/more-magic` branch. +> > > +> > > I'm a little hesitant to do that without knowing why Joey implemented it +> > > the way it is, but as far as I can tell it's just an oversight. +> > > +> > > Or, if the result of the s/// is checked for a reason, and it's +> > > about catching a result from file(1) that +> > > is not, in fact, a MIME type at all (empty string or error message +> > > or something), maybe something more like this? +> > > +> > > if (! defined $mimetype || $mimetype !~ s{[-\w]+/[-\w]+(?:;.*)?}{}) +> > > +> > > (or whatever the allowed characters in MIME types are). --[[smcv]] + +> > > > I don't mind either way, but i feel this should be fixed for the next release, as I need to reapply this patch at every upgrade now. -- [[anarcat]] + +> > > > > This is still a problem in 3.20140831. -- [[anarcat]] + +> > > > > > I still don't think appending a semicolon is the right answer: +> > > > > > at best it's equivalent to what I suggested, and at worst it's +> > > > > > disabling a check that does have some reason behind it. +> > > > > > I've turned the version I suggested above into a proper branch. +> > > > > > Review by someone who can commit to ikiwiki.git would be appreciated. +> > > > > > --[[smcv]] diff --git a/doc/bugs/cannot_clone_documented_git_repo.mdwn b/doc/bugs/cannot_clone_documented_git_repo.mdwn new file mode 100644 index 000000000..4f2ec66f3 --- /dev/null +++ b/doc/bugs/cannot_clone_documented_git_repo.mdwn @@ -0,0 +1,16 @@ + smcv@vasks:~$ git clone git://git.ikiwiki.info/ + Cloning into git.ikiwiki.info... + fatal: read error: Connection reset by peer + +I tried this from a UK consumer ISP, my virtual server in the +UK, and vasks (aka alioth.debian.org) in the Netherlands, +with the same results. I can't update my clone from `origin` +either; for the moment I'm using the github mirror instead. +--[[smcv]] + +> Strange.. The git-daemon was not running, but one child was running +> waiting on an upload-pack, but not accepting new connections. Nothing +> in the logs about what happened to the parent. The monitor that checks +> services are running was satisfied with the child.. I've made it +> restart if the parent pid is no longer running, which should avoid +> this problem in the future. --[[Joey]] [[done]] diff --git a/doc/bugs/cannot_decode_wide_characters_error_with_utf-8_encoding.mdwn b/doc/bugs/cannot_decode_wide_characters_error_with_utf-8_encoding.mdwn new file mode 100644 index 000000000..2b02f3b98 --- /dev/null +++ b/doc/bugs/cannot_decode_wide_characters_error_with_utf-8_encoding.mdwn @@ -0,0 +1,7 @@ +During creation the new page with utf-8 codepage and non-Latin characters, +Pressing on either `Save Page` or `Preview` button results in +> `Error: cannot decode with wide characters at /usr/lib/perl5/vendor_perl/5.16.1/i686-linux/Encode.pm line 215` + +Editing the wiki page with non-Latin characters using webinterface also fails with the same error. +Additionally, embedding graphviz graphs non-Latin, leads to he same error. 
+Observed in ikiwiki versions 3.20130904 and 3.20140102 diff --git a/doc/bugs/capitalized_attachment_names.mdwn b/doc/bugs/capitalized_attachment_names.mdwn new file mode 100644 index 000000000..b10781bf7 --- /dev/null +++ b/doc/bugs/capitalized_attachment_names.mdwn @@ -0,0 +1,14 @@ +Given an uploaded image via: \[\[!img NAME.svg alt="image"\]\] + +Viewing the generated page shows the following error: + +"\[\[!img Error: failed to read name.svg: Exception 420: no decode delegate for this image format `/home/user/path/name.svg' @ error/svg.c/ReadSVGImage/2815\]\]" + +The capital letters in the image title were somehow converted to lowercase, and then the image is saved as a directory. Very puzzling. +I get the same error when image names are all lowercase. + +The error also occurs with png images. + +How do I fix this? + +Later investigation ... I got around the problem by creating the mark-up in a new directory. However, if I try to create a new directory with the same name as the directory containing the problem code, the problem re-emerges -- the old directory is apparently not overwritten. Perhaps this is an issue with the git storage. diff --git a/doc/bugs/changes_from_the_web_interface_fail_to_get_committed.mdwn b/doc/bugs/changes_from_the_web_interface_fail_to_get_committed.mdwn new file mode 100644 index 000000000..67a48a2e3 --- /dev/null +++ b/doc/bugs/changes_from_the_web_interface_fail_to_get_committed.mdwn @@ -0,0 +1,71 @@ +For some reason, on a wiki hosted locally using [ikiwiki-hosting](http://ikiwiki-hosting.branchable.com), web edits do not get committed and pushed to the central repository anymore. + +For example, I just did an edit on the web interface, which went through without error, but then the modified files are not committed: + +[[!format txt """ +o-cats@marcos:~/source$ git status +# On branch master +# Changes not staged for commit: +# (use "git add <file>..." to update what will be committed) +# (use "git checkout -- <file>..." to discard changes in working directory) +# +# modified: 2014/summer.mdwn +# +no changes added to commit (use "git add" and/or "git commit -a") +"""]] + +The files and the .cgi are owned by the right user: + +[[!format txt """ +o-cats@marcos:~/source$ ls -al 2014/summer.mdwn +-rw-r--r-- 1 o-cats o-cats 2812 Nov 28 23:35 2014/summer.mdwn +o-cats@marcos:~/source$ ls -al /var/www/o-cats/ikiwiki.cgi +-rwxr-xr-x 1 o-cats o-cats 15596 Oct 4 12:16 /var/www/o-cats/ikiwiki.cgi +"""]] + +The virtual host is configured to run as the right user: + +[[!format txt """ +<VirtualHost *:80> + ServerAdmin root@localhost + ServerName foo.example.com + + SuexecUserGroup o-cats o-cats + + UserDir disabled + + DocumentRoot /home/o-cats/public_html + + DirectoryIndex index.html index + AllowOverride None + + <Directory /home/o-cats/public_html> + Options Indexes MultiViews + AllowOverride None + Order allow,deny + allow from all + </Directory> + + <Directory /var/www/o-cats> + Options ExecCGI + AllowOverride None + Order allow,deny + allow from all + </Directory> + + ScriptAlias /ikiwiki.cgi /var/www/o-cats/ikiwiki.cgi + + ErrorLog /var/log/ikiwiki-hosting/o-cats/error.log + LogLevel warn + CustomLog /var/log/ikiwiki-hosting/o-cats/access.log combined + + ErrorDocument 404 "/ikiwiki.cgi" +</VirtualHost> +"""]] + +What's going on all of a sudden? This is Debian wheezy. --[[anarcat]] + +> Oh... it seems like this is related yet again to the disruptive git upgrade: +> +> [Thu Nov 28 23:35:01 2013] [error] [client 2001:1928:1:9::1] Aborting commit due to empty commit message., referer: http://foo.example.com +> +> So this is [[fixed|done]] as of 3.20130711. Shouldn't we backport to wheezy now?
:) --[[anarcat]] diff --git a/doc/bugs/conditional_preprocess_during_scan.mdwn b/doc/bugs/conditional_preprocess_during_scan.mdwn index 23b9fd2cc..739be8286 100644 --- a/doc/bugs/conditional_preprocess_during_scan.mdwn +++ b/doc/bugs/conditional_preprocess_during_scan.mdwn @@ -1,4 +1,4 @@ -[[!template id=gitbranch branch=GiuseppeBilotta/scanif author="Giuseppe Bilotta"]] +[[!template id=gitbranch branch=GiuseppeBilotta/scanif author="[[GiuseppeBilotta]]"]] When a directive that should be run during scan preprocessing is inside an if directive, it doesn't get called because the if preprocessing does @@ -55,3 +55,58 @@ reprocessed is done so in the same conditions as the original call. >> with vicious conditional dependency circles that would break/unbreak >> depending on which pass we are in. And I believe this is an intrinsic >> limitation of the system, which cannot be solved at all. + +>>> One way forward that I can think of for this issue is to +>>> have a way to tell `\[[!if]]` which answer it should assume for +>>> scanning purposes, so it would assume that answer when running +>>> in the scan phase, and really evaluate the pagespec when running +>>> in the render phase. For instance: +>>> +>>> \[[!if test="enabled(foo)" scan_assume=yes then=""" +>>> \[[!foo]] +>>> """]] +>>> +>>> could maybe scan \[[!foo]] unconditionally. +>>> +>>> This makes me wonder whether `\[[!if]]` was too general: by having +>>> the full generality of pagespecs, it reduces its possible uses to +>>> "those contexts where pagespecs work". +>>> +>>> Another possibility might be to have "complex" pagespecs and sort +>>> orders (those whose correct answer requires scanning to have completed, +>>> like `link()` and sorting by `meta(title)`) throw an error when used in +>>> the scan phase, but simple pagespecs like `enabled()` and `glob()`, and +>>> simple sort orders like `title` and `path`, could continue to work? +>>> My `wip-too-soon` work-in-progress branch is heading in this direction, +>>> although it currently makes `pagespec_match` fail completely and does +>>> not even allow "simple" pagespecs and sort orders. +>>> +>>> At the moment, if a pagespec cannot be evaluated, `\[[!if]]` will +>>> produce neither the `then` clause nor the `else` clause. This could +>>> get pretty confusing if it is run during the scan phase and produces +>>> an error, then run during the render phase and succeeds: if you had, +>>> say, +>>> +>>> \[[!if run_during_scan=1 test="link(foo)" then=""" +>>> there is a link to foo +>>> \[[!tag there_is_a_link_to_foo]] +>>> """ else=""" +>>> there is no link to foo +>>> \[[!tag there_is_no_link_to_foo]] +>>> """]] +>>> +>>> then the resulting page would contain one of the snippets of text, +>>> but its metadata would contain neither of the tags. Perhaps the plugin +>>> would have to remember that it failed during the scan phase, so that +>>> it could warn about the failure during the render phase instead of, +>>> or in addition to, producing its normal output? +>>> +>>> Of the conditional-specific tests, `included()` and `destpage(glob)` +>>> can never match during scan. +>>> +>>> Does anyone actually use `\[[!if]]` in ways that they would want to +>>> be active during scan, other than an `enabled(foo)` test? +>>> I'm increasingly tempted to add `\[[!ifenabled foo]]` to solve +>>> that single case, and call that a solution to this bug... 
+>>> +>>> --[[smcv]] diff --git a/doc/bugs/crashes_in_the_python_proxy_even_if_disabled.mdwn b/doc/bugs/crashes_in_the_python_proxy_even_if_disabled.mdwn new file mode 100644 index 000000000..3d5b05618 --- /dev/null +++ b/doc/bugs/crashes_in_the_python_proxy_even_if_disabled.mdwn @@ -0,0 +1,74 @@ +[[!template id=gitbranch branch=anarcat/dev/proxy-utf8-fail author="[[anarcat]]"]] + +ikiwiki 3.20130904.1~bpo70+1 + +rebuilding the whole wiki: + +[[!format txt """ +anarcat@marcos:ikiwiki*$ sudo ikisite changesetup wiki.anarc.at --rebuild +Subroutine import redefined at /usr/share/perl5/IkiWiki/Plugin/translinks.pm line 19. +Subroutine getsetup redefined at /usr/share/perl5/IkiWiki/Plugin/translinks.pm line 29. +Subroutine pagetemplate redefined at /usr/share/perl5/IkiWiki/Plugin/translinks.pm line 38. +Subroutine otherlanguagesloop redefined at /usr/share/perl5/IkiWiki/Plugin/translinks.pm line 51. +Use of uninitialized value $body in split at /usr/share/perl5/Text/MultiMarkdown.pm line 1131. +uncaught exception: 'ascii' codec can't encode character u'\xe9' in position 289: ordinal not in range(128) +Traceback (most recent call last): + File "/usr/lib/ikiwiki/plugins/proxy.py", line 309, in run + self._in_fd, self._out_fd) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 192, in handle_rpc + ret = self._dispatcher.dispatch(method, params) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 84, in dispatch + return self._dispatch(method, params) + File "/usr/lib/python2.7/SimpleXMLRPCServer.py", line 420, in _dispatch + return func(*params) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 253, in hook_proxy + "{0} hook `{1}' returned: [{2}]".format(type, name, ret)) +UnicodeEncodeError: 'ascii' codec can't encode character u'\xe9' in position 289: ordinal not in range(128) + +Traceback (most recent call last): + File "/usr/lib/ikiwiki/plugins/rst", line 86, in <module> + proxy.run() + File "/usr/lib/ikiwiki/plugins/proxy.py", line 317, in run + self.error('uncaught exception: {0}\n{1}'.format(e, tb)) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 298, in error + self.rpc('error', msg) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 233, in rpc + *args, **kwargs) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 173, in send_rpc + raise GoingDown() +proxy.py.GoingDown +error: ikiwiki failed +"""]] + +\xe9 is "é" in latin1; it may be the last letter of my name. No clue how it got there. I suspect this is related to the fix in [[bugs/proxy.py_utf8_troubles]], since this was not happening before the upgrade from squeeze. --[[anarcat]] + +> Ooops... turns out the plugin *was* enabled, through the `rst` plugin. After disabling it, the crash is gone, but one page isn't rendered anymore: +> +> removing art/histoireinternet/index.html, no longer built by art/histoireinternet.rst +> +> Here is that source file: http://anarc.at/art/histoireinternet.rst - and it seems encoded properly: +> +> $ curl -s http://anarc.at/art/histoireinternet.rst | iconv -f utf8 -t latin1 | iconv -f latin1 -t utf8 > /dev/null +> $ +> +> So I am not sure what is going on here... --[[anarcat]] + +>> Python is decoding what it receives from IkiWiki using the default `ascii` +>> codec. To match IkiWiki's "all source text is UTF-8" assumption, the +>> Python proxy should explicitly decode incoming text from bytes +>> (`str`) to `unicode` using the `utf8` codec instead.
+>> +>> Python's conservative default is "`ascii`, regardless of locale" - +>> this minimizes the chance of silently incorrect decoding, but +>> unfortunately also maximizes the chance of crashing. --[[smcv]] + +> > > Right, I know that. The trick is to find the rabbit hole. :P +> > > +> > > And I found it. With my dev/proxy-utf8-fail, this doesn't fail anymore. Yay, a [[patch]] ready for commit! --[[anarcat]] + +> > > > I don't see that branch in your git repo, could you repost it please? +> > > > (I'm trying to review some of the pending patches.) --[[smcv]] + +>>>>> Ooops.. I forgot to push the branch, it should be good now! --[[anarcat]] + +>>>>>> [[merged|done]] --[[Joey]] diff --git a/doc/bugs/cutpaste.pm:_missing_filter_call.mdwn b/doc/bugs/cutpaste.pm:_missing_filter_call.mdwn index 4b22fd06c..de4296000 100644 --- a/doc/bugs/cutpaste.pm:_missing_filter_call.mdwn +++ b/doc/bugs/cutpaste.pm:_missing_filter_call.mdwn @@ -1,7 +1,7 @@ Consider this: - $ wget http://schwinge.homeip.net/~thomas/tmp/cutpaste_filter.tar.bz2 - $ wget http://schwinge.homeip.net/~thomas/tmp/cutpaste_filter.patch + $ wget http://nic-nac-project.de/~schwinge/ikiwiki/cutpaste_filter.tar.bz2 + $ wget http://nic-nac-project.de/~schwinge/ikiwiki/0001-cutpaste.pm-missing-filter-call.patch $ tar -xj < cutpaste_filter.tar.bz2 $ cd cutpaste_filter/ diff --git a/doc/bugs/debwiki_shortcut_creates_buggy_URLs_to_subpages.mdwn b/doc/bugs/debwiki_shortcut_creates_buggy_URLs_to_subpages.mdwn new file mode 100644 index 000000000..f83f960ce --- /dev/null +++ b/doc/bugs/debwiki_shortcut_creates_buggy_URLs_to_subpages.mdwn @@ -0,0 +1,5 @@ +E.g. [[!debwiki Derivatives/Guidelines]]. + +Maybe we should use `%S` instead of `%s` in the shortcut definition? + +> seems reasonable, [[done]] --[[smcv]] diff --git a/doc/bugs/definition_lists_should_be_bold.mdwn b/doc/bugs/definition_lists_should_be_bold.mdwn new file mode 100644 index 000000000..a72206b8c --- /dev/null +++ b/doc/bugs/definition_lists_should_be_bold.mdwn @@ -0,0 +1,27 @@ +Definition lists do not look great here... + +Here is an example. + +
+<dl>
+<dt>this is a term</dt>
+<dd>and this is its definition.</dd>
+</dl>
+ +(This wiki doesn't support Markdown's extended definition lists, but still, this is valid markup.) + +I believe `<dt>
` should be made bold. I have added this to my `local.css`, and I would hate to add this all the time forever: + + /* definition lists look better with the term in bold */ + dt + { + font-weight: bold; + } + +:) How does that look? I can provide a patch for the base wiki if you guys really want... ;) -- [[anarcat]] + +> What you dislike seems to be the default rendering of definition lists by +> browsers. I don't think it's ikiwiki's place to override browser defaults +> for standard markup in the document body, at least not in the default +> antitheme. --[[Joey]] + +> > How about in the actiontab theme then? :) diff --git a/doc/bugs/do_not_let_big_brother_spy_on_our_users_on_login.mdwn b/doc/bugs/do_not_let_big_brother_spy_on_our_users_on_login.mdwn new file mode 100644 index 000000000..6d259d047 --- /dev/null +++ b/doc/bugs/do_not_let_big_brother_spy_on_our_users_on_login.mdwn @@ -0,0 +1,79 @@ +In the login page, the icons of: + + * livejournal.com + * myopenid.com - which is [closing](http://tech.slashdot.org/story/13/09/04/228229/myopenid-to-shut-down-in-february) + * verisign.com + * yahoo.com + * aol.com + * claimid.com + * flickr.com - which should be the same as yahoo + * wordpress.com + * google.com + +... are all hotlinked. Which means that on every ikiwiki out there, whenever someone logs in, the web browser of that person actually reports back to all those entities, some of which are known to collaborate with the US government in illegal spying on American citizens and, well, the world at large (see [[!wikipedia PRISM]], but also the patriot act and various warrantless wiretapping provisions established since 2001). + +In the old days, we used to call those [[!wikipedia web bugs]]. Nowadays, they seem so pervasive that we don't even notice. Nevertheless, I think it would be important to remove those snitches from the ikiwiki home page. + +A simple fix would be to ship those icons with ikiwiki and serve them locally, but there may be legal issues with redistributing those icons in the source code... Would it be covered by fair use? The [upstream library](https://code.google.com/p/openid-selector/) doesn't actually exhibit that problem, and ships those icons directly as a [PNG sprite](https://code.google.com/p/openid-selector/source/browse/#svn%2Ftrunk%2Fimages). -- [[anarcat]] + +> it's not exactly about OpenID, but the german heise newspaper group has +> switched away from directly including like/+1 buttons on their websites, and +> replaced them with locally hosted buttons which have to be clicked once to +> enable the buttons themselves and a second time to effect anything. +> [here's the article](http://www.h-online.com/features/Two-clicks-for-more-privacy-1783256.html). +> they've had [trouble with facebook (german)](http://www.heise.de/newsticker/meldung/Facebook-beschwert-sich-ueber-datenschutzfreundlichen-2-Klick-Button-2-Update-1335658.html) +> -- tl;dt: facebook complained +> about them using their "like"-button logo for something that's not a like +> button, they replaced the whole facebook logo there with a plain-text "F" (as +> you see on the bottom of the page). google's +1 seems not to have been an +> issue. i assume it will need case-by-case decisions to fully comply with all +> legal stuff involved. (from a practical point of view, things are not that +> strict, as `apt-file find facebook.png` and `apt-file find flickr.png` +> reveal.)
--[[chrysn]] + +>> The fundamental problem here is that we want to balance these +>> somewhat incompatible goals: +>> +>> * show users a provider icon that they'll recognise at a glance +>> * don't infringe copyright +>> * don't distribute non-DFSG-licensed things in the source package +>> * don't let miscellaneous OpenID providers track our users +>> +>> A "quick hack" version of removing these would be to have an option to +>> disable the friendly JavaScript OpenID selector and go back to a simple +>> input box. I might implement that option anyway - on websites mainly used +>> by technologists, the OpenID selector is a bit of a waste of time. +>> +>>> Not done yet. -s +>>> +>>>> FWIW, I don't think we should implement this. The current selector is +>>>> fine: if elite technologists don't want the selector, they can just +>>>> turn off javascript. :) -- [[anarcat]] +>> +>> One way to have recognisable icons would be to ship DFSG imitations of +>> the "real" logos in the underlay. Between gnome-online-accounts and +>> Empathy, we can probably find most of them (mostly or perhaps all done by +>> Jakub Steiner). +>> +>>> [[!template id=gitbranch branch=smcv/ready/openid author="[[smcv]]"]] +>>> [[!tag patch]] +>>> Here's a git branch. I deleted the shut-down ClaimID and MyOpenID providers, +>>> used icons from GNOME Online Accounts and Wordpress where available, and +>>> drew my own for the rest. +>>> [See it in use here](http://blueview.hosted.pseudorandom.co.uk/ikiwiki.cgi?do=prefs) +>>> -s +>>>> +>>>> Awesome work Simon! I owe you a beer. [[merged|done]] --[[Joey]] +>>>> +>>>> Same here, thanks for this!!! -- [[anarcat]] +>> +>> If people want the "real" logos, we could have some code to make IkiWiki +>> download the favicons into the transient underlay (which I think is +>> higher-priority?), or into a higher-priority underlay if necessary, +>> during the wiki build, so they'll be served from the wiki's own server. +>> +>>> Not done yet. I'm not sure whether I'm going to bother, but I'd review +>>> someone else's implementation. -s +>> +>>>> Doesn't seem to be a priority to me either. --[[anarcat]] +>> --[[smcv]] diff --git a/doc/bugs/editing_gitbranch_template_is_really_slow.mdwn b/doc/bugs/editing_gitbranch_template_is_really_slow.mdwn new file mode 100644 index 000000000..22733e6fe --- /dev/null +++ b/doc/bugs/editing_gitbranch_template_is_really_slow.mdwn @@ -0,0 +1,67 @@ +On this wiki, editing `templates/gitbranch.mdwn` causes a really slow +refresh, orders of magnitude slower than a full rebuild: a large number of +pages depend on that template, or link to a page that embeds that template, +and so on. + +I suspect that, as with my optimization pass for `album`'s benefit, the +costly thing is evaluating lots of pagespecs. I'm profiling it to see +whether there are any low-hanging fruit. + +Easy to reproduce offline: + +* comment out the `exclude` option in `docwiki.setup` +* `/usr/bin/perl -Iblib/lib ikiwiki.in -setup docwiki.setup -rebuild` +* `touch templates/gitbranch.mdwn` +* `/usr/bin/perl -Iblib/lib ikiwiki.in -setup docwiki.setup -refresh` + +NYTProf says: + + # spent 279s (237+41.8) within IkiWiki::bestlink which was called 13988949 times, avg 20µs/call: + # 13150827 times (222s+37.2s) by IkiWiki::PageSpec::match_link at line 2692, avg 20µs/call + # 829606 times (14.9s+4.51s) by IkiWiki::PageSpec::match_link at line 2687, avg 23µs/call + ... + sub bestlink ($$) { + +which is about half the execution time (458s on my laptop).
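+
+(A profile like the one above can be captured with [[!cpan Devel::NYTProf]] -
+a sketch, assuming the reproduction recipe above:
+
+	perl -d:NYTProf -Iblib/lib ikiwiki.in -setup docwiki.setup -refresh
+	nytprofhtml
+
+`nytprofhtml` then writes an HTML report with per-subroutine and per-line
+timings.)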
+ +Adding code to log each call to match_backlink indicates that a large part +of the problem is that it evaluates the pagespec +`backlink(plugins/goodstuff)` up to a million times, with various pages and locations. + +--[[smcv]] + +> [[!template id=gitbranch branch=smcv/ready/perf +author="[[Simon McVittie|smcv]]" +browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/perf]] +> [[!tag patch users/smcv/ready]] +> +> Previously, if a page like `plugins/trail` contained a conditional like +> +> \[[!if test="backlink(plugins/goodstuff)" all=no]] +> +> (which it gets via `templates/gitbranch`), then the +> [[plugins/conditional]] plugin would give `plugins/trail` a dependency on +> `(backlink(plugins/goodstuff)) and plugins/trail`. This dependency is +> useless: that pagespec can never match any page other than +> `plugins/trail`, but if `plugins/trail` has been modified or deleted, +> then it's going to be rendered or deleted *anyway*, so there's no point +> in spending time evaluating match_backlink for it. +> +> Conversely, the influences from the result were not taken into account, +> so `plugins/trail` did not have the +> `{ "plugins/goodstuff" => $DEPEND_LINKS }` dependency that it should. +> +> We should invert that, depending on the influences but not on the test. +> +> This is at least an order of magnitude faster: when I edit the docwiki +> as described above, a refresh takes 37s with nytprof overhead, compared +> with 458s with nytprof overhead before this change. Without nytprof, +> that refresh takes 14s, which is faster than the 24s rebuild again. +> I didn't record how long the refresh took without nytprof before this +> change, but it was something like 200s. +> +> `bestlink` is still the single most expensive function in this refresh +> at ~ 9.5s, with `match_glob` at ~ 5.2s as the runner-up. +> --[[smcv]] + +>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/empty_div_element.mdwn b/doc/bugs/empty_div_element.mdwn new file mode 100644 index 000000000..7e28730fd --- /dev/null +++ b/doc/bugs/empty_div_element.mdwn @@ -0,0 +1,35 @@ +For some more flexibility in creating a stylesheet for ikiwiki, it would be nice if there were a few unused elements on the page that one can move around and assign content to using CSS. + +For instance, something like this: + +
+<div id="aux1" class="aux"></div>
+<div id="aux2" class="aux"></div>
+ +etc. For bonus points, the number could be configurable. To avoid empty content, style.css should have something like this: + + .aux { + display: none; + } + +This can then be used to move things around. For instance, I have on my website's CSS stylesheet the following: + + #aux1 { + position: fixed; + width: 150px; + height: 150px; + bottom: 0px; + left: 0px; + background-image: url("wouter3.png"); + background-position: top right; + background-repeat: no-repeat; + background-origin: content-box; + display: block; + } + +which adds my hackergochi to the bottom left of the webpage, with some margin. + +I tried looking for something like this, but I couldn't find it. Perhaps I just didn't look in the right places, though; apologies if that is the case. + +> This can easily be achieved by modifying [[templates]]. Simply copy the default page template to the template directory of your wiki, and modify it to add your empty divs. +> +> -- [[Louis|spalax]] diff --git a/doc/bugs/enabling_or_disabling_plugin_x_does_not_rebuild_pages_that_use_enabled__40__x__41__.mdwn b/doc/bugs/enabling_or_disabling_plugin_x_does_not_rebuild_pages_that_use_enabled__40__x__41__.mdwn new file mode 100644 index 000000000..4b4adb2c6 --- /dev/null +++ b/doc/bugs/enabling_or_disabling_plugin_x_does_not_rebuild_pages_that_use_enabled__40__x__41__.mdwn @@ -0,0 +1,11 @@ +If you have a page like + + \[[!if test="enabled(smileys)" then=":-P"]] + +then enabling or disabling the smileys plugin will not rebuild it. + +Unfortunately, I can't think of a good way to solve this without +introducing a special case for `enabled()` in Render.pm, either a +new dependency type `"enabled(smileys)" => $DEPENDS_ENABLED` +or a special case that treats `"enabled(smileys)" => $DEPENDS_PRESENCE` +differently. --[[smcv]] diff --git a/doc/bugs/encoding_issue_in_blogspam_plugin.mdwn b/doc/bugs/encoding_issue_in_blogspam_plugin.mdwn new file mode 100644 index 000000000..92318d165 --- /dev/null +++ b/doc/bugs/encoding_issue_in_blogspam_plugin.mdwn @@ -0,0 +1,34 @@ +[[!tag patch]] + +
    +From 5ad35b2805ca50478f07d810e57e7c9b8f4eddea Mon Sep 17 00:00:00 2001
    +From: Changaco <changaco@changaco.net>
    +Date: Tue, 4 Jun 2013 02:54:35 +0200
    +Subject: [PATCH] fix encoding issue in blogspam plugin
    +
    +RPC::XML uses ascii as default encoding, we have to tell it to use utf8.
    +
    +Without this, ikiwiki returns "failed to get response from blogspam server"
    +every time a non-ascii character is used in a content that needs checking.
    +
    +---
    + IkiWiki/Plugin/blogspam.pm | 1 +
    + 1 file changed, 1 insertion(+)
    +
    +diff --git a/IkiWiki/Plugin/blogspam.pm b/IkiWiki/Plugin/blogspam.pm
    +index d32c2f1..e48ed72 100644
    +--- a/IkiWiki/Plugin/blogspam.pm
    ++++ b/IkiWiki/Plugin/blogspam.pm
    +@@ -53,6 +53,7 @@ sub checkconfig () {
    + 	eval q{
    + 		use RPC::XML;
    + 		use RPC::XML::Client;
    ++		$RPC::XML::ENCODING = 'utf-8';
    + 	};
    + 	error $@ if $@;
    + }
    +-- 
    +1.8.3
    +
+ +[[done]] --[[Joey]] diff --git a/doc/bugs/error_handlers_with_gettext_can_clobber___36____64__.mdwn b/doc/bugs/error_handlers_with_gettext_can_clobber___36____64__.mdwn new file mode 100644 index 000000000..719c1ef25 --- /dev/null +++ b/doc/bugs/error_handlers_with_gettext_can_clobber___36____64__.mdwn @@ -0,0 +1,29 @@ +[[!template id=gitbranch branch=smcv/ready/careful-eval author="[[smcv]]" + browse="http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/careful-eval"]] +[[!tag patch]] + +As noted in the [[!cpan Try::Tiny]] man page, eval/$@ can be quite +awkward in corner cases, because $@ has the same properties and problems +as C's errno. While writing a regression test for definetemplate +in which it couldn't find an appropriate template, I received + + Error: failed to process template + deftmpl + +instead of the intended + + Error: failed to process template + deftmpl template deftmpl not + found + +which turned out to be because the "catch"-analogous block called +gettext before it used $@, and gettext can call define_gettext, +which uses eval. + +Fixed in my branch smcv/ready/careful-eval. Another possibility +for fixing this would be to depend on something like Try::Tiny, +which is already indirectly recommended by ikiwiki, because +[[!cpan RPC::XML]], [[!cpan XML::Feed]], etc., depend on it. +--[[smcv]] + +[[fixed in 3.20140227|done]] --s diff --git a/doc/bugs/feedpages_does_not_prevent_tags_from_being_aggregated.mdwn b/doc/bugs/feedpages_does_not_prevent_tags_from_being_aggregated.mdwn new file mode 100644 index 000000000..a004154df --- /dev/null +++ b/doc/bugs/feedpages_does_not_prevent_tags_from_being_aggregated.mdwn @@ -0,0 +1,32 @@ +I added a feedpages directive to `blog/index.mdwn` to not pick up anything tagged `tags/random/hidden`, yet that still happened. + + ~git/richardhartmann.de/blog % grep hidden index.mdwn + \[[!inline pages="./posts/*/*/* and !*/Discussion" feedpages="./posts/*/*/* and !*/Discussion and not tagged(tags/random/hidden)" show="10" actions=yes rootpage="blog"]] + ~git/richardhartmann.de/blog % grep hidden posts/2013/05/17-Debian_Release_Critical_Bug_report_for_Week_20.mdwn + \[[!tag tags/tech/floss/debian tags/tech/floss/debian/rc-stats/8.0-jessie tags/random/hidden]] + ~git/richardhartmann.de/blog % + +If you need more information, please let me know. + +Richard + +> I don't think this is a bug. You have a syntax error in your pagespec: +> "not" is not a recognised keyword in [[pagespecs|ikiwiki/pagespec]], +> so `and not tagged(...)` should be `and !tagged(...)`. Presumably inline +> falls back to `pages` when `feedpages` doesn't work. +> +> By posting the pagespec here with insufficient escaping (which I've fixed) +> you caused *this* ikiwiki instance's HTML to contain an error message +> illustrating that syntax error :-) +> +> Error: syntax error in pagespec "(./posts/*/*/* and !*/Discussion) and (./posts/*/*/* and !*/Discussion and not tagged(tags/random/hidden))" +> +> [[done]]. --[[smcv]] + +> > As per IRC: Thanks. As an aside, shouldn't this ikiwiki instance ignore directives in normal text? The problem may be non-trivial, but still... -- Richard + +>>> "Normal text" is exactly where directives go, so, not really. +>>> If you mean verbatim text (e.g. `<pre>` or
indentation in Markdown): the fact that +>>> directives still expand to HTML, which is then treated as verbatim, is an +>>> unfortunate result of how ikiwiki interacts with pages' markup languages +>>> (directives and wikilinks happen before markup is converted to HTML). --[[smcv]] diff --git a/doc/bugs/feeds_get_removed_in_strange_conditions.mdwn b/doc/bugs/feeds_get_removed_in_strange_conditions.mdwn new file mode 100644 index 000000000..deec208ba --- /dev/null +++ b/doc/bugs/feeds_get_removed_in_strange_conditions.mdwn @@ -0,0 +1,57 @@ +For some time now, in circumstances that I've had enormous troubles +trying to track, I've seen feeds getting removed by ikiwiki when +apparently unrelated pages got changed, with the message: + +> removing somepath/somepage/somefeed, no longer built by some/unrelated/page + +I've finally been able to find how and why it happens. The situation is +the following: + +* page A has an inline directive that (directly) generates a feed F +* page B inlines A, thus (indirectly) generating F again +* page B is rendered after page A + +The feed removal happens when changes are made to prevent B from +inlining A; for example, because B is a tag page and A is no longer tagged B, or +because B includes A through a pagespec that no longer matches A. In +this case, this happens: + +* page A is built, rendering F +* page B is built, _not_ rendering F, which it used to render +* F is removed because it is not built by B anymore + +Note that although this issue is triggered (for me) by the changes I +proposed last year to allow feed generation from nested inlines, +coalescing it to be page-based instead of destpage-based +(bb8f76a4a04686def8cc6f21bcca80cb2cc3b2c9 and +72c8f01b36c841b0e83a2ad7ad1365b9116075c5), there is potential for it +popping up in other cases. + +Specifically, the logic for the removal of dependent pages currently +relies on the assumption that each output has a single generator. My +changes caused this assumption to be violated, hence the error, but +other cases may pop up for other plugins in the future. + +I have a [patch] fixing this issue (for feeds specifically, i.e. only +the problem I am actually having) on top of my `mystuff` branch, but +since that also has heaps of other unrelated stuff, you may want to just +[pick it from my gitweb][gw]. + +[gw]: http://git.oblomov.eu/ikiwiki/patch/671cb26cf50643827f258270d9ac8ad0b1388a65 + +The patch changes the `will_render()` for feeds to be based on the page +rather than on the destpage, matching the fact that for nested inlines +it's the inner page that is ultimately responsible for generating the +feed. + +I've noticed that it requires at least _two_ full rebuilds before the +index is again in a sensible state. (On the first rebuild, all feeds +from nested inlines are actually _removed_.) + +While the patch is needed because there are legitimate cases in which +nested feeds are needed (for example, I have an index page that inlines +index pages for subsections of my site, and I want _those_ feeds to +remain visible), there are other cases when one may want to skip feed +generation from nested inlines. + +--[[GiuseppeBilotta]] diff --git a/doc/bugs/garbled_non-ascii_characters_in_body_in_web_interface.mdwn b/doc/bugs/garbled_non-ascii_characters_in_body_in_web_interface.mdwn new file mode 100644 index 000000000..657b86baa --- /dev/null +++ b/doc/bugs/garbled_non-ascii_characters_in_body_in_web_interface.mdwn @@ -0,0 +1,126 @@ +Since my latest jessie upgrade here, charsets are all broken when editing a page.
The page I'm trying to edit is [this wishlist](http://anarc.at/wishlist/), and it used to work fine. Now, instead of: + +`Voici des choses que vous pouvez m'acheter si vous êtes le Père Nowel (yeah right):` + +... as we see in the rendered body right now, when I edit the page I see: + +`Voici des choses que vous pouvez m'acheter si vous �tes le P�re Nowel (yeah right):` + +... a typical double-encoding nightmare. The actual binary data is this for the word "Père" according to `hd`: + +~~~~ +anarcat@marcos:ikiwiki$ echo "Père" | hd +00000000 50 c3 a8 72 65 0a |P..re.| +00000006 +anarcat@marcos:ikiwiki$ echo "P�re" | hd +00000000 50 ef bf bd 72 65 0a |P...re.| +00000007 +~~~~ + +> I don't know what that is, but it isn't the usual double-UTF-8 encoding: +> +> >>> u'è'.encode('utf-8') +> '\xc3\xa8' +> >>> u'è'.encode('utf-8').decode('latin-1').encode('utf-8') +> '\xc3\x83\xc2\xa8' +> +> A packet capture of the incorrect HTTP request/response headers and body +> might be enlightening? --[[smcv]] +> +> > Here are the headers according to chromium: +> > +> > ~~~~ +> > GET /ikiwiki.cgi?do=edit&page=wishlist HTTP/1.1 +> > Host: anarc.at +> > Connection: keep-alive +> > Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8 +> > User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 +> > Referer: http://anarc.at/wishlist/ +> > Accept-Encoding: gzip,deflate,sdch +> > Accept-Language: fr,en-US;q=0.8,en;q=0.6 +> > Cookie: openid_provider=openid; ikiwiki_session_anarcat=XXXXXXXXXXXXXXXXXXXXXXX +> > +> > HTTP/1.1 200 OK +> > Date: Mon, 08 Sep 2014 21:22:24 GMT +> > Server: Apache/2.4.10 (Debian) +> > Set-Cookie: ikiwiki_session_anarcat=XXXXXXXXXXXXXXXXXXXXXXX; path=/; HttpOnly +> > Vary: Accept-Encoding +> > Content-Encoding: gzip +> > Content-Length: 4093 +> > Keep-Alive: timeout=5, max=100 +> > Connection: Keep-Alive +> > Content-Type: text/html; charset=utf-8 +> > ~~~~ +> > +> > ... which seem fairly normal... getting more data than this is a little inconvenient since the data is gzip-encoded and I'm kind of lazy extracting that from the stream. Chromium does seem to auto-detect it as utf8 according to the menus however... not sure what's going on here. I would focus on the following error however, since it's clearly emanating from the CGI... --[[anarcat]] + +Clicking on the Cancel button yields the following warning: + +~~~~ +Error: Cannot decode string with wide characters at /usr/lib/x86_64-linux-gnu/perl/5.20/Encode.pm line 215. +~~~~ + +> Looks as though you might be able to get a Python-style backtrace for this +> by setting `$Carp::Verbose = 1`. +> +> The error is that we're taking some string (which string? only a backtrace +> would tell you) that is already flagged as Unicode, and trying to decode +> it from byte-blob to Unicode again, analogous to this Python: +> +> some_bytes.decode('utf-8').decode('utf-8') +> +> --[[smcv]] +> > +> > I couldn't figure out where to set that Carp thing - it doesn't work simply by setting it in /usr/bin/ikiwiki - so I am not sure how to use this. However, with some debugging code in Encode.pm, I was able to find a case of double-encoding - in the left menu, for example, which is the source of the Encode.pm crash. +> > +> > It seems that some unicode semantics changed in Perl 5.20, or more precisely, in Encode.pm 2.53, according to [this](https://code.activestate.com/lists/perl-unicode/3314/).
5.20 does have significant Unicode changes, but I am not sure they are related (see [perldelta](https://metacpan.org/pod/distribution/perl/pod/perldelta.pod)). Doing more archeology, it seems that Encode.pm is indeed where the problem started, all the way back in [commit 8005a82](https://github.com/dankogai/p5-encode/commit/8005a82d8aa83024d72b14e66d9eb97d82029eeb#diff-f3330aa405ffb7e3fec2395c1fc953ac) (August 2013), taken from [pull request #11](https://github.com/dankogai/p5-encode/pull/11) which expressly forbids double-decoding, in effect failing like python does in the above example you gave (Perl used to silently succeed instead, a rather big change if you ask me). +> > +> > So stepping back, it seems that this would be a bug in Ikiwiki. It could be in any of those places: +> > +> > ~~~~ +> > anarcat@marcos:ikiwiki$ grep -r decode_utf8 IkiWiki* | wc -l +> > 31 +> > ~~~~ +> > +> > Now the fun part is to determine which one should be turned off... or should we duplicate the logic that was removed in decode_utf8, or make a safe_decode_utf8 for ourselves? --[[anarcat]] + +The apache logs yield: + +~~~~ +[Mon Sep 08 16:17:43.995827 2014] [cgi:error] [pid 2609] [client 192.168.0.3:47445] AH01215: Died at /usr/share/perl5/IkiWiki/CGI.pm line 467., referer: http://anarc.at/ikiwiki.cgi?do=edit&page=wishlist +~~~~ + +Interestingly enough, I can't reproduce the bug here (at least in this page). Also, editing the page through git works fine. + +I had put ikiwiki on hold during the last upgrade, so it was upgraded separately. The bug happens both with 3.20140613 and 3.20140831. The major thing that happened today is the upgrade from perl 5.18 to 5.20. Here's the output of `egrep '[0-9] (remove|purge|install|upgrade)' /var/log/dpkg.log | pastebinit -b paste.debian.net` to give an idea of what was upgraded today: + +http://paste.debian.net/plain/119944 + +This is a major bug which should probably be fixed before jessie, yet I can't seem to find a severity statement in reportbug that would justify blocking the release based on this - unless we consider non-English speakers as "most" users (I don't know the demographics well enough). It certainly makes ikiwiki completely unusable for my users that operate on the web interface in French... --[[anarcat]] + +Note that on this one page, I can't even get the textarea to display and I immediately get `Error: Cannot decode string with wide characters at /usr/lib/x86_64-linux-gnu/perl/5.20/Encode.pm line 215`: http://anarc.at/ikiwiki.cgi?do=edit&page=hardware%2Fserver%2Fmarcos. + +Also note that this is the same as [[forum/"Error: cannot decode string with wide characters" on Mageia Linux x86-64 Cauldron]], I believe. The backtrace I get here is: + +~~~~ +Error: Cannot decode string with wide characters at /usr/lib/x86_64-linux-gnu/perl/5.20/Encode.pm line 215. Encode::decode_utf8("**Menu**\x{d}\x{a}\x{d}\x{a} * [[\x{fffd} propos|index]]\x{d}\x{a} * [[Logiciels|software]]"...)
+called at /usr/share/perl5/IkiWiki/CGI.pm line 117 IkiWiki::decode_form_utf8(CGI::FormBuilder=HASH(0x2ad63b8)) +called at /usr/share/perl5/IkiWiki/Plugin/editpage.pm line 90 IkiWiki::cgi_editpage(CGI=HASH(0xd514f8), CGI::Session=HASH(0x27797e0)) +called at /usr/share/perl5/IkiWiki/CGI.pm line 443 IkiWiki::__ANON__(CODE(0xfaa460)) +called at /usr/share/perl5/IkiWiki.pm line 2101 IkiWiki::run_hooks("sessioncgi", CODE(0x2520138)) +called at /usr/share/perl5/IkiWiki/CGI.pm line 443 IkiWiki::cgi() +called at /usr/bin/ikiwiki line 192 eval {...} +called at /usr/bin/ikiwiki line 192 IkiWiki::main() +called at /usr/bin/ikiwiki line 231 +~~~~ + +so this would explain the error on cancel, but doesn't explain the weird encoding I get when editing the page... + +... and that leads me to this crazy patch which fixes all the above issues, by avoiding double-decoding... go figure that shit out... + +[[!template id=gitbranch branch=anarcat/dev/safe_unicode author="[[anarcat]]"]] + +> [[Looks good to me|users/smcv/ready]] although I'm not sure how valuable +> the `$] < 5.02 || ` test is - I'd be tempted to just call `is_utf8`. --[[smcv]] + +>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/graphviz_demo_generates_empty_graph.mdwn b/doc/bugs/graphviz_demo_generates_empty_graph.mdwn new file mode 100644 index 000000000..5b96f148e --- /dev/null +++ b/doc/bugs/graphviz_demo_generates_empty_graph.mdwn @@ -0,0 +1,15 @@ +The following code in our sandbox generates an empty graph: + + [[!graph src="""" + google [ href="http://google.com/" ] + sandbox [ href=\[[SandBox]] ] + help [ href=\[[ikiwiki/formatting]] ] + newpage [ href=\[[NewPage]] ] + + google -> sandbox -> help -> newpage -> help -> google; + """"]] + +It is the exact same thing as on the [[ikiwiki/directive/graph/]] directive documentation, from the [[plugins/graphviz]] plugin. This is ikiwiki 3.20120203 on Debian wheezy and graphviz is installed (2.26.3-10). Note that the first demo actually works. See --[[anarcat]] + +> Looking at the example shows too many double quotes. [[fixed|done]] +> --[[Joey]] diff --git a/doc/bugs/http_proxy_for_openid.mdwn b/doc/bugs/http_proxy_for_openid.mdwn index 566896ec3..4a9c1b3eb 100644 --- a/doc/bugs/http_proxy_for_openid.mdwn +++ b/doc/bugs/http_proxy_for_openid.mdwn @@ -79,8 +79,7 @@ Brian May >>>>> installed, even with the above commit, `openid` won't be able to >>>>> traverse a proxy. --[[schmonz]] -[[!template id=gitbranch branch=schmonz/proxies author="[[schmonz]]"]] +[[!template id=gitbranch branch=schmonz/proxy author="[[schmonz]]"]] ->>>>> I bollixed up my git, recloned, and reapplied the diffs, so ->>>>> that commit won't exist anymore. My proxy-related changes are ->>>>> now on a branch. --[[schmonz]] +>>>>>> I've redone this from scratch, much more simply, on a new +>>>>>> branch. --[[schmonz]]. diff --git a/doc/bugs/image_rescaling_distorts_with_small_pictures.mdwn b/doc/bugs/image_rescaling_distorts_with_small_pictures.mdwn new file mode 100644 index 000000000..6425c1ece --- /dev/null +++ b/doc/bugs/image_rescaling_distorts_with_small_pictures.mdwn @@ -0,0 +1,49 @@ +If you use the rescaling feature of the directive [[ikiwiki/directive/img/]] with a smaller image, it will distort. E.g. an image with 150x250 rescaled into size=200x200. --bastla + +> More specifically: `img` normally preserves aspect ratio: +> `size=200x200` normally means "as large as possible, keeping +> the width 200px or less, the height 200px or less, and the +> aspect ratio correct".
So a 4:3 image with `size=200x200` +> would actually come out 200px wide and 150px tall. +> +> However, when (desired width is specified) && (desired height is specified) +> && ((width > desired width) || (height > desired height)), +> it uses exactly the desired size, without preserving aspect ratio. +> --smcv + +>> [[!template id=gitbranch branch=chrysn/imgforpdf-and-more author="[[chrysn]]"]] +>> +>> [[!tag patch]] +>> +>> i've implemented a fix for this along with a unit test. +>> +>> the patch branch is based on the imgforpdf branch +>> ([[bugs/svg and pdf conversion fails]]), because it would not cleanly merge. +>> the branch also improves how images are handled in preview, falling back +>> to data: urls if the image has not been rendered in a saved version. please +>> review. --[[chrysn]] + +>>> Mostly [[looks good to me|users/smcv/ready]]. +>>> +>>> Minor things, which wouldn't stop me merging it if I could: +>>> +>>> * `$imgdatalink = "data:image/".$im->Get("magick").";base64,".encode_base64($blob[0]);`: +>>> is the ImageMagick file type always valid as the second part of +>>> a MIME type? +>>> * In this code: +>>> +>>> +open (my $outhtmlfd, "<", "$outpath.html"); +>>> +local $/=undef; +>>> +my $outhtml = <$outhtmlfd>; +>>> +close $outhtmlfd; +>>> +>>> no block is closed, so the "local" is ineffective, so the `<>` operator +>>> remains in read-entire-file mode afterwards. To avoid odd side-effects, +>>> I would suggest using `readfile()` like `t/trail.t` does. +>>> +>>> [[!template id=gitbranch branch=smcv/ready/imgforpdf-and-more author="[[chrysn]], [[smcv]]" browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/imgforpdf-and-more]] +>>> I've used `readfile()` (but not done anything about the ImageMagick file type) +>>> in my copy of the branch. +>>> +>>> --[[smcv]] diff --git a/doc/bugs/images_in_inlined_pages_have_wrong_relative_URL.mdwn b/doc/bugs/images_in_inlined_pages_have_wrong_relative_URL.mdwn index 8cda7a70f..e73c7e662 100644 --- a/doc/bugs/images_in_inlined_pages_have_wrong_relative_URL.mdwn +++ b/doc/bugs/images_in_inlined_pages_have_wrong_relative_URL.mdwn @@ -12,4 +12,11 @@ If I then inline that page, the (relative) URL no longer points to the right pla > However, there is a simple way to avoid both problems: Use WikiLinks > and/or the [[img_directive|ikiwiki/directive/img]]. --[[Joey]] +> > For some inline HTML (e.g. SVG embedded with `<object>` tags), it +> > would be nice to have a URL directive for URL-only WikiLinks. +> > Something like: +> > ``. +> > This would be a more general solution than an [[SVG-specific +> > fix|todo/svg]]. --[[wtk]] + [[!tag done]] diff --git a/doc/bugs/ipv6_address_in_comments.mdwn b/doc/bugs/ipv6_address_in_comments.mdwn new file mode 100644 index 000000000..90391650a --- /dev/null +++ b/doc/bugs/ipv6_address_in_comments.mdwn @@ -0,0 +1,19 @@ +If I make a comment from an ipv4 address +I see the commenter's ipv4 address logged in the comment file. + +If I make a comment from an ipv6 address +I see nothing. + +There is a sanity check in /usr/share/perl5/IkiWiki/Plugin/comments.pm +at line 447 (according to today's version): an ipv4-specific regexp. + +I removed the regexp and used the value without this added check and it fixed +the problem for me. Not sure if this is the best solution. --[[cstamas]] + +[[patch]] + +[[!tag ipv6]] + +> [[done]] --[[Joey]] + +> > Thank you!
--[[cstamas]] diff --git a/doc/bugs/linkmap_displays_underscore_escapes.mdwn b/doc/bugs/linkmap_displays_underscore_escapes.mdwn new file mode 100644 index 000000000..14164d076 --- /dev/null +++ b/doc/bugs/linkmap_displays_underscore_escapes.mdwn @@ -0,0 +1,35 @@ +[[!template id=gitbranch branch=chrysn/linkmapenhancement author="[[chrysn]]"]] + +[[ikiwiki/directive/linkmap]]s display the file name instead of the pagetitle, showing unsightly underscore escapes and underscores instead of blanks to users. + +the attached [[!taglink patch]] fixes this; from its commit message: + + display the pagetitle() in linkmaps + + without this patch, linkmaps display underscores and underscore escape + sequences in the rendered output. + + this introduces a pageescape function, which invoces pagetitle() to get + rid of underscore escapes and wraps the resulting utf8 string + appropriately for inclusion in a dot file (using dot's html encoding + because it can represent the '\"' dyad properly, and because it doesn't + need special-casing of newlines). + +the output will look much better (at least in my wikis) with the "[[bugs/pagetitle function does not respect meta titles]]" issue fixed. + +> [[Looks good to me|users/smcv/ready]]. +> +> I don't think it's correct for `pagetitle()` to output `\[[!meta title]]` +> though, as discussed on the linked bug: it appears in an assortment of +> contexts where the full formal title of the page seems inappropriate. +> If you want linkmap to use `\[[!meta title]]`, I think it would be +> better to give it a `show` parameter, like `\[[!map]]` has? +> --[[smcv]] + +>> sounds good; i'll have a look at it the next time i touch the linkmap +>> plugin. the patch at hand would be a starting point for that. --[[chrysn]] + +the patch is stored in [[the patch.pl]] as created by git-format-patch, and can +be pulled from the abovementioned branch. + +> update 2014-06-29: branch still merges cleanly and works. --[[chrysn]] diff --git a/doc/bugs/linkmap_displays_underscore_escapes/the_patch.pl b/doc/bugs/linkmap_displays_underscore_escapes/the_patch.pl new file mode 100644 index 000000000..6b56c553e --- /dev/null +++ b/doc/bugs/linkmap_displays_underscore_escapes/the_patch.pl @@ -0,0 +1,68 @@ +From efbb1121ffdc146f5c9a481a51f23ad151b9f240 Mon Sep 17 00:00:00 2001 +From: chrysn +Date: Thu, 15 Mar 2012 14:38:42 +0100 +Subject: [PATCH] display the pagetitle() in linkmaps + +without this patch, linkmaps display underscores and underscore escape +sequences in the rendered output. + +this introduces a pageescape function, which invoces pagetitle() to get +rid of underscore escapes and wraps the resulting utf8 string +appropriately for inclusion in a dot file (using dot's html encoding +because it can represent the '\"' dyad properly, and because it doesn't +need special-casing of newlines). 
+--- + IkiWiki/Plugin/linkmap.pm | 17 +++++++++++++++-- + 1 files changed, 15 insertions(+), 2 deletions(-) + +diff --git a/IkiWiki/Plugin/linkmap.pm b/IkiWiki/Plugin/linkmap.pm +index ac26e07..b5ef1a1 100644 +--- a/IkiWiki/Plugin/linkmap.pm ++++ b/IkiWiki/Plugin/linkmap.pm +@@ -5,6 +5,7 @@ use warnings; + use strict; + use IkiWiki 3.00; + use IPC::Open2; ++use HTML::Entities; + + sub import { + hook(type => "getsetup", id => "linkmap", call => \&getsetup); +@@ -22,6 +23,18 @@ sub getsetup () { + + my $mapnum=0; + ++sub pageescape { ++ my $item = shift; ++ # encoding explicitly in case ikiwiki is configured to accept <> or & ++ # in file names ++ my $title = pagetitle($item, 1); ++ # it would not be necessary to encode *all* the html entities (<> would ++ # be sufficient, &" probably a good idea), as dot accepts utf8, but it ++ # isn't bad either ++ $title = encode_entities($title); ++ return("<$title>"); ++} ++ + sub preprocess (@) { + my %params=@_; + +@@ -63,7 +76,7 @@ sub preprocess (@) { + my $show=sub { + my $item=shift; + if (! $shown{$item}) { +- print OUT "\"$item\" [shape=box,href=\"$mapitems{$item}\"];\n"; ++ print OUT pageescape($item)." [shape=box,href=\"$mapitems{$item}\"];\n"; + $shown{$item}=1; + } + }; +@@ -74,7 +87,7 @@ sub preprocess (@) { + foreach my $endpoint ($item, $link) { + $show->($endpoint); + } +- print OUT "\"$item\" -> \"$link\";\n"; ++ print OUT pageescape($item)." -> ".pageescape($link).";\n"; + } + } + print OUT "}\n"; +-- +1.7.9.1 diff --git a/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn b/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn new file mode 100644 index 000000000..ad52d780a --- /dev/null +++ b/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn @@ -0,0 +1,114 @@ +The [[ikiwiki/directive/listdirectives]] directive doesn't register a link between the page and the subpages. This is a problem because the [[ikiwiki/directive/orphans]] directive then marks the directive pages as orphans... Maybe it is a bug with the orphans directive, however... A simple workaround is to exclude those files from the orphans call... --[[anarcat]] + +> There's a distinction between wikilinks (matched by `link()`, +> `backlink()` etc.) and other constructs that produce a +> hyperlink. Some directives count as a wikilink (like `tag`) +> but many don't (notably `inline`, `map`, `listdirectives`, +> and `orphans` itself). As documented in +> [[ikiwiki/directive/orphans]], orphans will tend to list +> pages that are only matched by inlines/maps, too. +> +> The rule of thumb seems to be that a link to a particular +> page counts as a wikilink, but a directive that lists +> pages matching some pattern does not; so I think +> `listdirectives` is working as intended here. +> `orphans` itself obviously shouldn't count as a wikilink, +> because that would defeat the point of it :-) +> +> Anything that uses a [[ikiwiki/pagespec]] to generate links, +> like `inline` and `map`, can't generate wikilinks, because +> wikilinks are gathered during the scan phase, and pagespecs +> can't be matched until after the scan phase has finished +> (otherwise, it'd be non-deterministic whether all wikilinks +> had been seen yet, and `link()` in pagespecs wouldn't work +> predictably). +> +> I suggest just using something like: +> +> \[[!orphans pages="* and !blog/* and !ikiwiki/directive/*"]] +> +> This wiki's example of listing [[plugins/orphans]] has a +> more elaborate pagespec, which avoids bugs, todo items etc. +> as well.
+> +> --[[smcv]] + +> No follow-up or objection for a while, so considering this to +> be working as designed. --[[smcv]] + +> > Seems I'm a bit late to butt in, but would it be possible to have two +> > further phases after the scan phase, the first running map and inline +> > and the second orphan? Then map and inline could log or register their +> > links (obviously somewhere where it won't change the result of the link function) +> > and orphan could take them into account. This logging could be +> > turned on by a parameter to not waste time for users not needing this and +> > make it tunable (i.e. so that the user can decide which map directives count and which don't) +> > +> > For someone using map and especially autoindex the output of the orphans directive +> > is simply wrong/useless (at least it is for me). And there is no easy workaround like for listdirectives +> > -- [[holger]] + +>>> Hmm. I think this can be done without introducing any "phases", +>>> even, but it would require each plugin that generates links according +>>> to a pagespec to have either a conditional call into the orphans plugin, +>>> or a call to a new core function in ikiwiki that exists solely to +>>> support the orphans plugin. Something like this, maybe: +>>> +>>> # in map.pm, inline.pm, pagestats.pm etc., at scan time +>>> if (IkiWiki::Plugin::orphans->can("add_reachable")) { +>>> IkiWiki::Plugin::orphans::add_reachable($page, $pagespec); +>>> } +>>> +>>> # in orphans.pm (pseudocode; note that this does not *evaluate* +>>> # $pagespec, only stores it, so it's OK to do this at scan time) +>>> sub needsbuild ($pages) +>>> for each page in $pages +>>> clear $pagestate{location}{orphans}{reachable} +>>> sub reachable ($location, $pagespec) +>>> add $pagespec to @{$pagestate{location}{orphans}{reachable}} +>>> +>>> # in preprocess function in orphans.pm (pseudocode) +>>> # executed at build time, not at scan time, so pagespecs work +>>> +>>> for each maybe_orphan with no links to it +>>> for each location with a list of reachable pagespecs +>>> make the page with the orphans directive depend on \ +>>> the page that is the location +>>> for each of those pagespecs +>>> if pagespec matches orphan +>>> take orphan off the list +>>> go to next orphan +>>> output list of orphans +>>> +>>> (Maybe parentlinks should also annotate the parent/ancestors of +>>> each page as reachable from that page.) +>>> +>>> Do other people (mainly Joey) think that'd be acceptable, or +>>> too intrusive? +>>> +>>> Taking this off the list of resolved bugs again while we think about it. +>>> +>>> I suspect that in the presence of autoindex, what you really want might +>>> be less "there's a link to it" and more "there's a path to it from +>>> the root of the wiki", which is why I called the proposed function +>>> "add_reachable". On the other hand, maybe that's too computationally +>>> intensive to actually do; I haven't tried it. +>>> --[[smcv]] +>>>> +>>>> (I'll interpret Joey's silence as a good sign ;-). Is there a difference between "link to it" and "path to it"? If we assume autoindex produces bona fide "first class" links there shouldn't be one!? +>>>> +>>>> So far your idea sounds great, says me without any knowledge of the source. I'll try to grok it. Is there a medium for silly questions? A wiki seems not the right fit for that. -- [[holger]] +>>>>> Yes, there *has* to be a difference between a first class wikilink +>>>>> and the thing to which `map` and `inline` can contribute.
>>>>> `map` and `inline` use a pagespec to decide what they include, +>>>>> and pagespecs can't be evaluated and get a correct answer until the +>>>>> set of links has been collected, because their results often depend +>>>>> on the set of links. Otherwise, suppose you had a page `foo` whose only +>>>>> contents were this: +>>>>> +>>>>> \[[!inline pages="!backlink(foo)"]] +>>>>> +>>>>> If `inline` generated links, it would inline exactly those pages that +>>>>> it doesn't inline. That's never going to end well :-) --[[smcv]] +>>>>>> We have to differentiate between what users of ikiwiki consider first class links and what is happening internally. For the user any link contributing to the structured access tree is first class. The code on the other hand has to differentiate between the static links, then generated links, then orphan links. Three "passes"; even your proposed solution could be seen as adding another pass, since the orphan plugin has to run after all the plugins generating (first class user) links. -- [[holger]] + diff --git a/doc/bugs/map_generates_malformed_HTML.mdwn b/doc/bugs/map_generates_malformed_HTML.mdwn new file mode 100644 index 000000000..890a6ef7f --- /dev/null +++ b/doc/bugs/map_generates_malformed_HTML.mdwn @@ -0,0 +1,36 @@ +[[!template id=gitbranch branch=smcv/ready/map author="[[Simon McVittie|smcv]]"]] +[[!tag patch]] + +`\[[!map]]` can generate bad HTML with unbalanced open/close tags +(in XML terms: "not well-formed") in certain situations. This +appears to be a regression caused by fixing +[[maps with nested directories sometimes make ugly lists]], which +suppressed some redundant `</ul><ul>
` pairs, but appears not to +have the ideal logic for this, leading to malformed HTML. + +In particular, on a site with these pages: + +* alpha + * 1 + * i + * ii + * iii + * iv + * 2 + * a + * b + * 3 +* beta + +the maps "`alpha/1 or beta`", "`alpha/1/i* or alpha/2/a or beta`" and +"`alpha/1/i* or alpha/2/a`" have malformed HTML. + +My `ready/map` branch adds a regression test and makes it pass. + +The fix is not particularly elegant - it generates the previous +HTML with redundant `</ul><ul>
      ` pairs, marks the redundant +pairs, and edits them out afterwards - but it works. If anyone can come +up with a cleaner algorithm that avoids generating the redundant tags +in the first place, that would be even better. --[[smcv]] + +> [[merged|done]] (not thrilled at this solution, but it works) --[[Joey]] diff --git a/doc/bugs/must_save_before_uploading_more_than_one_attachment.mdwn b/doc/bugs/must_save_before_uploading_more_than_one_attachment.mdwn index 20d5dc8e6..bd5ddc6d5 100644 --- a/doc/bugs/must_save_before_uploading_more_than_one_attachment.mdwn +++ b/doc/bugs/must_save_before_uploading_more_than_one_attachment.mdwn @@ -26,7 +26,19 @@ Is this a problem on my site or does anyone else see this? >>> The right fix would probably be for `do=create` to allow replacing a page >>> in the transient underlay without complaining (like the behaviour that ->>> `do=edit` normally has). That wouldn't help you unless [[plugins/autoindex]] +>>> `do=edit` normally has). + +>>>> ... which it turns out it already does. --[[smcv]] + +>>> That wouldn't help you unless [[plugins/autoindex]] >>> defaulted to making transient pages (`autoindex_commit => 0`), but if we >>> can fix [[removal_of_transient_pages]] then maybe that default can change? >>> --[[smcv]] + +>>>> It turns out that with `autoindex_commit => 0`, the failure mode is +>>>> different. The transient map is created when you attach the +>>>> attachment. When you save the page, it's written into the srcdir, +>>>> the map is deleted from the transientdir, and the ctime/mtime +>>>> in the indexdb are those of the file in the srcdir, but for some +>>>> reason the HTML output isn't re-generated (despite a refresh +>>>> happening). --[[smcv]] diff --git a/doc/bugs/nonexistent_pages_in_inline_pagenames_do_not_add_a_dependency.mdwn b/doc/bugs/nonexistent_pages_in_inline_pagenames_do_not_add_a_dependency.mdwn new file mode 100644 index 000000000..486be0363 --- /dev/null +++ b/doc/bugs/nonexistent_pages_in_inline_pagenames_do_not_add_a_dependency.mdwn @@ -0,0 +1,44 @@ +In commit aaa72a3a8, Joey noted: + +> bestlink returns '' if no existing page matches a link. This propigated +> through inline and other plugins, causing uninitialized value warnings, and +> in some cases (when filecheck was enabled) making the whole directive fail. +> +> Skipping the empty results fixes that, but this is papering over another +> problem: If the missing page is later added, there is not dependency +> information to know that the inline needs to be updated. Perhaps smcv will +> fix that later. + +Potential ways this could be addressed: + +* Add a presence dependency on everything the reference could match: + so if the `inline` is on `a/b/c` and the missing page is `m`, + add a `$depends_simple` `$DEPEND_PRESENCE` dependency on `a/b/c/m`, + `a/b/m`, `a/m`, `m` and (if configured) `$config{userdir}/m` + +* Make the page names in `\[[!inline pagenames=...]]` count as wikilinks, + changing the behaviour of `link()` and backlinks, but causing appropriate + rebuilds via the special cases in `IkiWiki::Render` + +* Extend the special cases in `IkiWiki::Render` to consider a superset of + wikilinks, to which `pagenames` would add its named pages, without + affecting `link()` and backlinks + +(Note that `\[[!inline pages=...]]` cannot count as wikilinks, because +pagespecs can contain `link()`, so can't be evaluated until we know what +wikilinks exist, at which point it's too late to add more wikilinks.) 
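+
+For the first option, the candidate set is cheap to compute. A rough
+sketch (`missing_page_candidates` is a hypothetical helper, not code
+from any branch; `add_depends` and `deptype` are the existing API):
+
+    # every location a failed link-like lookup from $page could
+    # have matched; creating any of them later must rebuild $page
+    sub missing_page_candidates ($$) {
+        my ($page, $link)=@_;
+        my @candidates;
+        my $dir=$page;
+        while (length $dir) {
+            push @candidates, "$dir/$link";
+            $dir=~s{/?[^/]+$}{};    # up one level
+        }
+        push @candidates, $link;
+        push @candidates, "$config{userdir}/$link"
+            if length $config{userdir};
+        return @candidates;
+    }
+
+    # at scan time, in inline.pm, for each name in pagenames that
+    # failed to resolve:
+    add_depends($page, $_, deptype("presence"))
+        foreach missing_page_candidates($page, $missing);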
+ +I think the presence dependency is probably the cleanest approach? +--[[smcv]] + +> I think it was possibly a mistake to use wikilink style lookup for +> `pagenames`. --[[Joey]] + +[[!tag patch]] [[!template id=gitbranch branch=smcv/literal-pagenames author="[[smcv]]"]] +>> I used the linking rules to make references to +>> "nearby" pages convenient, but if you'd prefer "absolute" +>> semantics, my `ready/literal-pagenames` branch does that. For +>> my main use-case for `pagenames` ([[plugins/contrib/album]]) +>> it's fine either way. --[[smcv]] + +>>> Ok, [[merged|done]]. I think it's more consistent this way. --[[Joey]] diff --git a/doc/bugs/notifyemail_fails_with_some_openid_providers.mdwn b/doc/bugs/notifyemail_fails_with_some_openid_providers.mdwn new file mode 100644 index 000000000..dd5016619 --- /dev/null +++ b/doc/bugs/notifyemail_fails_with_some_openid_providers.mdwn @@ -0,0 +1,91 @@ +[[!template id=gitbranch branch=anarcat/dev/openid_email author="[[anarcat]]"]] + +This bug affects [[plugins/notifyemail]] but is probably caused more by [[plugins/openid]]. When using OpenID to login to a site, no email notification is sent to the user (pagespec set to `*`) when a modification is done on the wiki. I believe this is because the OpenID plugin assumes the email comes from the OpenID provider - which is not necessarily going to succeed if, for privacy reason, the OpenID provider refuses to transmit the email to ikiwiki. + +In the OpenID plugin, the email is actually fetched when authenticating and is stored in the session, like so: + +[[!format perl """ +sub auth ($$) { +# [...] + my @extensions; + if ($vident->can("signed_extension_fields")) { + @extensions=grep { defined } ( + $vident->signed_extension_fields('http://openid.net/extensions/sreg/1.1'), + $vident->signed_extension_fields('http://openid.net/srv/ax/1.0'), + ); + } + my $nickname; + foreach my $ext (@extensions) { + foreach my $field (qw{value.email email}) { + if (exists $ext->{$field} && + defined $ext->{$field} && + length $ext->{$field}) { + $session->param(email => $ext->{$field}); + if (! defined $nickname && + $ext->{$field}=~/(.+)@.+/) { + $nickname = $1; + } + last; + } + } + +"""]] + +This is based on the assumption that the openid provider supports "sreg" or "ax" extensions, which is not mandatory, and even then, the provider is not forced to provide the email. + +Earlier in the plugin, the email field is actually hidden: + +[[!format perl """ +sub formbuilder_setup (@) { + my %params=@_; + + my $form=$params{form}; + my $session=$params{session}; + my $cgi=$params{cgi}; + + if ($form->title eq "preferences" && + IkiWiki::openiduser($session->param("name"))) { + $form->field(name => "openid_identifier", disabled => 1, + label => htmllink("", "", "ikiwiki/OpenID", noimageinline => 1), + value => "", + size => 1, force => 1, + fieldset => "login", + comment => $session->param("name")); + $form->field(name => "email", type => "hidden"); + } +} +"""]] + +I believe this could be worked around simply by re-enabling that field and allowing the user to specify an email there by hand, making a note that the OpenID provider's email is used by default. + +The dumbest [[!taglink patch]] that actually fixes the problem for me is in the branch mentionned above. + +It would probably be better to add a comment on the field as indicated above, but it's a good proof of concept. + +Any other ideas? 
--[[anarcat]] + +> Note: it seems that my email *is* given by my OpenID provider, no idea why this is not working, but the fix proposed in my branch works. --[[anarcat]] + +>> Note: this is one of two patches i need to apply at every upgrade. The other being [[can__39__t_upload_a_simple_png_image:_prohibited_by_allowed__95__attachments___40__file_MIME_type_is_application__47__octet-stream...]]. --[[anarcat]] + +>>> Is there any sort of check that the owner of the given email address +>>> wants to receive email from us, or a way for the owner of that email +>>> address to stop getting the emails? +>>> +>>> With passwordauth, if someone maliciously subscribes my email +>>> address to high-traffic pages or something (by using it as the +>>> email address of their wiki login), I can at least use +>>> password-recovery to hijack their account and unsubscribe myself. +>>> If they're signing in with an OpenID not associated with my +>>> email address and then changing the email address in the userdb +>>> to point to me, I don't think I can do that. +>>> +>>> With OpenID, I think we're just trusting that the OpenID provider +>>> wouldn't give us an unverified email address, which also seems +>>> a little unwise. +>>> +>>> It might be better to give ikiwiki a concept of verifying an +>>> email address (the usual send-magic-token flow) and only be +>>> willing to send notifications to a verified address? +>>> +>>> --[[smcv]] diff --git a/doc/bugs/opendiscussion_should_respect_the_discussion_option.mdwn b/doc/bugs/opendiscussion_should_respect_the_discussion_option.mdwn index e4bc736e3..cacd2b73b 100644 --- a/doc/bugs/opendiscussion_should_respect_the_discussion_option.mdwn +++ b/doc/bugs/opendiscussion_should_respect_the_discussion_option.mdwn @@ -1,6 +1,11 @@ +[[!template id=gitbranch branch=smcv/ready/less-open author="[[smcv]]"]] +[[!tag patch]] + The [[plugins/opendiscussion]] plugin allows pages named according to the `discussionpage` setting to be edited anonymously, even if `discussion => 0` is set. (If it respected the `discussion` option, the combination of `opendiscussion` and `moderatedcomments` might be good for blogs.) + +[[done]] --[[smcv]] diff --git a/doc/bugs/opendiscussion_should_respect_the_discussion_option/discussion.mdwn b/doc/bugs/opendiscussion_should_respect_the_discussion_option/discussion.mdwn new file mode 100644 index 000000000..a5c951671 --- /dev/null +++ b/doc/bugs/opendiscussion_should_respect_the_discussion_option/discussion.mdwn @@ -0,0 +1,26 @@ +This would be great to see fixed. It's perplexing to have discussion => 0 in my configuration, not have any discussion links on my site, but still be able to add a discussion page by URL hacking something like this: /cgi-bin/ikiwiki/ikiwiki.cgi?page=posts%2Fdiscussion&do=edit. + +Spammers have figured that little trick out, so I am consistently getting spam checked into my git repository. 
+ +I'm not really sure if this patch introduced other problems, but it seems to have fixed my site: + + 0 mcclelland@chavez:~/.ikiwiki/IkiWiki/Plugin$ diff -u /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm opendiscussion.pm + --- /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm 2012-05-07 11:31:24.000000000 -0400 + +++ opendiscussion.pm 2012-07-29 17:49:28.000000000 -0400 + @@ -25,7 +25,7 @@ + my $cgi=shift; + my $session=shift; + + - return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i; + + return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i && $config{discussion}; + return "" if pagespec_match($page, "postcomment(*)"); + return undef; + } + 1 mcclelland@chavez:~/.ikiwiki/IkiWiki/Plugin$ + +If libdir is configured to be ~/.ikiwiki in your ikiwiki.settings file, and you are running Debian, you can do the following: + + mkdir -p ~/.ikiwiki/IkiWiki/Plugin + cp /usr/share/perl5/IkiWiki/Plugin/opendiscussion.pm ~/.ikiwiki/IkiWiki/Plugin/ + +And then apply the patch above to ~/.ikiwiki/Ikiwiki/Plugin/opendiscussion.pm. diff --git a/doc/bugs/openid_login_fails_wirth_Could_not_determine_ID_provider_from_URL.mdwn b/doc/bugs/openid_login_fails_wirth_Could_not_determine_ID_provider_from_URL.mdwn new file mode 100644 index 000000000..073c10d14 --- /dev/null +++ b/doc/bugs/openid_login_fails_wirth_Could_not_determine_ID_provider_from_URL.mdwn @@ -0,0 +1,200 @@ +On some ikiwikis that I run, I get the following error on OpenID logins: + + no_identity_server: Could not determine ID provider from URL. + +> Is this fixed now that [[!debbug 738493]] has been fixed? --[[smcv]] + +> > No, it isn't. I still get: `no_identity_server: Could not determine ID provider from URL.` from the latest ikiwiki in jessie (3.20140831), with liblwpx-paranoidagent-perl 1.10-3. Debugging tells me it's still related to the `500 Can't verify SSL peers without knowing which Certificate Authorities to trust` error, so probably because `Mozilla::CA` is not packaged ([[!debbug 702124]]). I still had to apply the patch to disable SSL verification at the end of this file. However, setting `$ENV{PERL_LWP_SSL_CA_PATH} = '/etc/ssl/certs';` seems to work now, so the following dumb patch works: +> > +> > ~~~~ +> > --- /usr/bin/ikiwiki.orig 2014-09-08 15:48:35.715868902 -0400 +> > +++ /usr/bin/ikiwiki 2014-09-08 15:50:29.666779878 -0400 +> > @@ -225,4 +225,5 @@ +> > } +> > } +> > +> > +$ENV{PERL_LWP_SSL_CA_PATH} = '/etc/ssl/certs'; +> > main; +> > ~~~~ +> > +> > may not be the best place to fiddle around with this, but then again it makes sense that it applies to the whole program. it should probably be reported upstream as well. also in my git repo. -- [[anarcat]] +> > +> > > This seems Debian-specific. I would be inclined to consider this to be +> > > a packaging/system-integration (i.e. non-upstream) bug in +> > > `liblwpx-paranoidagent-perl` rather than an upstream bug in IkiWiki; +> > > it certainly seems inappropriate to put this Debian-specific path +> > > in upstream IkiWiki. If it can't be fixed in LWPX::ParanoidAgent for +> > > whatever reason, applying it via some sort of sed in ikiwiki's +> > > `debian/rules` might be more reasonable? --[[smcv]] +> > > +> > > > by "upstream", i did mean `liblwpx-paranoidagent-perl`. so yeah, maybe this should be punted back into that package's court again. :( --[[anarcat]] +> > > > +> > > > done, by bumping the severity of [[!debbug 744404]] to release-criticial. 
--[[anarcat]] +> > > > +> > > > > ooh cool, the bug was fixed already with an upload, so this should probably be considered [[done]] at this point, even without the patch below! great! -- [[anarcat]] + +[[!template id=gitbranch branch=anarcat/dev/ssl_ca_path author="[[anarcat]]"]] + +I seem recall having that error before, and fixing it, but it always seems to come back and I forget how to fix it. So I'll just open this bug and document it if i can figure it out... -- [[users/anarcat]] + +The Perl module manual says: + +> "no_identity_server" +> (CV) Tried to do discovery on a URL that does not seem to have any providers at all. + +Yet on the server side, I see no request coming in on the OpenID provider... + +Adding debugging helps in figuring out wtf is going on: + +~~~~ +anarcat@marcos:~$ diff -u ~/src/ikiwiki/IkiWiki/Plugin/openid.pm /usr/share/perl5/IkiWiki/Plugin/openid.pm +--- /home/anarcat/src/ikiwiki/IkiWiki/Plugin/openid.pm 2014-02-03 20:21:09.502878631 -0500 ++++ /usr/share/perl5/IkiWiki/Plugin/openid.pm 2014-04-13 11:45:25.413297420 -0400 +@@ -257,6 +256,7 @@ + return Net::OpenID::Consumer->new( + ua => $ua, + args => $q, ++ debug => 1, + consumer_secret => sub { return shift()+$secret }, + required_root => auto_upgrade_https($q, $cgiurl), + ); +~~~~ + +In my case, I see: + + +~~~~ +[Sun Apr 13 11:45:35.796531 2014] [cgi:error] [pid 7299] [client 162.223.3.24:39547] AH01215: [DEBUG Net::OpenID::Consumer] Cache MISS for https://id.koumbit.net/anarcat, referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +[Sun Apr 13 11:45:35.842520 2014] [cgi:error] [pid 7299] [client 162.223.3.24:39547] AH01215: [DEBUG Net::OpenID::Consumer] Cache MISS for https://id.koumbit.net/anarcat, referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +[Sun Apr 13 11:45:35.845603 2014] [cgi:error] [pid 7299] [client 162.223.3.24:39547] AH01215: [DEBUG Net::OpenID::Consumer] semantic info (https://id.koumbit.net/anarcat) = , referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +[Sun Apr 13 11:45:35.845672 2014] [cgi:error] [pid 7299] [client 162.223.3.24:39547] AH01215: [DEBUG Net::OpenID::Consumer] fail(no_identity_server) Could not determine ID provider from URL., referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +~~~~ + +There are three places in the code the original error message happens: + +* Net::OpenID::claimed_identity +* Net::OpenID::verified_identity +* Net::OpenID::_find_openid_server + +We'll look at the last one because it's where the URL data is actually fetched. + +[[!format perl """ +sub _find_openid_server { + my Net::OpenID::Consumer $self = shift; + my $url = shift; + my $final_url_ref = shift; + + my $sem_info = $self->_find_semantic_info($url, $final_url_ref) or + return; + + return $self->_fail("no_identity_server") unless $sem_info->{"openid.server"}; + $sem_info->{"openid.server"}; +} +"""]] + +From there we look at `_find_semantic_info()`, which is supposed to hit the OpenID server, but doesn't somehow.... 
By cranking up debugging, we can see that the consumer fails to verify the HTTPS signature on the host: + +~~~~ +[Sun Apr 13 11:58:30.284511 2014] [cgi:error] [pid 11141] [client 162.223.3.24:39563] AH01215: [DEBUG Net::OpenID::Consumer] url dump (https://id.koumbit.net/anarcat, SCALAR(0x3275ac0)) = 500 Can't verify SSL peers without knowing which Certificate Authorities to trust, referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +[Sun Apr 13 11:58:30.284551 2014] [cgi:error] [pid 11141] [client 162.223.3.24:39563] AH01215: , referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +[Sun Apr 13 11:58:30.284573 2014] [cgi:error] [pid 11141] [client 162.223.3.24:39563] AH01215: This problem can be fixed by either setting the PERL_LWP_SSL_CA_FILE, referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +[Sun Apr 13 11:58:30.284593 2014] [cgi:error] [pid 11141] [client 162.223.3.24:39563] AH01215: envirionment variable or by installing the Mozilla::CA module., referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +[Sun Apr 13 11:58:30.284597 2014] [cgi:error] [pid 11141] [client 162.223.3.24:39563] AH01215: , referer: http://cats.orangeseeds.org/ikiwiki.cgi?do=signin&action=verify&openid_identifier=https%3A%2F%2Fid.koumbit.net%2Fanarcat +~~~~ + +To get this little wonder, I had to change the `_find_semantic_info()` as followed: + +[[!format perl """ +sub _find_semantic_info { + my Net::OpenID::Consumer $self = shift; + my $url = shift; + my $final_url_ref = shift; + + my $doc = $self->_get_url_contents($url, $final_url_ref); + $self->_debug("url dump ($url, $final_url_ref) = " . $doc) if $self->{debug}; + my $info = _document_to_semantic_info($doc); + $self->_debug("semantic info ($url) = " . join(", ", map { $_.' => '.$info->{$_} } keys %$info)) if $self->{debug}; + + return $info; +} +"""]] + +A minimal test case would be: + +~~~~ +perl -e 'use LWPx::ParanoidAgent; + print $LWPx::ParanoidAgent::VERSION, " $]: "; + print length(LWPx::ParanoidAgent->new->get + ("https://id.koumbit.net/anarcat") + ->decoded_content), "\n";' +~~~~ + +And the results vary according to the version of perl: + +* wheezy: 1.07 5.014002: 5720 +* jessie: 1.10 5.018002: 398 + +Thanks [jwz](http://www.jwz.org/blog/2014/03/apple-broke-lwp-in-a-new-and-exciting-way-on-10-9-2/) for that.. Mozilla::CA *could* have been packaged in Debian, except it overlaps with the `ca-certificates` package, so it was [basically barred entry](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=702124). + +I tried the workaround of hardcoding the path to the CA root, using `PERL_LWP_SSL_CA_PATH=/etc/ssl/certs`, but then I hit *another* bug in LWP: [#738493](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=738493). + +Note that this bug is similar to [[bugs/ssl_certificates_not_checked_with_openid/]], but backwards: it checks the SSL certs but then fails to verify. + +I filed this bug in the Debian BTS as [#702124](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=702124). Downgrading to wheezy's version of LWPx::ParanoidAgent doesn't fix the problem, instead i get this error: + + 500 Can't read entity body: Resource temporarily unavailable + +... yet the commandline client works fine... I'm out of ideas for this sucker. 
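+
+For reference, with a plain [[!cpan LWP]] 6.x user agent (not
+LWPx::ParanoidAgent), the CA directory can be passed explicitly through
+`ssl_opts`, which sidesteps the Mozilla::CA lookup entirely. Untested
+sketch, assuming `LWP::Protocol::https` >= 6 and the Debian
+`ca-certificates` location:
+
+~~~~
+use LWP::UserAgent;
+
+# point IO::Socket::SSL at the system CA directory instead of
+# relying on Mozilla::CA being installed
+my $ua = LWP::UserAgent->new(
+    ssl_opts => { SSL_ca_path => '/etc/ssl/certs' },
+);
+print $ua->get('https://id.koumbit.net/anarcat')->status_line, "\n";
+~~~~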
+ +Update: i found a way to reproduce the problem even with LWPx::ParanoidAgent 1.07: + +~~~~ +$ perl -e 'use LWPx::ParanoidAgent; + print $LWPx::ParanoidAgent::VERSION, " $]\n"; + $ua = new LWPx::ParanoidAgent; for (my $i = 0; $i< 10 ; $i++) { $c = LWPx::ParanoidAgent->new->get + ("https://id.koumbit.net/anarcat") + ->decoded_content; if (length($c) < 100) { print $c; } else { print length($c),"\n";}}' +1.07 5.018002 +5720 +500 Can't read entity body: Ressource temporairement non disponible +500 Can't read entity body: Ressource temporairement non disponible +500 Can't read entity body: Ressource temporairement non disponible +500 Can't read entity body: Ressource temporairement non disponible +500 Can't read entity body: Ressource temporairement non disponible +500 Can't read entity body: Ressource temporairement non disponible +500 Can't read entity body: Ressource temporairement non disponible +500 Can't read entity body: Ressource temporairement non disponible +500 Can't read entity body: Ressource temporairement non disponible +~~~~ + +Workaround - disable error checking: + +~~~~ +--- /home/anarcat/src/ikiwiki/IkiWiki/Plugin/openid.pm 2014-02-03 20:21:09.502878631 -0500 ++++ /usr/share/perl5/IkiWiki/Plugin/openid.pm 2014-04-13 16:00:06.875744596 -0400 +@@ -237,7 +237,7 @@ + + my $ua; + eval q{use LWPx::ParanoidAgent}; +- if (! $@) { ++ if (! $@ && 0) { + $ua=LWPx::ParanoidAgent->new; + } + else { +~~~~ + +> I get the same trouble with OpenID and some locally installed versions of IkiWiki on Debian wheezy (server) as well as on 13.10 Ubuntu (laptop). To be precise I hit the *other* bug in LWP: [#738493](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=738493). +> +> My only workaround for now was to fix `PERL_LWP_SSL_VERIFY_HOSTNAME` to 0 directly in `ikiwiki` :-( -- [[users/bbb]] + +~~~~ +--- /usr/bin/ikiwiki.orig 2014-09-08 15:48:35.715868902 -0400 ++++ /usr/bin/ikiwiki 2014-09-08 15:48:38.895947911 -0400 +@@ -225,4 +225,5 @@ + } + } + ++$ENV{PERL_LWP_SSL_VERIFY_HOSTNAME} = 0; + main; +~~~~ + diff --git a/doc/bugs/osm_KML_maps_do_not_display_properly_on_google_maps.mdwn b/doc/bugs/osm_KML_maps_do_not_display_properly_on_google_maps.mdwn new file mode 100644 index 000000000..2b20240c4 --- /dev/null +++ b/doc/bugs/osm_KML_maps_do_not_display_properly_on_google_maps.mdwn @@ -0,0 +1,14 @@ +[[!template id=gitbranch branch=anarcat/master author="[[anarcat]]"]] + +I know this sounds backwards, but it seems to me that the KML-generated map should be displayable on google maps. KML is the standard Google uses for google maps, and since we use it, we should interoperate with them. God knows why this is failing, but it is and should probably be fixed for the sake of interoperability: -- [[users/anarcat]] + +> The KML only needs a Document tag because it uses "shared styles" -- don't ask me what this is. Here is a [[patch]]: [[https://reseaulibre.deuxpi.ca/0001-Add-Document-tag-to-OSM-plugin-KML-output.patch]] --[[deuxpi]] + +> > I applied the patch to my master branch and tested it on the above URL: it works... mostly. The icons for the elements on the actual map seem incorrect (some are the proper icons, some others are the ugly default blue pin of google maps, weird) but I think this is a step in the right direction. Thus, this should be merged. -- [[anarcat]] + +>>> I've cherry-picked this patch, but from the description it does not +>>> sound "fixed" enough to close this bug. 
(OTOH, perhaps only google can +>>> fix it, so it people are happy with the state of affairs I won't insist +>>> this bug be left open.) --[[Joey]] + +> > > > I am happy with this right now, so let's mark this as [[done]]. I do agree this seems like a google bug, so let's move on. --[[anarcat]] diff --git a/doc/bugs/osm_KML_maps_icon_path_have_a_trailing_slash.mdwn b/doc/bugs/osm_KML_maps_icon_path_have_a_trailing_slash.mdwn new file mode 100644 index 000000000..a3a88d138 --- /dev/null +++ b/doc/bugs/osm_KML_maps_icon_path_have_a_trailing_slash.mdwn @@ -0,0 +1,34 @@ +This is not a problem on Apache webservers because they, oddly enough, ignore trailing slashes on paths (maybe some `PATH_INFO` magic, no idea). But basically, in our wiki, the paths to the icon tags are generated with a trailing slash. An excerpt of our [KML file](http://wiki.reseaulibre.ca/map/pois.kml): + + + +Notice the trailing `/` after the `icon.png`. This breaks display on nginx - the file that gets served isn't the icon, but the frontpage for some reason. I followed the [[setup instructions|tips/dot cgi]] for Nginx that I just had to write because there weren't any, so maybe I screwed up some part, but it does seem to me that the trailing slash is wrong regardless. + +(Also notice how the style tag is being turned over backwards by the HTML sanitizer here, cute. :P) + +I wrote a crude hack for this, but this strikes me as a similar problem to the one we found in [[bugs/osm linkto() usage breaks map rendering]]. However, I am at a loss how to fix this cleanly because we cannot `will_render()` the tag icons, as they are already generated out there! Weird. Anyways, here's the stupid [[patch]]: + +[[!format diff """ +diff --git a/IkiWiki/Plugin/osm.pm b/IkiWiki/Plugin/osm.pm +index a7baa5f..c9650d0 100644 +--- a/IkiWiki/Plugin/osm.pm ++++ b/IkiWiki/Plugin/osm.pm +@@ -192,6 +192,7 @@ sub process_waypoint { + } + } + $icon = urlto($icon, $dest, 1); ++ $icon =~ s!/*$!!; # hack - urlto shouldn't be appending a slash in the first place + $tag = '' unless $tag; + register_rendered_files($map, $page, $dest); + $pagestate{$page}{'osm'}{$map}{'waypoints'}{$name} = { +"""]] + +I'm not writing this to a branch out of sheer shame of my misunderstanding. ;) There also may be a workaround that could be done in Nginx too. --[[anarcat]] + +> [[applied|done]], but I'm not happy with this either --[[Joey]] diff --git a/doc/bugs/osm_linkto__40____41___usage_breaks_map_rendering.mdwn b/doc/bugs/osm_linkto__40____41___usage_breaks_map_rendering.mdwn new file mode 100644 index 000000000..89c08b73c --- /dev/null +++ b/doc/bugs/osm_linkto__40____41___usage_breaks_map_rendering.mdwn @@ -0,0 +1,23 @@ +[[!template id=gitbranch branch=anarcat/master author="[[anarcat]]"]] + +Under some circumstances that remain unclear to me, the usage of `urlto()` in the revised version of the [[plugins/osm]] plugin break the map totally. The javascript console in Chromium tells me the following: + + GET http://mesh.openisp.ca/map/pois.kml/ 404 (Not Found) + +Indeed, that URL yields a 404. The proper URL is . --[[anarcat]] + +## Proposed solution + +The problem seems to be caused by `urlto()` being called for the `osm` +directive before the generated files are registered with `will_render()` +from the `waypoint` directive. Proposed patch adds a function that is +called from the `preprocess` hook for both directives that registers the +files. 
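+
+The helper is small; roughly this shape (a sketch only, not deuxpi's
+actual patch):
+
+    # in IkiWiki/Plugin/osm.pm: called from the preprocess hooks of
+    # both the osm and waypoint directives, so the map products are
+    # will_render()ed before any urlto() that points at them
+    sub register_rendered_files {
+        my ($map, $page, $dest)=@_;
+        return unless $page eq $dest;
+        will_render($page, "$map/pois.kml");
+        will_render($page, "$map/pois.json");
+    }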
+ +Here is a [[patch]] to IkiWiki/Plugin/osm.pm: + +--[[deuxpi]] + +I confirm the patch works, and I added it to my master branch. --[[anarcat]] + +> [[applied|done]]. Thanks guys. --[[Joey]] diff --git a/doc/bugs/osm_plugin_error_TypeError:_mapProjection_is_null.mdwn b/doc/bugs/osm_plugin_error_TypeError:_mapProjection_is_null.mdwn new file mode 100644 index 000000000..42e2edb2c --- /dev/null +++ b/doc/bugs/osm_plugin_error_TypeError:_mapProjection_is_null.mdwn @@ -0,0 +1,8 @@ +[[!template id=gitbranch branch=cbaines/osm-layers-patch author="[[cbaines]]"]] + +Using the osm plugin with a simple \[[!osm]] directive does not seem to work, a "TypeError: mapProjection is null" is given. I believe this is because the client side Javascript uses the options.layers, which is always Null. + +[[!tag patch]] +I have produced a patch for this issue, but beware, while it appears to fix the problem for me, I have little understanding of perl and the existing code base. + +> It looks sound, but I have yet to test it. --[[anarcat]] diff --git a/doc/bugs/osm_sometimes_looses_some_nodes.mdwn b/doc/bugs/osm_sometimes_looses_some_nodes.mdwn new file mode 100644 index 000000000..9de1b4e23 --- /dev/null +++ b/doc/bugs/osm_sometimes_looses_some_nodes.mdwn @@ -0,0 +1,5 @@ +I have heard repeated reports on that editing a page that has a waypoint in it will sometimes make that waypoint disappear from the main map. I have yet to understand why that happens or how, but multiple users have reported that. + +A workaround is to rebuild the whole wiki, although sometimes re-editing the same page will bring the waypoint back on the map. + +I have been able to reproduce this by simply creating a new node. It will not show up on the map until the wiki is rebuilt or the node is resaved. -- [[anarcat]] diff --git a/doc/bugs/pages_under_templates_are_invalid.mdwn b/doc/bugs/pages_under_templates_are_invalid.mdwn index f7e115d48..c031543c1 100644 --- a/doc/bugs/pages_under_templates_are_invalid.mdwn +++ b/doc/bugs/pages_under_templates_are_invalid.mdwn @@ -14,3 +14,6 @@ Maybe just encode all < and > when compling pages within the templates fol > I never noticed this bug, since it only happens if the htmlscrubber is > disabled. --[[Joey]] + +>> My `templatebody` branch on [[template creation error]] fixes this. +>> --[[smcv]] diff --git a/doc/bugs/pagetitle_function_does_not_respect_meta_titles.mdwn b/doc/bugs/pagetitle_function_does_not_respect_meta_titles.mdwn index c6e3cd4fd..15d28f989 100644 --- a/doc/bugs/pagetitle_function_does_not_respect_meta_titles.mdwn +++ b/doc/bugs/pagetitle_function_does_not_respect_meta_titles.mdwn @@ -279,3 +279,11 @@ So, looking at your meta branch: --[[Joey]] >>>> for the po plugin, because I want to merge the po plugin soon. >>>> If #2 gets tackled later, we will certianly have all kinds of fun. >>>> no matter what is done for the po plugin. --[[Joey]] + +>>>>> For the record: I've gotten used to the lack of this feature, +>>>>> and it now seems much less important to me than it was when +>>>>> initially developing the po plugin. So, I'm hereby officially +>>>>> removing this from my plate. If anyone else wants to start from +>>>>> scratch, or from my initial work, I'm happy to review the +>>>>> po-related part of things -- just drop me an email in this +>>>>> case. 
--[[intrigeri]] diff --git a/doc/bugs/password_reset_fails_with___34__Wide_character_in_subroutine_entry__34__.mdwn b/doc/bugs/password_reset_fails_with___34__Wide_character_in_subroutine_entry__34__.mdwn new file mode 100644 index 000000000..b9452a5ef --- /dev/null +++ b/doc/bugs/password_reset_fails_with___34__Wide_character_in_subroutine_entry__34__.mdwn @@ -0,0 +1,29 @@ +Similar to [[bugs/syslog_fails_with_non-ASCII_wikinames]], this bug happens when the wiki name has non-ascii characters in the site name. In my case, it has the "CⒶTS" string. + +We get the following error in a password reset: + + Error: Wide character in subroutine entry at /usr/share/perl5/Mail/Sendmail.pm line 308. + +Help! :) --[[anarcat]] + +> I assume this means Mail::Sendmail doesn't know how to send Unicode +> strings, so any string passed to it (or any message body, or something?) +> will need to be passed through `encode_utf8()`. It looks as though +> Mail::Sendmail also defaults to +> +> Content-Type: 'text/plain; charset="iso-8859-1"' +> +> so it'll need a `'Content-Type' => 'text/plain; charset="utf-8"'` +> too. +> +> I'm disappointed to see how many of the library modules used by ikiwiki +> are not Unicode-clean... but then again, Mail::Sendmail was last released +> in 2003 so it's hardly surprising. I wonder whether [[!cpan Email::Sender]] +> is any better? +> +> (If you know Python 2, the analogous situation would be "doesn't +> know how to send unicode objects, so you have to get a str object +> with `a_unicode_object.encode('utf-8')`".) --[[smcv]] + +>> Shameless plug: [[todo/passwordauth:_sendmail_interface]]. Though, I have +>> no idea whether that is UTF-8-safe. --[[tschwinge]] diff --git a/doc/bugs/po:_po4a_too_strict_on_html_pages.mdwn b/doc/bugs/po:_po4a_too_strict_on_html_pages.mdwn index eba59a682..d672d1c04 100644 --- a/doc/bugs/po:_po4a_too_strict_on_html_pages.mdwn +++ b/doc/bugs/po:_po4a_too_strict_on_html_pages.mdwn @@ -20,3 +20,5 @@ enabled: inserting a html tag without closing it is enough. > will be releasing that soon. I will cherry-pick the fix into at least > my debian-stable branch too. I don't know if this is worth doing a whole > security advisory for. --[[Joey]] + +[[done]] diff --git a/doc/bugs/poll_in_inline.mdwn b/doc/bugs/poll_in_inline.mdwn new file mode 100644 index 000000000..61c144915 --- /dev/null +++ b/doc/bugs/poll_in_inline.mdwn @@ -0,0 +1,6 @@ +When the poll directive appears in an inline, clicking on the button is +supposed to vote and go to the page for that poll. Instead, I see it always +apparantly skip counting my vote, and redirect to the page for that poll. 
+--[[Joey]] + +> [[fixed|done]] --[[Joey]] diff --git a/doc/bugs/possible_to_post_comments_that_will_not_be_displayed.mdwn b/doc/bugs/possible_to_post_comments_that_will_not_be_displayed.mdwn new file mode 100644 index 000000000..83d662cbf --- /dev/null +++ b/doc/bugs/possible_to_post_comments_that_will_not_be_displayed.mdwn @@ -0,0 +1,34 @@ +[[!template id=gitbranch branch=smcv/ready/comments author="[[smcv]]" +browse="http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/comments"]] +[[!tag patch users/smcv/ready]] + +The ability to post comments depends on several factors: + +* `comments_pagespec` controls whether comments on a particular + page will be displayed +* `comments_closed_pagespec` controls whether comments on + a particular page are allowed +* the `check_canedit` call controls whether comments are allowed + for a particular combination of page and user + +If `check_canedit` says that a user can post a comment +(in particular, if [[plugins/opendiscussion]] is enabled or +[[plugins/lockedit]] is disabled or permissive), +and `comments_closed_pagespec` does not contradict it, +then users who construct a `do=comment` CGI URL manually +can post comments that will not be displayed. I don't think +this is a security flaw as such, which is why I'm not +reporting it privately, but it violates least-astonishment. + +My `ready/comments` branch fixes this, by changing the test +at submission time from (pseudocode) + + !comments_closed_pagespec && check_canedit + +to + + comments_pagespec && !comments_closed_pagespec && check_canedit + +--[[smcv]] + +> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/possibly_po_related_error.mdwn b/doc/bugs/possibly_po_related_error.mdwn new file mode 100644 index 000000000..2a65ae606 --- /dev/null +++ b/doc/bugs/possibly_po_related_error.mdwn @@ -0,0 +1,20 @@ +A site got stuck like this: + +
+    /home/b-fusioninventory/public_html/documentation/index.es.html independently created, not overwriting with version from documentation.es
      +
      + +I tried rebuilding it, and the rebuild failed like this: + +
+    building recentchanges/change_ef4b9f92821335d96732c4b2c93ed96bc84c2f0d._change, which depends on templates/page.tmpl
+    removing recentchanges/change_9ca1de878ea654566ce4a8a031d1ad8ed135ea1c/index.html, no longer built by recentchanges/change_9ca1de878ea654566ce4a8a031d1ad8ed135ea1c
+    internal error: recentchanges/change_9ca1de878ea654566ce4a8a031d1ad8ed135ea1c._change cannot be found in /home/b-fusioninventory/source or underlay
      +
      + +This internal error seems like the root cause of the original failure. +ikiwiki crashed and did not record that it wrote the index.es.html file. + +Deleting the indexdb and rebuilding cleaned up the problem. + +This needs more investigation. --[[Joey]] diff --git a/doc/bugs/preprocessing_loop_control_too_tight.mdwn b/doc/bugs/preprocessing_loop_control_too_tight.mdwn new file mode 100644 index 000000000..7cf92af57 --- /dev/null +++ b/doc/bugs/preprocessing_loop_control_too_tight.mdwn @@ -0,0 +1,23 @@ +the preprocessing hook makes sure that no infinite loops occur by restricting the depth of nested directives to 3. + +this is insufficient in some situations in which sidebars are conditionally assembled from templates. + +given there are no limits on the number of directives per page and the number of edits a user can do in a particular time frame, i assume that raising that limit slightly won't make the DoS attacks that can be done against ikiwiki too much worse. + +i'd like to suggest 8 as a new value for recursion depth limit. most people can wrap their minds around a depth 3 nested directive setup, but when you reach a depth of 8, it's likely to be easier to write a dedicated plugin. + +
      +diff --git a/IkiWiki.pm b/IkiWiki.pm
      +index 75c9579..ad0f8b0 100644
      +--- a/IkiWiki.pm
      ++++ b/IkiWiki.pm
      +@@ -1487 +1487 @@ sub preprocess ($$$;$$) {
      +-                       if ($preprocessing{$page}++ > 3) {
      ++                       if ($preprocessing{$page}++ > 8) {
      +
      + +[[!tag patch]] + +> [[Seems reasonable|users/smcv/ready]] --smcv + +>> [[done]] --[[Joey]] diff --git a/doc/bugs/proxy.py_utf8_troubles.mdwn b/doc/bugs/proxy.py_utf8_troubles.mdwn new file mode 100644 index 000000000..7e8f70e59 --- /dev/null +++ b/doc/bugs/proxy.py_utf8_troubles.mdwn @@ -0,0 +1,35 @@ +when writing an external plugin using `proxy.py`, the getstate and setstate +functions don't accept unicode data: + + uncaught exception: 'ascii' codec can't encode character u'\xe4' in position 25: ordinal not in range(128) + Traceback (most recent call last): + File "proxy.py", line 309, in run + self._in_fd, self._out_fd) + File "proxy.py", line 192, in handle_rpc + ret = self._dispatcher.dispatch(method, params) + File "proxy.py", line 84, in dispatch + return self._dispatch(method, params) + File "/usr/lib/python2.7/SimpleXMLRPCServer.py", line 420, in _dispatch + return func(*params) + File "proxy.py", line 251, in hook_proxy + ret = function(self, *args) + File "/home/chrysn/git/ikiwiki-plugins//plugins/my_plugin", line 49, in data2html + proxy.setstate(kwargs['page'], 'meta', 'title', unicode_containing_umlauts) + File "proxy.py", line 291, in setstate + return self.rpc('setstate', page, id, key, value) + File "proxy.py", line 233, in rpc + *args, **kwargs) + File "proxy.py", line 178, in send_rpc + cmd, data)) + UnicodeEncodeError: 'ascii' codec can't encode character u'\xe4' in position 25: ordinal not in range(128) + +the culprit is the last `_debug_fn` invocation in `send_rpc` (line 178), where +unicode data is format-fed into a string. while this could be circumvented by +making the formatting string a unicode string, that would cause trouble with +python3 and we'd just move the problem to the stderr writing later on; instead, +"`cmd, data))`" should become "`cmd, repr(data)))`" and everything is fine. +debug output doesn't look that pretty any more, but is safe. + +--[[chrysn]] + +> ok, [[done]] --[[Joey]] diff --git a/doc/bugs/pythonproxy-utf8_again.mdwn b/doc/bugs/pythonproxy-utf8_again.mdwn new file mode 100644 index 000000000..f068782b4 --- /dev/null +++ b/doc/bugs/pythonproxy-utf8_again.mdwn @@ -0,0 +1,70 @@ +[[!template id=gitbranch branch=chrysn/more-proxy-utf8-fail author="[[chrysn]]"]] +[[!template id=gitbranch author="[[chrysn]], [[smcv]]" branch=smcv/ready/more-proxy-utf8-fail + browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/more-proxy-utf8-fail]] + +the recently introduced fixes for [[crashes in the python proxy even if disabled]] +caused the typical python2 implicit conversion failures ("'ascii' codec +can't...") on my debian sid system -- to fix it, i had to revert commit 154c4ea9e. + +i did not dig down all the way to the xml / xmlrpc modules, but my impression +is that some module changed its behavior between stable and sid and now +generates `unicode` strings instead of `str`. + +a [[patch]] to allow both versions by inspecting the types and en-/decoding on +demand should work both for anarcat's and my case. i did not test the python3 +version, but i'm pretty sure it was already broken after the abovementioned +patch. + +-- [[chrysn]] + +> update 2014-06-29: the problem persists, but i found it is not trivial to +> reproduce. 
to demonstrate, use this test plugin: +> +> #!/usr/bin/env python +> # -*- coding: utf-8 -*- +> +> from proxy import IkiWikiProcedureProxy +> +> def preprocess(self, proxy, *args): +> return repr(self.rpc('pagetype', 'schön')) +> +> proxy = IkiWikiProcedureProxy(__name__) +> proxy.hook('preprocess', preprocess, id='testdirective') +> proxy.run() +> +> note that when the 'schön' is stored in a variable, the exception changes -- +> it seems to me that the issue is related to the way exceptions are encoded. +> +> the suggested patch still applies and solves the issue. --[[chrysn]] + +>> In this patch band: +>> +>> - xml = _IkiWikiExtPluginXMLRPCHandler._read(in_fd).decode('utf8') +>> + response = _IkiWikiExtPluginXMLRPCHandler._read(in_fd) +>> + if isinstance(response, unicode): +>> + xml = response.encode('utf8') +>> +>> I think you mean `response.decode`, not `response.encode`. +>> +>> Other than that it looks good to me. I like the use of `repr` in debug +>> messages. --[[smcv]] + +>>> afaict, encode is fine there -- the relevant methods in python2 are +>>> `unicode.encode` which gives a `str`, and `str.decode` which usually gives +>>> a `unicode`. (i'd happily ditch python2 and port all plugins to python3, +>>> where this is all easier, but my [[todo/vCard rendering]] still uses an +>>> ancient module.) --[[chrysn]] + +>>>> You were right about this, `encode` is appropriate to go from `unicode` +>>>> to `str` under Python 2. However, Python 3 is still broken. +>>>> +>>>> My `ready/more-proxy-utf8-fail` branch, based on yours, +>>>> [[fixes the `rst` test when run under Python 3|bugs/rst_plugin_hangs_when_used_with_Python_3]] +>>>> and hopefully also fixes this one. Please check that it still +>>>> fixes your test-case too. +>>>> +>>>> Joey, I think this is [[ready for merge|users/smcv/ready]] even if it +>>>> doesn't fix chrysn's bug - it does fix Python 3 support +>>>> in general. --[[smcv]] + +>>>>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/recentchanges_sets_has__95__diffurl__61__1_when_diffurl_is_empty.mdwn b/doc/bugs/recentchanges_sets_has__95__diffurl__61__1_when_diffurl_is_empty.mdwn new file mode 100644 index 000000000..6c6e24b02 --- /dev/null +++ b/doc/bugs/recentchanges_sets_has__95__diffurl__61__1_when_diffurl_is_empty.mdwn @@ -0,0 +1,18 @@ +recentchanges.pm sets the template variable HAS_DIFFURL to 1 based solely on whether or not diffurl is defined. I found that diffurl was defined, but empty. The recentchanges template depends on this for recentchangesdiff to properly function -- diff toggling is dependent on HAS_DIFFURL evaluating to false. Adding a check for a non-zero length diffurl fixed the issue for me. A patch against ikiwiki-3.20121212 is as follows: + + --- a/IkiWiki/Plugin/recentchanges.pm 2013-01-27 20:08:59.000000000 -0800 + +++ b/IkiWiki/Plugin/recentchanges.pm 2013-01-27 20:08:30.000000000 -0800 + @@ -181,7 +181,8 @@ sub store ($$$) { + else { + $_->{link} = pagetitle($_->{page}); + } + - if (defined $_->{diffurl}) { + + if (defined $_->{diffurl} && + + length($_->{diffurl}) > 0) { + $has_diffurl=1; + } + + +(There should be one more line at the bottom with a single space on it...) + +> [[applied|done]] --[[Joey]] diff --git a/doc/bugs/redirect.mdwn b/doc/bugs/redirect.mdwn new file mode 100644 index 000000000..87f6a67e7 --- /dev/null +++ b/doc/bugs/redirect.mdwn @@ -0,0 +1,53 @@ +I suppose this isn't technically a bug, but whetever. + +I want symbolic links to be rendered as HTTP redirects. 
For example, +if we do this, + + touch foo.mkdwn + ln -s foo.mkdwn bar.mkdwn + git push baz.branchable.com + +then the following command should print 302 + + curl -o /dev/null -s -w "%{http_code}" http://baz.thomaslevine.com/bar/ + +> An interesting idea, but it conflicts somewhat with wanting symlinks to be +> treated as the referenced file when it's safe to do so, which would be +> great for [[todo/git-annex support]], and also good to avoid duplication +> for files in system-wide underlays. +> +> Also, I don't think this is possible without help from the web server +> configuration: for instance, under Apache, I believe the only way to get +> an HTTP 302 redirect is via Apache-specific `.htaccess` files or +> system-level Apache configuration. +> +> In current ikiwiki, you can get a broadly similar effect by either +> using \[[!meta redir=foo]] (which does a HTML `` redirect) +> or reconfiguring the web server. --[[smcv]] + +>> The CGI spec (http://www.ietf.org/rfc/rfc3875) says that a CGI can cause a redirect by returning a Location: header. +>> So it's possible; desirable (due to your point about conflicting with git-annex support) is a different matter. + +>>> One of the major things that separates ikiwiki from other wiki software +>>> is that ikiwiki is a wiki compiler: ordinary page-views are purely +>>> static HTML, and the CGI only gets involved when you do something +>>> that really has to be dynamic (like an edit). +>>> +>>> However, there is no server-independent static content that ikiwiki +>>> could write out to the destdir that would result in that redirect. +>>> +>>> If you're OK with requiring the [[plugins/404]] plugin (and a +>>> web server where it works, which I think still means Apache) then +>>> it would be possible to write a plugin that detected symlinks, +>>> stored them in the `%wikistate`, and used them to make the +>>> [[plugins/404]] plugin (or its own hook similar to the one +>>> in that plugin) do a 302 redirect instead of a 404. +>>> Similarly, a plugin that assumed a suitable Apache +>>> configuration with fairly broad `AllowOverrides`, +>>> and wrote out `.htaccess` files, would be a feasible thing +>>> for someone to write. +>>> +>>> I don't think this is a bug; I think it's a request for a +>>> feature that not everyone will want. The solution to those +>>> is for someone who wants the feature to +>>> [[write a plugin|plugins/write]]. --[[smcv]] diff --git a/doc/bugs/removal_of_transient_pages.mdwn b/doc/bugs/removal_of_transient_pages.mdwn index 2667a2b83..6d0caf42e 100644 --- a/doc/bugs/removal_of_transient_pages.mdwn +++ b/doc/bugs/removal_of_transient_pages.mdwn @@ -25,3 +25,54 @@ pages, until this is fixed. --[[Joey]] >>>> to affect by web edits. The `-f` check seems rather redundant, >>>> surely if it's in `%pagesources` ikiwiki has already verified it's >>>> safe. --[[Joey]] + +---- + +[[!template id=gitbranch branch=smcv/ready/transient-rm author="[[Simon McVittie|smcv]]"]] + +Here's a branch. It special-cases the `$transientdir`, but in such a way +that the special case could easily be extended to other locations where +deletion should be allowed. + +It also changes `IkiWiki::prune()` to optionally stop pruning empty +parent directories at the point where you'd expect it to (for instance, +previously it would remove the `$transientdir` itself, if it turns out +to be empty), and updates callers. + +The new `prune` API looks like this: + + IkiWiki::prune("$config{srcdir}/$file", $config{srcdir}); + +with the second argument optional. 
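+
+Roughly, the stop-at-a-directory behaviour this gives (a sketch of the
+idea, slightly simplified from the branch):
+
+    use File::Basename qw(dirname);
+
+    # delete $file, then remove newly-empty parent directories,
+    # but never ascend to $up_to itself if it is given
+    sub prune ($;$) {
+        my ($file, $up_to)=@_;
+        unlink($file);
+        my $dir=dirname($file);
+        while ((! defined $up_to || $dir =~ m{^\Q$up_to\E/}) &&
+               rmdir($dir)) {
+            $dir=dirname($dir);
+        }
+    }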
I wonder whether it ought to look +more like `writefile`: + + IkiWiki::prune($config{srcdir}, $file); + +although that would be either an incompatible change to internal API +(forcing all callers to update to 2-argument), or being a bit +inconsistent between the one-and two-argument forms. Thoughts? + +--[[smcv]] + +> I've applied the branch as-is, so this bug is [[done]]. +> `prune` is not an exported API so changing it would be ok.. +> I think required 2-argument would be better, but have not checked +> all the call sites to see if the `$file` is available split out +> as that would need. --[[Joey]] + +[[!template id=gitbranch branch=smcv/ready/prune author="[[Simon McVittie|smcv]]"]] + +>> Try this, then? I had to make some changes to `attachment` +>> to make the split versions available. I suggest reviewing +>> patch-by-patch. + +>>> Branch updated; I'd missed a use of prune in ikiwiki.in itself. +>>> Unfortunately, this means it does still need to support the +>>> "undefined top directory" case: there isn't an obvious top +>>> directory for wrappers. --[[smcv]] + +>> I also tried to fix a related bug which I found while testing it: +>> the special case for renaming held attachments didn't seem to work. +>> (`smcv/wip/rename-held`.) Unfortunately, it seems that with that +>> change, the held attachment is committed to the `srcdir` when you +>> rename it, which doesn't seem to be the intention either? --[[smcv]] diff --git a/doc/bugs/renaming_a_page_destroyed_some_links.mdwn b/doc/bugs/renaming_a_page_destroyed_some_links.mdwn new file mode 100644 index 000000000..fd7a80bd4 --- /dev/null +++ b/doc/bugs/renaming_a_page_destroyed_some_links.mdwn @@ -0,0 +1,12 @@ +When renaming a page here, ikiwiki destroyed unrelated links from unrelated pages. You can see the effect [here](http://mesh.openisp.ca/recentchanges/#diff-dc8dfa96efd3a4d649f571c3aa776f20b3ce0131), or by checking out the git tree (`git://mesh.openisp.ca/ +`) and looking at commit `dc8dfa96efd3a4d649f571c3aa776f20b3ce0131`. + +The renamed page was `configuration/bat-hosts` to `configuration/batman/bat-hosts` and the deleted links were ``\[[AUR | https://aur.archlinux.org/]]` and `\[[CHANGELOG|http://svn.dd-wrt.com:8000/browser/src/router/batman-adv/CHANGELOG]]`. --[[anarcat]] + +> Nevermind that, that commit was unrelated to the rename and probably an operator error. - No, actually, I just reproduced this again - see [another example](http://mesh.openisp.ca/recentchanges/#diff-d67dc2f0fdc149b13122fd6cba887a01c693e949). + +>> Looks like these all involve the wacky wikilink form that includes an +>> external url in the link. Fixed rename code to know about those. +>> [[done]] --[[Joey]] + +>>> Phew!!! Thanks a *lot* for that one, it was really annoying! :) --[[anarcat]] diff --git a/doc/bugs/rst_plugin_fails_with___34__uncaught_exception:___39__ascii__39___codec_can__39__t_encode_character__34__.mdwn b/doc/bugs/rst_plugin_fails_with___34__uncaught_exception:___39__ascii__39___codec_can__39__t_encode_character__34__.mdwn new file mode 100644 index 000000000..1893e7089 --- /dev/null +++ b/doc/bugs/rst_plugin_fails_with___34__uncaught_exception:___39__ascii__39___codec_can__39__t_encode_character__34__.mdwn @@ -0,0 +1,40 @@ + I get this error when enabling the `rst` plugin. I am running IkiWiki +3.20130904.1ubuntu1 on Ubuntu 14.04 in a non-English UTF-8 locale; the +pages can also contain characters in UTF-8 encoding. 
+ + uncaught exception: 'ascii' codec can't encode character u'\xa9' in position 13: ordinal not in range(128) + Traceback (most recent call last): + File "/usr/lib/ikiwiki/plugins/proxy.py", line 309, in run + self._in_fd, self._out_fd) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 192, in handle_rpc + ret = self._dispatcher.dispatch(method, params) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 84, in dispatch + return self._dispatch(method, params) + File "/usr/lib/python2.7/SimpleXMLRPCServer.py", line 420, in _dispatch + return func(*params) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 253, in hook_proxy + "{0} hook `{1}' returned: [{2}]".format(type, name, ret)) + UnicodeEncodeError: 'ascii' codec can't encode character u'\xa9' in position 13: ordinal not in range(128) + + Traceback (most recent call last): + File "/usr/lib/ikiwiki/plugins/rst", line 86, in + proxy.run() + File "/usr/lib/ikiwiki/plugins/proxy.py", line 317, in run + self.error('uncaught exception: {0}\n{1}'.format(e, tb)) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 298, in error + self.rpc('error', msg) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 233, in rpc + *args, **kwargs) + File "/usr/lib/ikiwiki/plugins/proxy.py", line 173, in send_rpc + raise GoingDown() + proxy.py.GoingDown + +A fix is akin to the one for +: change +`...format(type, name, ret)` in `proxy.py` line 253 to `format(type, +name, repr(ret))` (which should not hurt since it's a message +for debugging purposes only). + + +> this is [[fixed|done]] in commit [154c4ea9](http://source.ikiwiki.branchable.com/?p=source.git;a=commit;h=154c4ea9e65d033756330a7f8c5c0fa285380bf0) +> (november 2013), which is included in 3.20140227. --[[chrysn]] diff --git a/doc/bugs/rst_plugin_hangs_when_used_with_Python_3.mdwn b/doc/bugs/rst_plugin_hangs_when_used_with_Python_3.mdwn new file mode 100644 index 000000000..ca0738ad5 --- /dev/null +++ b/doc/bugs/rst_plugin_hangs_when_used_with_Python_3.mdwn @@ -0,0 +1,35 @@ +During ikiwiki make phase the rst process hangs: +[ps output](http://dpaste.com/21TQQKT) +[gdb backtrace 1](http://dpaste.com/0VQBW6D) +[gdb backtrace 1](http://dpaste.com/1VHS88Y) + +working with python 2.7 +[http://dpaste.com/0985A91](http://dpaste.com/0985A91) +not working with python3.3~3.4 +[http://dpaste.com/0ACNK3W](http://dpaste.com/0ACNK3W) + +> Retitled this bug report since it seems to be specific to Python 3. +> +> The `rst` plugin is probably more commonly used with Python 2. +> It seems likely that there is some Python-3-specific bug in `proxy.py`, +> perhaps introduced by [commit 154c4ea + "properly encode and decode from/to utf8 when sending rpc to ikiwiki"]( +http://source.ikiwiki.branchable.com/?p=source.git;a=commitdiff;h=154c4ea9e65d033756330a7f8c5c0fa285380bf0). +> +> I can reproduce this on Debian by installing `python3-docutils` +> and changing the first line of `plugins/proxy.py`, the first +> line of `plugins/pythondemo`, the first line of `plugins/rst` +> and the `system()` call in `t/rst.t` to use `python3` instead +> of `python`. --[[smcv]] + +looks like the problem is in proxy.py +ml = _IkiWikiExtPluginXMLRPCHandler._read(in_fd).decode('utf8') + +without decode('utf8') is working + +> That call was introduced +> [[to fix a bug under Python 2|bugs/crashes_in_the_python_proxy_even_if_disabled]] +> so it cannot just be removed, but I've put a proposed branch on +> [[this related bug|bugs/pythonproxy-utf8_again]]. 
[[!tag patch]] --smcv + +tested and fixed with patch [http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/38bd51bc1bab0cabd97dfe3cb598220a2c02550a](http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/38bd51bc1bab0cabd97dfe3cb598220a2c02550a) and patch [http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/81506fae8a6d5360f6d830b0e07190e60a7efd1c](http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/81506fae8a6d5360f6d830b0e07190e60a7efd1c) diff --git a/doc/bugs/search_plugin_finds_no_results_with_xapian_1.2.7.mdwn b/doc/bugs/search_plugin_finds_no_results_with_xapian_1.2.7.mdwn index 5509efefe..3bc430f68 100644 --- a/doc/bugs/search_plugin_finds_no_results_with_xapian_1.2.7.mdwn +++ b/doc/bugs/search_plugin_finds_no_results_with_xapian_1.2.7.mdwn @@ -8,3 +8,7 @@ I found that Debian stable is currently shipping 1.2.3, and on a hunch, I built > Debian has 1.2.7 now, and I have it installed and searching is working > fine with it. --[[Joey]] + +> I have this same issue. I tried xapian version 1.2.5. 1.2.8, 1.2.13. I will try and see if installing 1.2.3 fixes this issue. --[[Ramsey]] + +> 1.2.3 didn't fix the issue either --[[Ramsey]] diff --git a/doc/bugs/sidebar_not_updated_in_unedited_subpages.mdwn b/doc/bugs/sidebar_not_updated_in_unedited_subpages.mdwn new file mode 100644 index 000000000..c3e0ee18c --- /dev/null +++ b/doc/bugs/sidebar_not_updated_in_unedited_subpages.mdwn @@ -0,0 +1,9 @@ +I turned on the sidebar plugin, with global_sidebars on (in the web setup page), created a sidebar page in the root, and edited the sidebar a few times. + +I then noticed that all pages on the root had been updated with a sidebar, but no subpages (i.e. a/b). Only after editing a subpage did it get a sidebar. Editing sidebar itself only updated subpages with sidebars, the other subpages had not been refreshed (proven by their unchanged filesystem date) + +After calling ikiwiki --setup on the command line all pages were updated. So this seems to be a difference between web-started --setup and command-line --setup. Or it just doesn't work the first time --setup is called after sidebars are enabled. + + + + diff --git a/doc/bugs/structured_config_data_is_mangled.mdwn b/doc/bugs/structured_config_data_is_mangled.mdwn new file mode 100644 index 000000000..869d48e96 --- /dev/null +++ b/doc/bugs/structured_config_data_is_mangled.mdwn @@ -0,0 +1,61 @@ +Put something like this in the setup file: + +~~~ +conversion: + - from: odt + to: pdf + command: [unoconv, -f, pdf, -o, $OUTPUTDIR, $INPUTFILE] + - from: ditaa + to: png + command: [ditaa, $INPUTFILE, $OUTPUTFILE, -s, 0.7] +~~~ + +However `Dumper($config{conversion})` shows: + +~~~ +$VAR1 = [ + 'HASH(0x164e1a0)', + 'HASH(0x164e3c8)' + ]; +~~~ + +I think it is getting mangled in `sub merge` in `IkiWiki/Setup.pm` and its calls to `possibly_foolish_untaint` + +Workaround: force the array values to be strings, and then re-parse them using YAML::XS::Load: + +~~~ +conversion: + - | + from: [odt, odp] + to: pdf + command: [unoconv, -f, pdf, -o, $OUTPUTDIR, $INPUTFILE] + - | + from: ditaa + to: png + command: [ditaa, $INPUTFILE, $OUTPUTFILE, -s, 0.7] + +... 
+
+sub checkconfig {
+    if (!defined $config{conversion} || ref $config{conversion} ne "ARRAY") {
+        error(sprintf(gettext("Must specify '%s' and it must be a list"), "conversion"));
+    }
+    for (my $i=0; $i < @{$config{conversion}}; $i++) {
+        $config{conversion}->[$i] = YAML::XS::Load($config{conversion}->[$i]) if
+            ref $config{conversion}->[$i] ne 'HASH';
+    }
+}
+~~~
+
+> `getsetup` defines config options to be one of: boolean, string, integer,
+> pagespec, "internal" (non-user-visible string), ref to an array of one of
+> those scalar types, or ref to a hash { string => one of those scalar types }.
+> IkiWiki::Setup also appears to support regexps (qr//), although that's
+> not documented (presumably they're treated the same as strings).
+>
+> Supporting arbitrary arrays/hashes as values would require some way to
+> untaint the values recursively.
+>
+> Complex config data also can't be used with the [[plugins/websetup]]
+> plugin, which currently supports everything that IkiWiki::Setup does,
+> except for hashes. --[[smcv]]
diff --git a/doc/bugs/svg_and_pdf_conversion_fails.mdwn b/doc/bugs/svg_and_pdf_conversion_fails.mdwn
new file mode 100644
index 000000000..ac18fe8aa
--- /dev/null
+++ b/doc/bugs/svg_and_pdf_conversion_fails.mdwn
@@ -0,0 +1,58 @@
+[[!template id=gitbranch branch=chrysn/imgforpdf author="[[chrysn]]"]]
+
+when using the [[img plugin|plugins/img]] with an svg file, it is supposed to
+convert it into a png, both for display in all browsers and because the
+typical use case is rendering small preview versions.
+
+this currently doesn't work (at least with graphicsmagick-libmagick-dev-compat
+1.3.18-1) due to the sequence in which imagemagick options are set; it needs
+an extension to work for pdfs (or any other imagemagick compatible file) too,
+and should have an additional parameter for page selection.
+
+i've provided a series of [[!taglink patch]]es in the chrysn/imgforpdf [[git]]
+branch.
+
+i'd prefer to go a step further, and not only convert pdf and svg files to png,
+but everything (with the possible exception of jpg files), as most other image
+formats can't be displayed in a browser anyway -- but i didn't in this patch
+series, as it would alter the file names of existing images, and i don't know
+if that needs special care or breaks something i don't use; this way, my
+patches should be safe for inclusion.
+
+--[[chrysn]]
+
+> update 2014-06-29: the patch still applies and fixes the issue. in the
+> meantime, i noticed that the desired effect doesn't happen when no explicit
+> size is set. as scalable graphics don't necessarily have a natural size
+> anyway, i don't consider that a showstopper. --[[chrysn]]
+
+>> This all looks good in principle, but I would like to do a more detailed
+>> review, and test it with "real ImageMagick" in case its behaviour differs
+>> from GraphicsMagick.
+>>
+>> An automated regression test for the desired behaviour in `t/` would
+>> be great. There are SVGs and PNGs in the docwiki already; there are no
+>> JPEGs or PDFs, but perhaps you could add a trivially small example
+>> of each to `t/`? Imitating `t/tag.t` or `t/trail.t`, and skipping the
+>> test if the required modules are missing like `t/podcast.t` does,
+>> seems like it would work best.
+>>
+>> I agree that everything not in an interoperable web format should be
+>> converted to PNG when it's scaled down, but yes, that's more likely
+>> to be a breaking change, so it seems best to do that as a separate
+>> branch.
In practice I think this means JPEG -> JPEG and everything +>> else -> PNG, since JPEG is commonly used for photos and photo-like +>> images that don't compress well under lossless compression. --[[smcv]] + +>>> i've added a unit test and tested it with the [[!debsid perlmagick]] +>>> package, the [[!debsid graphicsmagick-libmagick-dev-compat]] package and +>>> the experimental [[!debpts libimage-magick-perl]] package (where the +>>> [[!debpts libmagickcore-6.q16-2-extra]] package is required too), in the +>>> meantime filing [[!debbug 753770]]. (why is it that it sometime seems i +>>> find more bugs in ikiwiki's dependencies than in itself when working with +>>> it?) +>>> +>>> the unit test also checks for file removal when it is not created any more, +>>> which works, so my biggest fear about the all-to-png change is unwarranted. +>>> i'll have a look at that some time, but i think as things are, this is +>>> ready now, please review again. --[[chrysn]] diff --git a/doc/bugs/syslog_fails_with_non-ASCII_wikinames.mdwn b/doc/bugs/syslog_fails_with_non-ASCII_wikinames.mdwn new file mode 100644 index 000000000..0d40d232a --- /dev/null +++ b/doc/bugs/syslog_fails_with_non-ASCII_wikinames.mdwn @@ -0,0 +1,32 @@ +[[!template id=gitbranch branch=anarcat/dev/syslog_utf8 author="[[anarcat]]"]] + +[[this feature|todo/syslog_should_show_wiki_name]] made it so syslog doesn't work anymore if the site being logged has non-ASCII characters it in. + +Specifically, my wiki was named "CⒶTS", and nothing was showing up in syslog. When I changed that to "C@TS", it worked again. + +My guess is this sits somewhere here: + +[[!format perl """ + return eval { + Sys::Syslog::syslog($type, "[$config{wikiname}] %s", join(" ", @_)); + }; +"""]] + +Yet I am not sure how to fix that kind of problem in Perl... --[[anarcat]] + +> If I remove the "eval" above, I get: +> +> Error: Wide character in syswrite at /usr/lib/perl/5.14/Sys/Syslog.pm line 485. +> +> I have improved a little the error handling in log_message() so that we see *something* when syslog fails, see the branch documented above. I can also confirm that reverting [[todo/syslog_should_show_wiki_name]] fixes the bug. Finally, I have a unit test that reproduces the problem in git, and a working patch for the bug, again in git. +> +> > One last note: I noticed that this problem also happens elsewhere in ikiwiki. For example, the [[plugins/notifyemail]] plugin will silently fail to send notifications if the pages contain unicode. The [[plugins/notifychanges]] plugin I am working on (in [[todo/option to send only the diff in notifyemail]]) seems to be working around the issue so far, but there's no telling which similar problem are out there. + +>> I'd merge it. --[[smcv]] + +>>> I've merged it, but I don't feel it fixes this bug. --[[Joey]] + +>>>> (I removed the patch tag to take it off the patches list.) +>>>> +>>>> What else is needed? Systematic classification of outputs into +>>>> those that do and don't cope with Unicode? 
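+>>>>
+>>>> For whatever it's worth, the shape of guard that each byte-oriented
+>>>> sink would need is roughly this (a sketch for illustration only, not
+>>>> the patch that was merged):
+>>>>
+>>>>     # work on a copy: callers may want to keep the decoded string
+>>>>     sub utf8_bytes ($) {
+>>>>         my $s=shift;
+>>>>         utf8::encode($s) if utf8::is_utf8($s);
+>>>>         return $s;
+>>>>     }
+>>>>
+>>>>     Sys::Syslog::syslog($type, "%s", utf8_bytes($message));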
--[[smcv]]
diff --git a/doc/bugs/template__95__syntax_test_is_incomplete.mdwn b/doc/bugs/template__95__syntax_test_is_incomplete.mdwn
new file mode 100644
index 000000000..d50b727e8
--- /dev/null
+++ b/doc/bugs/template__95__syntax_test_is_incomplete.mdwn
@@ -0,0 +1,10 @@
+[[!template id=gitbranch branch=smcv/ready/template-syntax-test
+  browse="http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/template-syntax-test"
+  author="[[smcv]]"]]
+[[!tag patch]]
+
+`t/template_syntax.t` looks as though it's meant to check the syntax of
+`doc/templates/*.mdwn` as well as `templates/*.tmpl`, but it doesn't.
+Patch in my git repository. --[[smcv]]
+
+> [[merged|done]] --[[Joey]]
diff --git a/doc/bugs/template_creation_error.mdwn b/doc/bugs/template_creation_error.mdwn
new file mode 100644
index 000000000..d1fb788f5
--- /dev/null
+++ b/doc/bugs/template_creation_error.mdwn
@@ -0,0 +1,270 @@
+Hi,
+I am trying to build a template. The compilation of this template results in a weird exception. I have isolated the cause of the exception to the following point:
+
+If I have this in the template code:
+
+\[[!inline
      +pages="\"
      +template=extract-entry
      +\]]
      + +There is no problem at all. I can use the template with the desired result. But if I try to use this (just adding the "show" parameter): + +\[[!inline
      +pages="\"
      +template=extract-entry
      +show=\
      +\]]
      + +I get this exception on the Git bash console: + +
      +$ git push
      +Counting objects: 7, done.
      +Delta compression using up to 8 threads.
      +Compressing objects: 100% (4/4), done.
      +Writing objects: 100% (4/4), 410 bytes, done.
      +Total 4 (delta 3), reused 0 (delta 0)
      +remote: From /home/b-odelama-com/source
      +remote:    eb1421e..5e1bac5  master     -> origin/master
      +remote: Argument "\x{3c}\x{54}..." isn't numeric in numeric lt (<) at /usr/share/perl5/IkiWiki/Plugin/inline.pm line 231.
      +remote: Argument "\x{3c}\x{54}..." isn't numeric in numeric lt (<) at /usr/share/perl5/IkiWiki/Plugin/inline.pm line 231.
      +To ssh://b-odelama-com@odelama-com.branchable.com/
      +   eb1421e..5e1bac5  master -> master
      +
+
+Please, let me know what to do to avoid this kind of error.
+
+> When you add a template page `templates/foo.mdwn` for use with
+> the [[ikiwiki/directive/template]] directive, two things happen:
+>
+> 1. `\[[!template id=foo ...]]` becomes available;
+> 2. a wiki page `templates/foo` is built, resulting in an HTML file,
+>    typically `templates/foo/index.html`
+>
+> The warnings you're seeing come from the second of these: when ikiwiki
+> tries to process `templates/foo.mdwn` as an ordinary page, without
+> interpreting the `<TMPL_VAR>` directives, `inline` receives invalid
+> input.
+>
+> This is a bit of a design flaw in [[plugins/template]] and
+> [[plugins/edittemplate]], I think - ideally it would be possible to
+> avoid parts of the page being interpreted when the page is being
+> rendered normally rather than being used as a template.
+>
+> There *is* a trick to avoid parts of the page being interpreted when
+> the page is being used as a template, while having them appear
+> when it's rendered as a page:
+>
+>     <TMPL_IF FALSE>
+>     \[[!meta robots="noindex,nofollow"]]
+>     This template is used to describe a thing. Parameters:
+>     * name: the name of the thing
+>     * size: the size of the thing
+>     </TMPL_IF>
+>
+>     The thing is called <TMPL_VAR name> and its size is <TMPL_VAR size>
+>
+> I suppose you could maybe extend that to something like this:
+>
+>     <TMPL_IF FALSE>
+>     \[[!meta robots="noindex,nofollow"]]
+>     This template is used to describe a thing. Parameters:
+>     * name: the name of the thing
+>     * size: the size of the thing
+>     </TMPL_IF>
+>
+>     <TMPL_IF FALSE>
+>     \[[!if test="included() and !included()" then="""
+>     </TMPL_IF>
+>
+>     The thing is called <TMPL_VAR name> and its size is <TMPL_VAR size>
+>
+>     <TMPL_IF FALSE>
+>     """]]
+>     </TMPL_IF>
+>
+> but that's far harder than it ought to be!
+>
+> Perhaps the right solution would be to change how the template plugin
+> works, so that templates are expected to contain a new `definetemplate`
+> directive:
+>
+>     This template is used to describe a thing. Parameters:
+>     * name: the name of the thing
+>     * size: the size of the thing
+>
+>     \[[!definetemplate """
+>     The thing is called <TMPL_VAR name> and its size is <TMPL_VAR size>
+>     """]]
+>
+> with templates not containing a `\[[!definetemplate]]` being treated
+> as if the whole text of the page was copied into a `\[[!definetemplate]]`,
+> for backwards compatibility?
+>
+> --[[smcv]]
+
+>> OK, here is a branch implementing what I said. It adds the `definetemplate`
+>> directive to [[plugins/goodstuff]] as its last commit.
+>>
+>> Templates with the current strange semantics will still work, until
+>> IkiWiki breaks compatibility.
+>>
+>> Possible controversies:
+>>
+>> * Should the `definetemplate` plugin be core, or in goodstuff, or neither?
+>>
+>> * Should \[[!definetemplate]] be allowed on any page (with the implementation
+>>   of `template("foo")` looking for a `definetemplate` in `templates/foo`,
+>>   then a `definetemplate` in `foo`, then falling back to the current logic)?
+>>   If not, should \[[!definetemplate]] raise an error when used on a page not
+>>   in `templates/`, since it will have no practical effect there?
+>>
+>> * Is it OK to rely on `definetemplate` being enabled in the basewiki's
+>>   templates?
+>>
+>> * Should the "use definetemplate" wording in the documentation of
+>>   template and edittemplate be stronger? Should those plugins automatically
+>>   load definetemplate?
+>>
+>> --[[smcv]]
+
+>>> this looks like a good idea to me.
+>>>
+>>> * i'd put it in core, and add a transition for the time compatibility gets
+>>>   broken, provided the transitioning system will be used in that. templates
+>>>   can't be expected to just work as markdown+ikiwiki too.
+>>> +>>> (it being in core would also solve my qualms about `section => "web"` / +>>> `\[[!tag type/web]]`). +>>> +>>> * if definetemplate gets deemed core, no "use definetemplate!" notes on the +>>> template/edittemplate pages will be required any more. +>>> +>>> * first i was sceptical of the approach of re-running scan to make sure the +>>> `my %templates` is filled, but it is indeed a practical solution. +>>> +>>> * the name "`definetemplate`" gives me the first impression that something +>>> is assigned (as in `#define`), but actually it highlights a region in the +>>> file. wouldn't "`templatebody`" be a better description of the meaning of +>>> the directive? +>>> +>>> --[[chrysn]] + +>>>> Thanks for your feedback! +>>>> Looking at its description on this wiki, I agree that `type/web` doesn't +>>>> fit, and core does seem better. I like your `templatebody` suggestion, +>>>> too, particularly if templates remain restricted to `/templates`. +>>>> I'll try to come up with better wording for the documentation to say +>>>> "use `templatebody`, like this", with a note about backwards +>>>> compatibility later. +>>>> +>>>> Rationale for `my %templates`: yes it does seem a bit odd, but +>>>> if I used `$pagestate{$tpage}{template}` instead of a `my` variable, +>>>> I'd sometimes _still_ have to force a `scan`, because +>>>> [[plugins/template]] has to expand the template at scan time so that +>>>> it can contain links etc. - so I have to make sure that if the +>>>> template has changed, it has already been scanned (scanning happens +>>>> in random order, so that can't be guaranteed). This means there's +>>>> no benefit in reading it back from the index, so it might as well +>>>> just be in-memory. +>>>> +>>>> I suppose an alternative way to do it would be to remember what was +>>>> passed to `needsbuild`, and only force a `scan` for templates that +>>>> were in that list - which potentially reduces CPU time and I/O a +>>>> little, in exchange for a bigger index. I could do that if Joey +>>>> wants me to, but I think the current approach is simpler, +>>>> so I'll stick with the current approach if it isn't vetoed. +>>>> --[[smcv]] + +>>>>> @name: even outside `/templates`, `\[[!templatebody]]` would be +>>>>> interpreted as "when this page is used as a template, this is what its +>>>>> contents should be", and be suitable. +>>>>> +>>>>> @`%templates`: my surprise wasn't to it not being in `%pagestate`, but +>>>>> rather that the `scan` function was used for it at all, rather than plain +>>>>> directive parsing that ignores everything else -- but i agree that it's +>>>>> the right thing to do in this situation. +>>>>> +>>>>> --[[chrysn]] + +---- + +[[!template id=gitbranch author="[[smcv]]" branch=smcv/ready/templatebody + browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/templatebody]] +[[!tag patch users/smcv/ready]] +Branch and directive renamed to `ready/templatebody` as chrysn suggested. +It's on-by-default now (or will be if that branch is merged). +Joey, any chance you could review this? + +There is one known buglet: `template_syntax.t` asserts that the entire +file is a valid HTML::Template, whereas it would ideally be doing the +same logic as IkiWiki itself. I don't think that's serious. --[[smcv]] + +> Looking over this, I notice it adds a hash containing all scanned +> files. This seems to me to be potentially a scalability problem on +> rebuild of a site with many pages. 
Ikiwiki already keeps a lot +> of info in memory, and this adds to it, for what is a fairly +> minor reason. It seems to me there should be a way to avoid this. --[[Joey]] + +>> Maybe. Are plugins expected to cope with scanning the same +>> page more than once? If so, it's just a tradeoff between +>> "spend more time scanning the template repeatedly" and +>> "spend more memory on avoiding it", and it would be OK to +>> omit that, or reduce it to a set of scanned *templates* +>> (in practice that would mean scanning each template twice +>> in a rebuild). --s +>>> [Commit f7303db5](http://source.ikiwiki.branchable.com/?p=source.git;a=commitdiff;h=f7303db5) +>>> suggests that scanning the same page more than once is problematic, +>>> so that solution is probably not going to work. +>>> +>>> The best idea I've come up with so far is to track whether +>>> we're in the scan or render phase. If we're in the scan +>>> phase, I think we do need to keep track of which pages +>>> we've scanned, so we don't do them again? (Or perhaps that's +>>> unnecessary - commit f7303db5 removed a scan call that's in +>>> the render phase.) If we're in the render phase, we can assume +>>> that all changed pages have been scanned already, so we can +>>> drop the contents of `%scanned` and rely on a single boolean +>>> flag instead. +>>> +>>> `%scanned` is likely to be no larger than `%rendered`, which +>>> we already track, and whose useful lifetime does not overlap +>>> with `%scanned` now. I was tempted to merge them both and call +>>> the result `%done_in_this_phase`, but that would lead to really +>>> confusing situations if a bug led to `render` being called sooner +>>> than it ought to be. +>>> +>>> My ulterior motive here is that I would like to formalize +>>> the existence of different phases of wiki processing - at the +>>> moment there are at least two phases, namely "it's too soon to +>>> match pagespecs reliably" and "everything has been scanned, +>>> you may use pagespecs now", but those phases don't have names, +>>> so [[plugins/write]] doesn't describe them. +>>> +>>> I'm also considering adding warnings +>>> if people try to match a pagespec before scanning has finished, +>>> which can't possibly guarantee the right result, as discussed in +>>> [[conditional_preprocess_during_scan]]. My `wip-too-soon` branch +>>> is a start towards that; the docwiki builds successfully, but +>>> the tests that use IkiWiki internals also need updating to +>>> set `$phase = PHASE_RENDER` before they start preprocessing. --s + +>>>> reviewing those modifications, i think this is a good way to go. along +>>>> with warning about pagespecs evaluated in scan phase, i think it should be +>>>> an error to invoke scan in the render phase; that would mean that +>>>> `readtemplate` needs to check whether it's invoked as a scan or not to +>>>> decide whether to scan the template page, but would be generally more +>>>> robust for future plugin writing. +>>>> +>>>> **addendum**: if the new phase state is used to create warnings/errors +>>>> about improper ikiwiki api use of plugins (which is something i'd +>>>> advocate), that should likewise warn if `add_link` actually adds a link in +>>>> the render phase. such a warning would have helped spotting the +>>>> link-related [[template evaluation oddities]] earlier. 
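+>>>>
+>>>> in code, the kind of guard meant above could be as small as this (a
+>>>> sketch with made-up names, not the actual `wip-too-soon` branch):
+>>>>
+>>>>     use constant { PHASE_SCAN => 0, PHASE_RENDER => 1 };
+>>>>     our $phase = PHASE_SCAN;
+>>>>     our %links;
+>>>>
+>>>>     sub add_link ($$;$) {
+>>>>         my ($page, $link) = @_;
+>>>>         # a link recorded this late is invisible to pagespecs
+>>>>         warn "add_link from $page to $link during render phase"
+>>>>             if $phase == PHASE_RENDER;
+>>>>         push @{$links{$page}}, $link;
+>>>>     }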
--[[chrysn]]
diff --git a/doc/bugs/template_evaluation_oddities.mdwn b/doc/bugs/template_evaluation_oddities.mdwn
new file mode 100644
index 000000000..06ef57375
--- /dev/null
+++ b/doc/bugs/template_evaluation_oddities.mdwn
@@ -0,0 +1,67 @@
+[[ikiwiki/directive/template]]s expose odd behavior when it comes to composing
+links and directives:
+
+* the parameters are passed through the preprocessor twice, once on a
+  per-parameter basis and once for the final result (which usually contains the
+  preprocessed parameters).
+
+  one of the results is that you have to write:
+
+      \[[!template id="infobox" body="""
+      Just use the \\\[[!template]] directive!
+      """]]
+
+  (that'd be three backslashes in front of the opening [.)
+
+  this also means that parts which are not used by the template at all still
+  have their side effects without showing.
+
+  furthermore, the evaluation sequence is hard to predict. this might or might
+  not be a problem, depending on whether someone comes up with a less contrived
+  example (this one assumes a ``\[[!literal value]]`` directive that just
+  returns value but protects it from the preprocessor):
+
+  we can use `\[[!literal """[[!invalid example]]"""]]`, but we can't use
+  `\[[!template id=literalator value="""[[!invalid example]]"""]]` with a
+  'literalator' template `\[[!literal """<TMPL_VAR value>"""]]`, because then
+  the `invalid` directive comes into action in the first (per-argument)
+  preprocessor run
+
+* links in templates are not stored at all; they appear, but the backlinks
+  don't work unless the link is explicit in one of the arguments.
+
+      \[[!template id="linker" destination="foo"]]
+
+  with a 'linker' template like
+
+      Go to \[[<TMPL_VAR destination>]]!
+
+  would result in a link to 'destination', but would not be registered in the
+  scan phase and thus not show a backlink from 'foo'.
+
+  (a ``\[[!link to=...]]`` directive, as suggested in
+  [[todo/flexible relationships between pages]], does get evaluated properly
+  though.)
+
+  this seems to be due to linkification being called before preprocess rather
+  than as a part of it, or (if that is on purpose) by the template plugin not
+  running linkification as an extra step (not even once).
+
+(nb: there is a way to include the ``raw_`` value of a directive, but that only
+refers to htmlification, not directive evaluation.)
+
+both those behaviors are non-intuitive and afaict undocumented. personally, i'd
+swap them out for passing the parameters as-is to the template, then running
+the linkifier and preprocessor on the final result. that would be as if all
+parameters were queried `raw_` -- then again, i don't see where `raw_` makes
+anything not work that worked originally, so obviously i'm missing something.
+
+i think it boils down to one question: are those behaviors necessary for
+compatibility reasons, and if yes, why?
+
+--[[chrysn]]
diff --git a/doc/bugs/toc_displays_headings_from_sidebar.mdwn b/doc/bugs/toc_displays_headings_from_sidebar.mdwn
new file mode 100644
index 000000000..469ca8a33
--- /dev/null
+++ b/doc/bugs/toc_displays_headings_from_sidebar.mdwn
@@ -0,0 +1,3 @@
+The [[/ikiwiki/directive/toc]] directive scrapes all headings from the page, including those in the sidebar. So, if the sidebar includes navigational headers, every page with a table of contents will display those navigational headers before the headers in that page's content.
+
+I'd like some way to exclude the sidebar from the table of contents.
As discussed via Jabber, perhaps toc could have a config option to ignore headers inside a nav tag or a tag with id="sidebar".
diff --git a/doc/bugs/trail_excess_dependencies.mdwn b/doc/bugs/trail_excess_dependencies.mdwn
new file mode 100644
index 000000000..f806a62eb
--- /dev/null
+++ b/doc/bugs/trail_excess_dependencies.mdwn
@@ -0,0 +1,95 @@
+I've just modified the trail plugin to use only presence, and not content,
+dependencies. Using content dependencies, particularly on the page
+that defines the trail, meant that every time that page changed, *every*
+page in the trail got rebuilt. This leads to users setting up sites that
+have horrible performance, if the trail is defined in, for example, the top
+page of a blog.
+
+Unfortunately, this change to presence dependencies has
+introduced a bug. Now when an existing trail is removed, the pages in the
+trail don't get rebuilt to remove the trail (both html display and state).
+
+> Actually, this particular case is usually OK. Suppose a trail `untrail`
+> contains `untrail/a` (as is the case in the regression
+> test I'm writing), and you build the wiki, then edit `untrail` to no
+> longer be a trail, and refresh. `untrail` has changed, so it is
+> rendered. Assuming that the template of either `untrail` or another
+> changed page happens to contain the `TRAILS` variable (which is not
+> guaranteed, but is highly likely), `I::P::t::prerender`
+> is invoked. It notices that `untrail/a` was previously a trail
+> member and is no longer, and rebuilds it with the diagnostic
+> "building untrail/a, its previous or next page has changed".
+>
+> Strictly speaking, I should change `I::P::t::build_affected`
+> so it calls `prerender`, so we're guaranteed to have done the
+> recalculation. Fixed in my branch. --[[smcv]]
+
+I think that to fix this bug, the plugin should use a hook to
+force rebuilding of all the pages that were in the trail, when
+the trail is removed (or changed).
+
+> The case of "the trail is changed" is still broken:
+> if the order of items changes, or the trail is removed,
+> then the logic above means it's OK, but if you
+> change the `\[[!meta title]]` of the trail, or anything else
+> used in the prev/up/next bar, the items won't show that
+> change. Fixed in my branch. --[[smcv]]
+
+There's a difficulty in doing that: The needsbuild hook runs before the scan
+hook, so before it has a chance to see if the trail directive is still there.
+It'd need some changes to ikiwiki's hooks.
+
+> That's what `build_affected` is for, and trail already used it. --s
+
+(An improvement in this area would probably simplify other plugins, which
+currently abuse the needsbuild hook to unset state, to handle the case
+where the directive that resulted in that state is removed.)
+
+I apologise for introducing a known bug, but the dependency mess was too
+bad to leave as-is. And I have very little time (and regrettably, even less
+power) to deal with it right now. :( --[[Joey]]
+
+[[!template id=gitbranch branch=smcv/ready/trail author="[[Simon_McVittie|smcv]]"]]
+[[!tag patch]]
+
+> I believe my `ready/trail` branch fixes this. There are regression tests.
+>
+> Here is an analysis of how the trail pages interdepend.
+>
+> * If *trail* contains a page *member* which does exist, *member* depends
+>   on *trail*. This is so that if the trail directive is deleted from
+>   *trail*, or if *trail*'s "friendly" title or trail settings are changed,
+>   the trail navigation bar in *member* will pick up that change.
This is
+>   now only a presence dependency, which isn't enough to make those happen
+>   correctly. [Edited to add: actually, the title is the only thing that
+>   can affect *member* without affecting the order of members.]
+>
+> * If *trail* contains consecutive pages *m1* and *m2* in that order,
+>   *m1* and *m2* depend on each other. This is so that if one's
+>   "friendly" title changes, the other is rebuilt. This is now only
+>   a presence dependency, which isn't enough to make those happen
+>   correctly. In my branch, I explicitly track the "friendly" title
+>   for every page that's edited and is involved in a trail somehow.
+>
+> * If *trail* has *member* in its `pagenames` but there is no page called
+>   *member*, then *trail* must be rebuilt if *member* is created. This
+>   was always a presence dependency, and is fine.
+>
+> In addition, the `trail` plugin remembers the maps
+> { trail => next item in that trail } and { trail => previous item in
+> that trail } for each page. If either changes, the page gets rebuilt
+> by `build_affected`, with almost the same logic as is used to update
+> pages that link to a changed page. My branch extends this to track the
+> "friendly title" of each page involved in a trail, either by being
+> the trail itself or a member (or both).
+>
+> I think it's true to say that the trail always depends on every member,
+> even if it doesn't display them. This might mean that we can use
+> "render the trail page" as an opportunity to work out whether any of
+> its members are also going to need re-rendering?
+> [Edited to add: actually, I didn't need this to be true, but I made the
+> regression test check it anyway.]
+>
+> --[[smcv]]
+
+>>> Thanks **very** much! [[done]] --[[Joey]]
diff --git a/doc/bugs/trail_shows_on_cgi_pages.mdwn b/doc/bugs/trail_shows_on_cgi_pages.mdwn
new file mode 100644
index 000000000..af1de3028
--- /dev/null
+++ b/doc/bugs/trail_shows_on_cgi_pages.mdwn
@@ -0,0 +1,12 @@
+When commenting on, or I think editing, a page that uses the trail
+plugin, the trail is displayed across the top of the page. This should not
+happen, probably. --[[Joey]]
+
+> [[!template id=gitbranch branch=smcv/ready/no-trails-if-dynamic author="[[smcv]]"]]
+> [[!tag patch]]
+> Fixed in my branch. --[[smcv]]
+
+>> [[merged|done]], although I am ambivalent about hiding the search box,
+>> and unsure about hiding the sidebar. At least the latter fixes an
+>> annoying layout problem with the comment page, where the textarea
+>> appears below the sidebar due to its width. --[[Joey]]
diff --git a/doc/bugs/trail_test_suite_failures.mdwn b/doc/bugs/trail_test_suite_failures.mdwn
new file mode 100644
index 000000000..a3b7159ec
--- /dev/null
+++ b/doc/bugs/trail_test_suite_failures.mdwn
@@ -0,0 +1,97 @@
+[[!template id=gitbranch branch=smcv/trail author=smcv]] [[!tag patch]]
+
+`t/trail.t` has some test suite failures. This is after applying
+[[smcv]]'s patch that fixed some races that caused it to fail
+sometimes. These remaining failures may also be intermittent,
+although I can get them reliably on my laptop. I've added some debugging
+output, which seems to point to an actual bug in the plugin AFAICS. --[[Joey]]
+
+> I can reproduce this reliably at 0a23666ddd but not 3.20120203. Bisecting
+> indicates that it regressed in aaa72a3a80f, "inline: When the pagenames list
+> includes pages that do not exist, skip them".
+>
+> I don't think this is the bug noted in the commit message - the inline
+> containing `sorting/new` uses `pages`, not `pagenames`.
--[[smcv]] + +>> It seems you removed `trail` support from `inline` in that commit. +>> Assuming that wasn't intentional, this is fixed in `smcv/trail`. +>> --[[smcv]] + +>>> Looks like a bad merge of some kind. pulled, [[done]] --[[Joey]] + +
      +ok 71 - expected n=sorting/end p=sorting/beginning in sorting/middle.html
      +not ok 72 - expected n=sorting/new p=sorting/middle in sorting/end.html
      +#   Failed test 'expected n=sorting/new p=sorting/middle in sorting/end.html'
      +#   at t/trail.t line 13.
      +#          got: 'n=sorting/linked2 p=sorting/middle'
      +#     expected: 'n=sorting/new p=sorting/middle'
      +not ok 73 - expected n=sorting/old p=sorting/end in sorting/new.html
      +#   Failed test 'expected n=sorting/old p=sorting/end in sorting/new.html'
      +#   at t/trail.t line 13.
      +#          got: undef
      +#     expected: 'n=sorting/old p=sorting/end'
      +not ok 74 - expected n=sorting/ancient p=sorting/new in sorting/old.html
      +#   Failed test 'expected n=sorting/ancient p=sorting/new in sorting/old.html'
      +#   at t/trail.t line 13.
      +#          got: undef
      +#     expected: 'n=sorting/ancient p=sorting/new'
      +not ok 75 - expected n=sorting/linked2 p=sorting/old in sorting/ancient.html
      +#   Failed test 'expected n=sorting/linked2 p=sorting/old in sorting/ancient.html'
      +#   at t/trail.t line 13.
      +#          got: undef
      +#     expected: 'n=sorting/linked2 p=sorting/old'
      +not ok 76 - expected n= p=sorting/ancient in sorting/linked2.html
      +#   Failed test 'expected n= p=sorting/ancient in sorting/linked2.html'
      +#   at t/trail.t line 13.
      +#          got: 'n= p=sorting/end'
      +#     expected: 'n= p=sorting/ancient'
      +ok 77
      +
      + +Here, the "new" page does not seem to be included into the trail as expected. +Looking at the rendered page, there is no trail directive output on it either. +--[[Joey]] + +
      +ok 90
      +not ok 91 - expected n=sorting/new p= in sorting/old.html
      +#   Failed test 'expected n=sorting/new p= in sorting/old.html'
      +#   at t/trail.t line 13.
      +#          got: undef
      +#     expected: 'n=sorting/new p='
      +not ok 92 - expected n=sorting/middle p=sorting/old in sorting/new.html
      +#   Failed test 'expected n=sorting/middle p=sorting/old in sorting/new.html'
      +#   at t/trail.t line 13.
      +#          got: undef
      +#     expected: 'n=sorting/middle p=sorting/old'
      +not ok 93 - expected n=sorting/linked2 p=sorting/new in sorting/middle.html
      +#   Failed test 'expected n=sorting/linked2 p=sorting/new in sorting/middle.html'
      +#   at t/trail.t line 13.
      +#          got: 'n=sorting/linked2 p='
      +#     expected: 'n=sorting/linked2 p=sorting/new'
      +ok 94 - expected n=sorting/linked p=sorting/middle in sorting/linked2.html
      +ok 95 - expected n=sorting/end p=sorting/linked2 in sorting/linked.html
      +ok 96 - expected n=sorting/a/c p=sorting/linked in sorting/end.html
      +ok 97 - expected n=sorting/beginning p=sorting/end in sorting/a/c.html
      +ok 98 - expected n=sorting/a/b p=sorting/a/c in sorting/beginning.html
      +not ok 99 - expected n=sorting/ancient p=sorting/beginning in sorting/a/b.html
      +#   Failed test 'expected n=sorting/ancient p=sorting/beginning in sorting/a/b.html'
      +#   at t/trail.t line 13.
      +#          got: 'n=sorting/z/a p=sorting/beginning'
      +#     expected: 'n=sorting/ancient p=sorting/beginning'
      +not ok 100 - expected n=sorting/z/a p=sorting/a/b in sorting/ancient.html
      +#   Failed test 'expected n=sorting/z/a p=sorting/a/b in sorting/ancient.html'
      +#   at t/trail.t line 13.
      +#          got: undef
      +#     expected: 'n=sorting/z/a p=sorting/a/b'
      +not ok 101 - expected n= p=sorting/ancient in sorting/z/a.html
      +#   Failed test 'expected n= p=sorting/ancient in sorting/z/a.html'
      +#   at t/trail.t line 13.
      +#          got: 'n= p=sorting/a/b'
      +#     expected: 'n= p=sorting/ancient'
      +ok 102
      +
      + +Haven't investigated, but looks like the same sort of problem, a +page expected to be in the trail isn't. --[[Joey]] diff --git a/doc/bugs/trails_depend_on_everything.mdwn b/doc/bugs/trails_depend_on_everything.mdwn new file mode 100644 index 000000000..8e9edcf43 --- /dev/null +++ b/doc/bugs/trails_depend_on_everything.mdwn @@ -0,0 +1,16 @@ +[[!template id=gitbranch branch=smcv/ready/trail-sort +author="[[Simon McVittie|smcv]]" +browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/trail-sort]] +[[!tag patch users/smcv/ready]] + +On [[trail's discussion page|plugins/trail/discussion]], [[kjs]] pointed out +that [[plugins/trail]] and [[plugins/contrib/album]] get excessive +dependencies on `internal(*)`. I tracked this down to their (ab)use of +`pagespec_match_list` with the pagespec `internal(*)` to sort a pre-existing +list of pages. + +They should just sort the pages instead; they'll already have all the +dependencies they need. My branch adds `IkiWiki::sort_pages` but does not +make it plugin API just yet. --[[smcv]] + +> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/transient_autocreated_tagbase_is_not_transient_autoindexed.mdwn b/doc/bugs/transient_autocreated_tagbase_is_not_transient_autoindexed.mdwn index 3eb1542d3..0673aa674 100644 --- a/doc/bugs/transient_autocreated_tagbase_is_not_transient_autoindexed.mdwn +++ b/doc/bugs/transient_autocreated_tagbase_is_not_transient_autoindexed.mdwn @@ -1,7 +1,74 @@ mkdir -p ikiwiki-tag-test/raw/a_dir/ ikiwiki-tag-test/rendered/ - echo '[[!taglink a_tag]]' > ikiwiki-tag-test/raw/a_dir/a_page.mdwn + echo '\[[!taglink a_tag]]' > ikiwiki-tag-test/raw/a_dir/a_page.mdwn ikiwiki --verbose --plugin tag --plugin autoindex --plugin mdwn --set autoindex_commit=0 --set tagbase=tag --set tag_autocreate=1 --set tag_autocreate_commit=0 ikiwiki-tag-test/raw/ ikiwiki-tag-test/rendered/ ls -al ikiwiki-tag-test/raw/.ikiwiki/transient/ ls -al ikiwiki-tag-test/rendered/tag/ Shouldn't `ikiwiki-tag-test/raw/.ikiwiki/transient/tag.mdwn` and `ikiwiki-tag-test/rendered/tag/index.html` exist? + +[[!tag patch users/smcv/ready]] +[[!template id=gitbranch branch=smcv/ready/autoindex author=smcv + browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/autoindex]] +[[!template id=gitbranch branch=smcv/ready/autoindex-more-often author=smcv + browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/autoindex-more-often]] + +> To have a starting point to (maybe) change this, my `ready/autoindex` +> branch adds a regression test for the current behaviour, both with +> and without `autoindex_commit` enabled. It also fixes an unnecessary +> and potentially harmful special case for the transient directory. +> +> The fact that files in underlays (including transient files) don't +> trigger autoindexing is deliberate. However, this is the second +> request to change this behaviour: the first was +> [[!debbug 611068]], which has a patch from Tuomas Jormola. +> On that bug report, Joey explains why it's undesirable +> for the original behaviour of autoindex (when the +> index isn't transient). +> +> I'm not sure whether the same reasoning still applies when the +> index is transient, though (`autoindex_commit => 0`), +> because the index pages won't be cluttering up people's +> git repositories any more? My `autoindex-more` branch changes +> the logic so it will do what you want in the `autoindex_commit => 0` +> case, and amends the appropriate regression test. 
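+>
+> For concreteness, the rule being changed is roughly of this shape (a
+> sketch with hypothetical data structures, not the actual branch):
+>
+>     # Decide whether a parent dir with no page of its own should get
+>     # an autoindex. $pages maps page => 1 if it lives in srcdir,
+>     # 0 if it comes from an underlay.
+>     sub needs_autoindex {
+>         my ($dir, $pages, $autoindex_commit) = @_;
+>         return 0 if exists $pages->{$dir};
+>         my @children = grep { m{^\Q$dir\E/[^/]+$} } keys %$pages;
+>         # underlay children only count when the generated index would
+>         # itself be transient (autoindex_commit => 0)
+>         @children = grep { $pages->{$_} } @children if $autoindex_commit;
+>         return @children ? 1 : 0;
+>     }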
--[[smcv]] + +>> the autoindex-more-often branch looks good to me in general. +>> +>> i do have doubts about the 3ba2ef1a patch ("remove unnecessary special case +>> for transient underlay"): now that we consider the complete transient +>> directory as well, the sequence in which the refresh hooks are called starts +>> to matter, and pages created by other plugins in a similar fashion as by +>> autoindex will only be included the next time refresh gets called. +>> +>> *addendum:* i just found where i discussed the issue of fighting transient +>> pages last, it was on [[todo/alias directive]]. the example cited there +>> (conflicts with autotag) would probably work here as well. (imagine a +>> `tags/project/completed` and a `tags/project/inprogress` exist, and a page +>> is tagge `tags/project`. will that be an autoindex or an autotag?) +>> +>> --[[chrysn]] + +>>> That's a fair point. I think what happens is down to commit vs. refresh +>>> timing. +>>> +>>> If pages tagged t/p/c, t/p/i and t/p are all created between one +>>> refresh and the next, with none of those tag pages existing, I think the +>>> answer is that they would all be autotags, because until t/p/c and +>>> t/p/i are created, there's no reason to need t/p as an autoindex. +>>> +>>> If there were already pages tagged t/p/c and t/p/i at the previous +>>> refresh, then t/p would already be an autoindex, and that's a +>>> valid page, so autotagging wouldn't touch it. +>>> +>>> I can't see much reason to prefer one over the other; the ideal answer +>>> is probably to have a tag-cloud *and* a list of child pages, but this +>>> seems a weird enough thing to do that I'd be OK with a wiki user +>>> having to disambiguate it themselves. "Whichever automatic process +>>> happens first, happens" is at least easy to explain, and I consider +>>> both autoindices and autotags to be time-saving conveniences rather +>>> than something fundamental. --s + +>>>> i think a behavior that does the right thing when there is a right thing +>>>> and *something* when there is ambiguity is ok for now; especially, it's +>>>> not up to the autoindex branch to come up with a solution to the general +>>>> problem. --[[chrysn]] diff --git a/doc/bugs/undefined_value_as_a_HASH_reference.mdwn b/doc/bugs/undefined_value_as_a_HASH_reference.mdwn new file mode 100644 index 000000000..228c3baac --- /dev/null +++ b/doc/bugs/undefined_value_as_a_HASH_reference.mdwn @@ -0,0 +1,68 @@ +Hello, + +does anyone have an idea why I see the following error when I run websetup (Setup button in Preferences)? + + Error: Can't use an undefined value as a HASH reference at /usr/share/perl5/IkiWiki/Plugin/websetup.pm line 82, line 97. + +Maybe, related to this is also + + $ ikiwiki --setup /etc/ikiwiki/auto-blog.setup + What will the blog be named? tmpblog + What revision control system to use? git + What wiki user (or openid) will be admin? wsh + + + Setting up tmpblog ... + Importing /home/wsh/tmpblog into git + Initialized empty shared Git repository in /home/wsh/tmpblog.git/ + Initialized empty Git repository in /home/wsh/tmpblog/.git/ + [master (root-commit) d6847e1] initial commit + 8 files changed, 48 insertions(+) + create mode 100644 .gitignore + create mode 100644 archives.mdwn + create mode 100644 comments.mdwn + create mode 100644 index.mdwn + create mode 100644 posts.mdwn + create mode 100644 posts/first_post.mdwn + create mode 100644 sidebar.mdwn + create mode 100644 tags.mdwn + Counting objects: 11, done. + Delta compression using up to 4 threads. 
+ Compressing objects: 100% (9/9), done. + Writing objects: 100% (11/11), 1.53 KiB, done. + Total 11 (delta 0), reused 0 (delta 0) + Unpacking objects: 100% (11/11), done. + To /home/wsh/tmpblog.git + * [new branch] master -> master + Directory /home/wsh/tmpblog is now a clone of git repository /home/wsh/tmpblog.git + Reference found where even-sized list expected at /usr/share/perl5/IkiWiki/Setup.pm line 177, line 97. + Reference found where even-sized list expected at /usr/share/perl5/IkiWiki/Setup.pm line 224, line 97. + Use of uninitialized value $section in hash element at /usr/share/perl5/IkiWiki/Setup.pm line 226, line 97. + Use of uninitialized value $section in hash element at /usr/share/perl5/IkiWiki/Setup.pm line 227, line 97. + Use of uninitialized value $section in concatenation (.) or string at /usr/share/perl5/IkiWiki/Setup.pm line 233, line 97. + /etc/ikiwiki/auto-blog.setup: Can't use an undefined value as a HASH reference at /usr/share/perl5/IkiWiki/Setup.pm line 252, line 97. + + usage: ikiwiki [options] source dest + ikiwiki --setup configfile + +I'm on Debian unstable. + +Thanks, +-Michal + +> Some plugin has a broken getsetup hook, and is feeding a corrupted setup list in. Both the websetup and the auto.setup files cause all plugins to be loaded and all their setup to be available. +> +> This command will help you find the plugin. Here it prints some noise around the rst plugin, for unrelated reasons, +> but what you're looking for is the plugin printed before the "even sized list" message. + +
      +perl -le 'use warnings; use strict; use Data::Dumper; use IkiWiki; %config=IkiWiki::defaultconfig(); use IkiWiki::Setup; my @s=IkiWiki::Setup::getsetup(); foreach my $pair (@s) { print "plugin ".$pair->[0]; my $setup=$pair->[1]; if ($pair->[0] eq "rst") { print Dumper($setup)} my %s=@{$setup} }'
      +
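+
+> For comparison, a well-formed `getsetup` hook returns a flat, even-sized
+> list of `option => { ... }` pairs, not a reference to one; something like
+> this (an illustration of the documented shape, not any particular plugin):
+>
+>     sub getsetup () {
+>         return
+>             plugin => {
+>                 safe => 1,
+>                 rebuild => undef,
+>             },
+>             myoption => {
+>                 type => "string",
+>                 example => "some value",
+>                 description => "an example option",
+>                 safe => 1,
+>                 rebuild => 0,
+>             };
+>     }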
+
+> I was able to replicate this by making a plugin's getsetup hook return a list reference, rather than a list,
+> and have put in a guard against that sort of thing.
+> --[[Joey]]
+
+>> Thanks. Your command didn't help me, but with a trial-and-error approach I found that the victim was an old version of the asciidoc plugin. For some reason, asciidoc was never listed in the output of the command. --[[wentasah]]
+
+>>> Ok. My fix should prevent the problem, so [[done]] --[[Joey]]
diff --git a/doc/bugs/utf8_warnings_are_meaningless.mdwn b/doc/bugs/utf8_warnings_are_meaningless.mdwn
new file mode 100644
index 000000000..7c1efa0a0
--- /dev/null
+++ b/doc/bugs/utf8_warnings_are_meaningless.mdwn
@@ -0,0 +1,9 @@
+Hunting down what was generating
+
+    utf8 "\xEB" does not map to Unicode at /usr/share/perl5/IkiWiki.pm line 873, <$in> chunk 1.
+
+led me to a call to `utf8::valid`, which led to http://perldoc.perl.org/utf8.html which says this is an "INTERNAL" function:
+
+> Main reason for this routine is to allow Perl's testsuite to check that operations have left strings in a consistent state. You most probably want to use `utf8::is_utf8()` instead.
+
+Apparently the main point of the function is to emit the warning in unit tests - the problem is, in the ikiwiki context, the only useful thing to warn about would be the name of the file you're trying to parse, not the name of the source code. Alternatively, since the code does continue on with the data, *not* whining about it might be an option :-) but an actionable message would be better.
diff --git a/doc/bugs/wiki_links_still_processed_inside_code_blocks.mdwn b/doc/bugs/wiki_links_still_processed_inside_code_blocks.mdwn
index b2a8b0632..9f0a1d102 100644
--- a/doc/bugs/wiki_links_still_processed_inside_code_blocks.mdwn
+++ b/doc/bugs/wiki_links_still_processed_inside_code_blocks.mdwn
@@ -46,4 +46,22 @@ and have it render like:
 > there should give some strong hints how to fix this bug, though I haven't
 > tried to apply the method yet. --[[Joey]]
 
+>> As far as I can see, the smileys bug is solved by checking for code/pre. In
+>> this case, however, this is not applicable. WikiLinks/directives *should* be
+>> expanded before passing text to the formatter, as their expansion may contain
+>> markup. Directives should be processed before, as they may provide *partial*
+>> markup (e.g. `template` ones) that makes no sense except in the page
+>> context. Links should be processed before because, at least, multimarkdown may
+>> try to expand them as anchor-links.
+>>
+>> For now, my partial solution is to restrict links to not have space at the
+>> start; this way, in many cases, escaping in code can be done in a natural way
+>> without breaking copy-pastability. For example, shell 'if \[[ condition ]];'
+>> will work fine with this.
+>>
+>> Maybe directives can also be restricted to only be allowed on a line by
+>> themselves (not separated by blank lines, however) or something similar.
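+>>
+>> For illustration, such a restriction could be a single lookahead right
+>> after the opening brackets (a sketch, not a tested patch):
+>>
+>>     # refuse wikilinks whose text begins with whitespace, so shell
+>>     # fragments like "if [[ condition ]]; then" pass through untouched
+>>     my $link_regexp = qr{
+>>         \[\[          # opening brackets
+>>         (?=\S)        # no whitespace allowed after them
+>>         ([^\]]+?)     # 1: link text
+>>         \]\]          # closing brackets
+>>     }x;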
+>> +>> --[[isbear]] + [[!debbug 487397]] diff --git a/doc/bugs/wrong_link_in_recentchanges_when_reverting_an_ikiwiki_outside_git_root.mdwn b/doc/bugs/wrong_link_in_recentchanges_when_reverting_an_ikiwiki_outside_git_root.mdwn index bf311c198..5f7450b79 100644 --- a/doc/bugs/wrong_link_in_recentchanges_when_reverting_an_ikiwiki_outside_git_root.mdwn +++ b/doc/bugs/wrong_link_in_recentchanges_when_reverting_an_ikiwiki_outside_git_root.mdwn @@ -1,3 +1,8 @@ in ikiwiki instances that don't reside in the git root directory (the only ones i know of are ikiwiki itself), reverts show the wrong link in the recentchanges (for example, in the ikiwiki main repository's 4530430 and its revert, the main index page was edited, but the revert shows doc/index as a link). the expected behavior is to compensate for the modified root directory (i.e., show index instead of doc/index). + +> This seems to work OK now - commit 84c4ca33 and its reversion both +> appear correctly in [[recentchanges]]. Looking at git history, +> Joey [[fixed this|done]] in commit 1b6c1895 before 3.20120203. +> --[[smcv]] diff --git a/doc/bugs/yaml:xs_codependency_not_listed.mdwn b/doc/bugs/yaml:xs_codependency_not_listed.mdwn new file mode 100644 index 000000000..3ae156db6 --- /dev/null +++ b/doc/bugs/yaml:xs_codependency_not_listed.mdwn @@ -0,0 +1,16 @@ +YAML:XS is not listed as a dep in the spec file which results in + +``` +HOME=/home/me /usr/bin/perl -Iblib/lib ikiwiki.in -dumpsetup ikiwiki.setup +Can't locate YAML/XS.pm in @INC (@INC contains: . blib/lib /usr/local/lib64/perl5 /usr/local/share/perl5 /usr/lib64/perl5/vendor_perl /usr/share/perl5/vendor_perl /usr/lib64/perl5 /usr/share/perl5) at (eval 39) line 2. +BEGIN failed--compilation aborted at (eval 39) line 2. +make: *** [ikiwiki.setup] Error 2 +error: Bad exit status from /var/tmp/rpm-tmp.Sgq2QK (%build) +``` + +when trying to build + +> Ok, added. [[done]] --[[Joey]] +>> Appears to be missing in 'Makefile.PL' also. -- [[ttw]] + +>>> Added --[[Joey]] diff --git a/doc/contact.mdwn b/doc/contact.mdwn index 486a4d186..dab092549 100644 --- a/doc/contact.mdwn +++ b/doc/contact.mdwn @@ -4,8 +4,7 @@ ikiwiki's own wiki. ikiwiki provides a [[bug_tracker|bugs]], a [[TODO_list|TODO]], and "discussion" sub-pages for every page, as well as a [[forum]] for general questions and discussion. ikiwiki developers monitor [[RecentChanges]] closely, via the webpage, email, -[CIA](http://cia.navi.cx), and IRC, and respond in a timely fashion. +and IRC, and respond in a timely fashion. You could also drop by the IRC channel `#ikiwiki` on -[OFTC](http://www.oftc.net/) (`irc.oftc.net`), or use the -[identi.ca ikiwiki group](http://identi.ca/group/ikiwiki). +[OFTC](http://www.oftc.net/) (`irc.oftc.net`). diff --git a/doc/convert.mdwn b/doc/convert.mdwn index 871cd31fe..a6f19a802 100644 --- a/doc/convert.mdwn +++ b/doc/convert.mdwn @@ -3,5 +3,8 @@ to convert it to ikiwiki? Various tools and techniques have been developed to handle such conversions. * [[tips/convert_mediawiki_to_ikiwiki]] -* [[tips/convert_MoinMoin_and_TWiki_to_ikiwiki]] +* [[tips/convert_moinmoin_to_ikiwiki]] * [[tips/convert_blogger_blogs_to_ikiwiki]] +* [[tips/Movable_Type_to_ikiwiki]] + +In addition, [[JoshTriplett]] has written scripts to convert Twiki sites, see [his page](/users/JoshTriplett) for more information. diff --git a/doc/css_market.mdwn b/doc/css_market.mdwn index 3f5627028..376f81b8b 100644 --- a/doc/css_market.mdwn +++ b/doc/css_market.mdwn @@ -10,6 +10,10 @@ included in ikiwiki for easy use. 
Feel free to add your own stylesheets here. (Upload as wiki pages; wiki gnomes will convert them to css files..) +* **[lessish.css](https://raw.github.com/spiffin/ikiwiki_lessish/master/lessish.css)**, contributed by [[Spiffin]], + A responsive stylesheet based on the [Less CSS Framework](http://lessframework.com). + Links: [PNG preview](https://github.com/spiffin/ikiwiki_lessish/blob/master/lessish_preview.png) and [GitHub repo](https://github.com/spiffin/ikiwiki_lessish). + * **[[css_market/zack.css]]**, contributed by [[StefanoZacchiroli]], customized mostly for *blogging purposes*, can be seen in action on [zack's blog](http://upsilon.cc/~zack/blog/) @@ -44,6 +48,8 @@ gnomes will convert them to css files..) templates. [[!meta stylesheet="bma"]] +* ** http://blog.lastlog.de/, contributed by joachim schiele; please feel free to copy. + * **[blankoblues.css][1]**, contributed by [[Blanko]]. Can be seen on [Blankoblues Demo][2]. Local.css and templates available [here][3]. * **[contraste.css][4]**, contributed by [[Blanko]]. Can be seen on [Contraste Demo][5]. Local.css and templates available [here][6]. @@ -56,9 +62,9 @@ gnomes will convert them to css files..) * **[ikiwiked gray-orange](https://github.com/AntPortal/ikiwiked/raw/master/theme/gray-orange/local.css)**, contributed by [Danny Castonguay](https://antportal.com/). Can be seen in action at [antportal.com/wiki](https://antportal.com/wiki/). Feel free to modify and contribute on [Github](https://github.com/AntPortal/ikiwiked) - [1]: http://blankoworld.homelinux.com/demo/ikiwiki/blankoblues/src/local.css (Download Blankoblues CSS) - [2]: http://blankoworld.homelinux.com/demo/ikiwiki/blankoblues/htdocs/ (Take a tour on Blankoblues Demo) - [3]: http://blankoworld.homelinux.com/demo/ikiwiki/blankoblues/blankoblues.tar.gz (Download local.css and templates for Blankoblues theme) - [4]: http://blankoworld.homelinux.com/demo/ikiwiki/contraste/src/local.css (Download Contraste CSS) - [5]: http://blankoworld.homelinux.com/demo/ikiwiki/contraste/htdocs/ (Take a tour on Contraste Demo) - [6]: http://blankoworld.homelinux.com/demo/ikiwiki/contraste/contraste.tar.gz (Download local.css and templates for Contraste theme) + [1]: http://olivier.dossmann.net/demo/ikiwiki/blankoblues/src/local.css (Download Blankoblues CSS) + [2]: http://olivier.dossmann.net/demo/ikiwiki/blankoblues/htdocs/ (Take a tour on Blankoblues Demo) + [3]: http://olivier.dossmann.net/demo/ikiwiki/blankoblues/blankoblues.tar.gz (Download local.css and templates for Blankoblues theme) + [4]: http://olivier.dossmann.net/demo/ikiwiki/contraste/src/local.css (Download Contraste CSS) + [5]: http://olivier.dossmann.net/demo/ikiwiki/contraste/htdocs/ (Take a tour on Contraste Demo) + [6]: http://olivier.dossmann.net/demo/ikiwiki/contraste/contraste.tar.gz (Download local.css and templates for Contraste theme) diff --git a/doc/download.mdwn b/doc/download.mdwn index f1ae5ad31..100f72843 100644 --- a/doc/download.mdwn +++ b/doc/download.mdwn @@ -35,10 +35,7 @@ own RPM. ## BSD ports -Ikiwiki can be installed [from macports](http://www.macports.org/ports.php?by=name&substr=ikiwiki) -by running `sudo port install ikiwiki`. - -NetBSD and many other platforms: pkgsrc has an [ikiwiki package](ftp://ftp.netbsd.org/pub/pkgsrc/current/pkgsrc/www/ikiwiki/README.html). +NetBSD, Mac OS X, Solaris, and many other platforms: [pkgsrc](http://www.pkgsrc.org/) has an [ikiwiki package](http://pkgsrc.se/www/ikiwiki). FreeBSD has ikiwiki in its [ports collection](http://www.freshports.org/www/ikiwiki/). 
diff --git a/doc/examples/blog/posts.mdwn b/doc/examples/blog/posts.mdwn index 08e014838..2bd0f1d6f 100644 --- a/doc/examples/blog/posts.mdwn +++ b/doc/examples/blog/posts.mdwn @@ -1,3 +1,3 @@ Here is a full list of posts to the [[blog|index]]. -[[!inline pages="page(./posts/*) and !*/Discussion" archive=yes feedshow=10 quick=yes]] +[[!inline pages="page(./posts/*) and !*/Discussion" archive=yes feedshow=10 quick=yes trail=yes]] diff --git a/doc/examples/softwaresite/bugs/hghg.mdwn b/doc/examples/softwaresite/bugs/hghg.mdwn new file mode 100644 index 000000000..cece64126 --- /dev/null +++ b/doc/examples/softwaresite/bugs/hghg.mdwn @@ -0,0 +1 @@ +hghg diff --git a/doc/features.mdwn b/doc/features.mdwn index 66f7ecb73..61b23bb59 100644 --- a/doc/features.mdwn +++ b/doc/features.mdwn @@ -64,9 +64,11 @@ Ikiwiki can also [[plugins/aggregate]] external blogs, feeding them into the wiki. This can be used to create a Planet type site that aggregates interesting feeds. -You can also mix blogging with podcasting by dropping audio files where -they will be picked up like blog posts. This will work for any files that -you would care to syndicate. +You can also mix blogging with [[podcasting|podcast]]. Simply drop +media files where they will be picked up like blog posts. For +fuller-featured podcast feeds, enclose media files in blog posts +using [[plugins/meta]]. Either way, this will work for any files +that you would care to syndicate. ## Valid html and [[css]] diff --git a/doc/forum/Adding_a_custom_header_and_footer.mdwn b/doc/forum/Adding_a_custom_header_and_footer.mdwn new file mode 100644 index 000000000..d9bdedc6a --- /dev/null +++ b/doc/forum/Adding_a_custom_header_and_footer.mdwn @@ -0,0 +1,13 @@ +I want to do some things that I think are easiest accomplished +by allowing me to add arbitrary HTML to be embedded on all pages +in the site. Specifically, I want to add meta tags to the top of +the page so that it renders pretty-like in things like Twitter, +and I want to add Piwik tracking to the bottom of the page. + +So how do I do that? + +I could write a whole new template for the site, but I suspect +that there's a more modular approach that is advised. And if you +have ideas of totally different ways do do this, do tell. + +Thanks diff --git a/doc/forum/Adding_a_custom_header_and_footer/comment_1_e82dbfef77ff222a7fa07aab0a19fb18._comment b/doc/forum/Adding_a_custom_header_and_footer/comment_1_e82dbfef77ff222a7fa07aab0a19fb18._comment new file mode 100644 index 000000000..d10961c19 --- /dev/null +++ b/doc/forum/Adding_a_custom_header_and_footer/comment_1_e82dbfef77ff222a7fa07aab0a19fb18._comment @@ -0,0 +1,10 @@ +[[!comment format=mdwn + username="spalax" + ip="82.216.247.172" + subject="Use page.tmpl" + date="2014-05-16T17:11:01Z" + content=""" +I think the right thing to do is to copy the default `page.tmpl` to your wiki (in your template directory), and add the code you wish. + +-- [[Louis|spalax]] +"""]] diff --git a/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__.mdwn b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__.mdwn new file mode 100644 index 000000000..c0b896515 --- /dev/null +++ b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__.mdwn @@ -0,0 +1,3 @@ +Is anyone successfull mirroring feeds from ikiwiki to identi.ca (or another status.net instance)? How did you set up your feed? + +When I try to, identi.ca presents me with an error about no "author ID URI" being found in the feed. 
Indeed, the ikiwiki-generated atom feed only has a global "author" - I presume identi.ca requires author information in each entry. Is it possible to set up ikiwiki's feed that way?
diff --git a/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_1_8a5acbb6234104b607c8c4cf16124ae4._comment b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_1_8a5acbb6234104b607c8c4cf16124ae4._comment
new file mode 100644
index 000000000..1d710d153
--- /dev/null
+++ b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_1_8a5acbb6234104b607c8c4cf16124ae4._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="Franek"
+ ip="188.99.178.40"
+ subject="[[!meta author="..."
+ date="2012-05-19T14:51:42Z"
+ content="""
+Adding [[!meta author=\"me\"]] to the entries and/or the feedpage does not help.
+"""]]
diff --git a/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_2_155e5823860a91989647ede8b5c9224a._comment b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_2_155e5823860a91989647ede8b5c9224a._comment
new file mode 100644
index 000000000..6c709b3f0
--- /dev/null
+++ b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_2_155e5823860a91989647ede8b5c9224a._comment
@@ -0,0 +1,16 @@
+[[!comment format=mdwn
+ username="Franek"
+ ip="188.99.178.40"
+ subject="Further enquiries"
+ date="2012-05-20T10:46:07Z"
+ content="""
+I did some more experiments, setting not only \"[[!meta author=...\" but also \"authorurl\" globally and per-entry in various combinations, with no success. As far as I could see, \"authorurl\" had no effect on the atom feed whatsoever.
+
+It seems that identi.ca wants a feed to have an `<author>` field with a `<uri>` subfield, as described here: [[http://www.atomenabled.org/developers/syndication/#person]]. Is there a way to achieve this with ikiwiki inline-feeds?
+
+I also found two old and unresolved status.net bug reports on the matter:
+
+[[http://status.net/open-source/issues/2840]]
+
+[[http://status.net/open-source/issues/2839]]
+"""]]
diff --git a/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_3_317f1202a3da1bfc845d4becbac4bba8._comment b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_3_317f1202a3da1bfc845d4becbac4bba8._comment
new file mode 100644
index 000000000..6bda93433
--- /dev/null
+++ b/doc/forum/Anyone_mirroring_ikiwiki_inline_feed_to_identi.ca__63__/comment_3_317f1202a3da1bfc845d4becbac4bba8._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="Franek"
+ ip="92.74.26.119"
+ subject="kind of solved, but another problem comes up"
+ date="2012-05-26T19:31:19Z"
+ content="""
+The templates atompage.tmpl and/or atomitem.tmpl appear to be what would have to be altered to satisfy identi.ca. I did that on my system, just hard-coding a `<uri>` element into `<author>` for testing. In one respect, it worked: identi.ca does not complain about the missing author uri any more. In another, it did not: another error comes up now, \"Internal server error\" and something like \"could not add feed\".
+
+I do not know where to go from this very unspecific error message. I guess I am going to try something like twitterfeed.com, for now.
+"""]]
diff --git a/doc/forum/Attachment_and_sub-directory.mdwn b/doc/forum/Attachment_and_sub-directory.mdwn
new file mode 100644
index 000000000..91d7aee27
--- /dev/null
+++ b/doc/forum/Attachment_and_sub-directory.mdwn
@@ -0,0 +1,5 @@
+Hi.
+
+If I create a page and attach a file to the page, ikiwiki creates a sub-directory with the page name and places the attachment in that sub-directory regardless of the usedirs setup. Is there any setup that does not create the sub-directory and instead places the attachment in the same directory as the page, so that I can edit and properly *preview* on a local machine using third-party markdown editors?
+
+Thanks in advance.
diff --git a/doc/forum/Background_picture_and_css.mdwn b/doc/forum/Background_picture_and_css.mdwn
new file mode 100644
index 000000000..827100984
--- /dev/null
+++ b/doc/forum/Background_picture_and_css.mdwn
@@ -0,0 +1,8 @@
+Is it possible to put two different background pictures into the right and left sides of the following ikiwiki css?
+
+[lessish css theme](https://raw.github.com/spiffin/ikiwiki_lessish/master/lessish.css)
+
+Is it also possible to have a background like this: [http://ysharifi.wordpress.com/](http://ysharifi.wordpress.com/)
+or this [tex.stackexchange.com](tex.stackexchange.com)?
+
+I am not a css expert, so it would be nice if you could provide some details.
diff --git a/doc/forum/CGI_script_and_HTTPS.mdwn b/doc/forum/CGI_script_and_HTTPS.mdwn
new file mode 100644
index 000000000..2f255002d
--- /dev/null
+++ b/doc/forum/CGI_script_and_HTTPS.mdwn
@@ -0,0 +1,29 @@
+Dear ikiwiki folks,
+
+using Debian Wheezy and ikiwiki 3.20120629, for some reason, when accessing the site using HTTP (and not HTTPS) and going to Edit, i.e. executing the CGI script, all URLs are prepended with HTTPS, which I do not want.
+
+Trying to look at the source, I guess it is originating from `IkiWiki/CGI.pm`.
+
+    sub printheader ($) {
+        my $session=shift;
+
+        if (($ENV{HTTPS} && lc $ENV{HTTPS} ne "off") || $config{sslcookie}) {
+            print $session->header(-charset => 'utf-8',
+                -cookie => $session->cookie(-httponly => 1, -secure => 1));
+        }
+        else {
+            print $session->header(-charset => 'utf-8',
+                -cookie => $session->cookie(-httponly => 1));
+        }
+    }
+
+Does it check whether HTTPS is enabled in the environment? During `ikiwiki --setup example.setup`, or when the CGI script is run as the site is accessed (for example in an Apache environment)?
+
+Can this somehow be disabled in ikiwiki? Reading the code, I guess I could somehow set `HTTPS = off` somewhere in the `VirtualHost` section of the Apache configuration.
+
+Thanks,
+
+--[[PaulePanter]]
diff --git a/doc/forum/CGI_script_and_HTTPS/comment_1_3f8ef438ca7de11635d4e40080e7baa9._comment b/doc/forum/CGI_script_and_HTTPS/comment_1_3f8ef438ca7de11635d4e40080e7baa9._comment
new file mode 100644
index 000000000..03f1032e9
--- /dev/null
+++ b/doc/forum/CGI_script_and_HTTPS/comment_1_3f8ef438ca7de11635d4e40080e7baa9._comment
@@ -0,0 +1,43 @@
+[[!comment format=mdwn
+ username="http://smcv.pseudorandom.co.uk/"
+ nickname="smcv"
+ subject="comment 1"
+ date="2012-11-05T11:27:02Z"
+ content="""
+IkiWiki generates self-referential URLs using the `url` and `cgiurl`
+configuration parameters, and the `urlto()` and `cgiurl()` functions;
+the code you quoted isn't involved (it's choosing whether to set
+HTTPS-only cookies or not, rather than choosing how to generate
+self-referential URLs).
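+
+(A side note on the quoted check: the `HTTPS` environment variable is
+normally set by the web server itself when TLS is in use. If a proxy or
+server configuration sets it spuriously, a minimal sketch along these
+lines, assuming Apache with mod_env enabled, would clear it for the
+plain-HTTP virtual host, since the quoted code treats the value `off`
+as not-HTTPS:
+
+    <VirtualHost *:80>
+        # tell the CGI it is not running under TLS
+        SetEnv HTTPS off
+    </VirtualHost>
+
+That would be a workaround rather than the recommended fix.)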
+
+If you want your wiki to be accessible via both HTTP and HTTPS, and use
+whichever the user first requested, you should set both `url` and
+`cgiurl` to the same URI scheme and hostname with no port specified,
+either both `http` or both `https`, for instance:
+
+    url: http://www.example.com/
+    cgiurl: http://www.example.com/ikiwiki.cgi
+
+or
+
+    url: https://example.org/wiki/
+    cgiurl: https://example.org/cgi-bin/ikiwiki
+
+(or the Perl-syntax equivalents if you're not using a YAML
+setup file).
+
+If you use one of those, IkiWiki will attempt to generate
+path-only links, like \"/wiki/\" and \"/cgi-bin/ikiwiki?...\",
+whenever it's valid to do so. A visitor using HTTP will stay
+on HTTP and a visitor using HTTPS will stay on HTTPS.
+
+The choice of `http` or `https` for the `url` and `cgiurl`
+still matters when a URL *must* be absolute, such as in an
+RSS feed.
+
+I improved this code in late 2010 for this todo item:
+[[todo/want_to_avoid_ikiwiki_using_http_or_https_in_urls_to_allow_serving_both]].
+It's possible that it has regressed (that's happened
+a couple of times). If it has, please quote your exact
+`url` and `cgiurl` configuration.
+"""]]
diff --git a/doc/forum/Calendar:_listing_multiple_entries_per_day.mdwn b/doc/forum/Calendar:_listing_multiple_entries_per_day.mdwn
index ebea43697..c3ecf36be 100644
--- a/doc/forum/Calendar:_listing_multiple_entries_per_day.mdwn
+++ b/doc/forum/Calendar:_listing_multiple_entries_per_day.mdwn
@@ -15,3 +15,7 @@ I'm not sure how I go about swapping the link on the day number to a link to, I
 and a suitable whilst loop looks to be all that's needed...
 
 Any pointers appreciated.
+
+A [[!taglink patch]] has been proposed in [comment](#comment-d6f94e2b779d1c038b6359aad79ed14b)
+
+> This has been applied. --[[Joey]]
diff --git a/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_5_de545ebb6376066674ef2aaae4757b9c._comment b/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_5_de545ebb6376066674ef2aaae4757b9c._comment
new file mode 100644
index 000000000..fef852066
--- /dev/null
+++ b/doc/forum/Calendar:_listing_multiple_entries_per_day/comment_5_de545ebb6376066674ef2aaae4757b9c._comment
@@ -0,0 +1,97 @@
+[[!comment format=mdwn
+ username="spalax"
+ subject="Popup listing multiple entries per day"
+ date="2012-06-08T00:56:06Z"
+ content="""
+[[!tag patch]]
+
+Hello,
+here is a patch that:
+
+- if there is a single entry on one day, does not change anything (compared to the previous version of the calendar plugin);
+- if there are several entries, displays a popup listing all the entries of that day when the mouse passes over the day.
+
+That's all. No new pages for each day, it takes as little space as it took before, and it is only a few lines more in the source.
+
+The only thing I am not totally happy with is the CSS. We have to say that the text is aligned on the left (otherwise it is aligned on the right, as is each day of the calendar), but I do not know where the most sensible place for that line of CSS is.
+
+Regards,
+-- Louis
+
+
+    diff --git a/IkiWiki/Plugin/calendar.pm b/IkiWiki/Plugin/calendar.pm
+    index d443198..2c9ed79 100644
+    --- a/IkiWiki/Plugin/calendar.pm
+    +++ b/IkiWiki/Plugin/calendar.pm
+    @@ -86,8 +86,11 @@ sub format_month (@) {
+     	my $year = $date[5] + 1900;
+     	my $mtag = sprintf(\"%02d\", $month);
+     
+    -	# Only one posting per day is being linked to.
+    -	$linkcache{\"$year/$mtag/$mday\"} = $p;
+    +	# Several postings per day
$linkcache{\"$year/$mtag/$mday\"}) { + + $linkcache{\"$year/$mtag/$mday\"} = []; + + } + + push(@{$linkcache{\"$year/$mtag/$mday\"}}, $p); + } + + my $pmonth = $params{month} - 1; + @@ -221,11 +224,36 @@ EOF + $tag='month-calendar-day-link'; + } + $calendar.=qq{\t\t}; + - $calendar.=htmllink($params{page}, $params{destpage}, + - $linkcache{$key}, + - noimageinline => 1, + - linktext => $day, + - title => pagetitle(IkiWiki::basename($linkcache{$key}))); + + if ( scalar(@{$linkcache{$key}}) == 1) { + + # Only one posting on this page + + my $page = $linkcache{$key}[0]; + + $calendar.=htmllink($params{page}, $params{destpage}, + + $page, + + noimageinline => 1, + + linktext => $day, + + title => pagetitle(IkiWiki::basename($page))); + + } else { + + $calendar.=qq{}; + + } + $calendar.=qq{\n}; + } + else { + diff --git a/doc/style.css b/doc/style.css + old mode 100644 + new mode 100755 + index 6e2afce..4149229 + --- a/doc/style.css + +++ b/doc/style.css + @@ -316,6 +316,7 @@ div.progress-done { + .popup .paren, + .popup .expand { + display: none; + + text-align: left; + } + .popup:hover .balloon, + .popup:focus .balloon { + +"""]] diff --git a/doc/forum/Can_I_have_different_favicons_for_each_folder__63__/comment_2_b8ccd3c29249eca73766f567bce12569._comment b/doc/forum/Can_I_have_different_favicons_for_each_folder__63__/comment_2_b8ccd3c29249eca73766f567bce12569._comment new file mode 100644 index 000000000..0c8ca3bce --- /dev/null +++ b/doc/forum/Can_I_have_different_favicons_for_each_folder__63__/comment_2_b8ccd3c29249eca73766f567bce12569._comment @@ -0,0 +1,8 @@ +[[!comment format=mdwn + username="Franek" + ip="178.7.43.64" + subject="comment 2" + date="2012-06-25T09:58:03Z" + content=""" +I did as you suggested (finally) and created a simple modification of the [[plugins/favicon]] plugin: [[plugins/contrib/localfavicon]]. It checks for the \"localfavicon\" option, and if it is set, it uses bestlink() to determine which favicon to use for each page; if not, it behaves just like the original favicon plugin. +"""]] diff --git a/doc/forum/Can__39__t_call_method___34__distribution__34___on_an_undefined_value_at_FirstTime.pm.html b/doc/forum/Can__39__t_call_method___34__distribution__34___on_an_undefined_value_at_FirstTime.pm.html new file mode 100644 index 000000000..b68395856 --- /dev/null +++ b/doc/forum/Can__39__t_call_method___34__distribution__34___on_an_undefined_value_at_FirstTime.pm.html @@ -0,0 +1,64 @@ +This really look like a general PERL problem, but google search returns no relative result of undfined method 'distribution' at FireTime.pm at all. Answer on where to look for answer is appreciated too. Using perl 5.18 on NETBSD 6.1 + +
      +$ PERL5LIB=`pwd`/ikiwiki:`pwd`/ikiwiki/cpan:`pwd`/lib/perl5 PERL_MM_USE_DEFAULT=1 perl -MCPAN -e 'CPAN::Shell->install("Bundle::IkiWiki")'
      +perl: warning: Setting locale failed.
      +perl: warning: Please check that your locale settings:
      +        LC_ALL = "en_US.UTF-8",
      +        LANG = "en_US.UTF-8"
      +    are supported and installed on your system.
      +perl: warning: Falling back to the standard locale ("C").
      +perl: warning: Setting locale failed.
      +perl: warning: Please check that your locale settings:
      +        LC_ALL = "en_US.UTF-8",
      +        LANG = "en_US.UTF-8"
      +    are supported and installed on your system.
      +perl: warning: Falling back to the standard locale ("C").
      +
      +CPAN.pm requires configuration, but most of it can be done automatically.
      +If you answer 'no' below, you will enter an interactive dialog for each
      +configuration option instead.
      +
      +Would you like to configure as much as possible automatically? [yes] yes
      +
      + 
      +
      +Warning: You do not have write permission for Perl library directories.
      +
      +To install modules, you need to configure a local Perl library directory or
      +escalate your privileges.  CPAN can help you by bootstrapping the local::lib
      +module or by configuring itself to use 'sudo' (if available).  You may also
      +resolve this problem manually if you need to customize your setup.
      +
      +What approach do you want?  (Choose 'local::lib', 'sudo' or 'manual')
      + [local::lib] local::lib
      +
      +Autoconfigured everything but 'urllist'.
      +
      +Now you need to choose your CPAN mirror sites.  You can let me
      +pick mirrors for you, you can select them from a list or you
      +can enter them by hand.
      +
      +Would you like me to automatically choose some CPAN mirror
      +sites for you? (This means connecting to the Internet) [yes] yes
      +Trying to fetch a mirror list from the Internet
      +Fetching with LWP:
      +http://www.perl.org/CPAN/MIRRORED.BY
      +
      +Looking for CPAN mirrors near you (please be patient)
      +.......................... done!
      +
      +New urllist
      +  http://cpan.llarian.net/
      +  http://mirrors.syringanetworks.net/CPAN/
      +  http://noodle.portalus.net/CPAN/
      +
      +Autoconfiguration complete.
      +
      +Attempting to bootstrap local::lib...
      +
      +Writing /arpa/tz/w/weiwu/.local/share/.cpan/CPAN/MyConfig.pm for bootstrap...
      +commit: wrote '/arpa/tz/w/weiwu/.local/share/.cpan/CPAN/MyConfig.pm'
      +Can't call method "distribution" on an undefined value at /usr/pkg/lib/perl5/5.18.0/CPAN/FirstTime.pm line 1257.
      +$ rm -r /arpa/tz/w/weiwu/.local/share/.cpan/
      +
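+A possible workaround (a hedged sketch, untested: it assumes the local::lib
+module is already installed from packages, e.g. pkgsrc p5-local-lib) is to skip
+CPAN's interactive bootstrap and set up the environment with local::lib first,
+then retry the install:
+
+    # configure the shell environment for a ~/perl5 library directory
+    eval "$(perl -Mlocal::lib)"
+    # retry with the same PERL5LIB prefix as before
+    PERL_MM_USE_DEFAULT=1 perl -MCPAN -e 'CPAN::Shell->install("Bundle::IkiWiki")'
+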
diff --git a/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__.mdwn b/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__.mdwn
new file mode 100644
index 000000000..118b534ed
--- /dev/null
+++ b/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__.mdwn
@@ -0,0 +1,7 @@
+All the ikiwiki blogs I have seen are single-user blogs. Is it possible to give every user a blog, where they can create their own pages in their own directory, based on their user name?
+
+I feel that a wiki might give more options in the way users share and collaborate when compared to a blog engine (like WordPress in multi-user format).
+
+Is this the best place to post a question like this? There doesn't seem to be much traffic in this forum.
+Thanks for your help,
+Richard
diff --git a/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__/comment_1_8e34b10699bed1b53b6c929ed1e9f19c._comment b/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__/comment_1_8e34b10699bed1b53b6c929ed1e9f19c._comment
new file mode 100644
index 000000000..1f0577c9c
--- /dev/null
+++ b/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__/comment_1_8e34b10699bed1b53b6c929ed1e9f19c._comment
@@ -0,0 +1,32 @@
+[[!comment format=mdwn
+ username="http://smcv.pseudorandom.co.uk/"
+ nickname="smcv"
+ subject="It's a wiki: any editor can have as many blogs as they want"
+ date="2013-07-17T08:17:05Z"
+ content="""
+Ikiwiki is a wiki, so you can certainly have multiple users. Any user
+with appropriate access can create any number of blogs: they just need
+to put an [[ikiwiki/directive/inline]] directive on any page they can
+edit, with a [[ikiwiki/PageSpec]] pointing to pages (blog posts) in a
+directory where they can create pages.
+
+If you want a limited set of users to be able to edit the wiki without
+making them full wiki admins, you can use [[plugins/lockedit]]:
+
+    locked_pages: * and !(user(bob) or user(chris))
+
+or if you want \"most\" users to only be able to write on their own blog, and
+not on other users' blogs (for instance: Alice the wiki admin can edit
+anything, but Bob can only edit `users/bob/...` and Chris can only edit
+`users/chris/...`) then you can use [[plugins/lockedit]], something like:
+
+    locked_pages: * and !(user(bob) and (users/bob or users/bob/*)) and !(user(chris) and (users/chris or users/chris/*))
+
+(Wiki admins can always edit locked pages.)
+
+If you have lots of users and you know a bit of Perl, you might want
+to [[write a plugin|plugins/write]] that adds a function-like
+[[ikiwiki/PageSpec]] like `owndirectory(users)`, which would match if
+there is a logged-in user and the page being edited is equal to or
+a subpage of their directory in users.
+"""]]
diff --git a/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__/comment_2_6083e16f72e12c03bdf739b84bd2f352._comment b/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__/comment_2_6083e16f72e12c03bdf739b84bd2f352._comment
new file mode 100644
index 000000000..10929a35a
--- /dev/null
+++ b/doc/forum/Can_ikiwiki_be_configured_as_multi_user_blog__63__/comment_2_6083e16f72e12c03bdf739b84bd2f352._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://smcv.pseudorandom.co.uk/"
+ nickname="smcv"
+ subject="comment 2"
+ date="2013-07-17T08:23:32Z"
+ content="""
+IkiWiki's own [[bugs]], [[news]] and [[to-do list|todo]] are functionally
+equivalent to blogs, in fact.
+([[news]] is the most obviously blog-like,
+[[bugs]] is like a blog except that solved bugs disappear from the
+page/feed, and [[todo]] only shows titles, not content, but is otherwise
+like [[bugs]].)
+"""]]
diff --git a/doc/forum/Can_not_advance_past_first_page_of_results_using_search_plugin.mdwn b/doc/forum/Can_not_advance_past_first_page_of_results_using_search_plugin.mdwn
new file mode 100644
index 000000000..1a9391e48
--- /dev/null
+++ b/doc/forum/Can_not_advance_past_first_page_of_results_using_search_plugin.mdwn
@@ -0,0 +1,26 @@
+I'm using the [[/plugins/search/]] plugin and it correctly displays the first page of results, but the "Next" button doesn't work.
+
+If I search for "linux", for example, I see "1-10 of exactly 65 matches" and this in my browser's address bar: https://example.com/ikiwiki.cgi?P=linux
+
+Then, I scroll down and click "Next" and I see...
+
+> Although this page is encrypted, the information you have entered is to be sent over an unencrypted connection and could easily be read by a third party.
+>
+> Are you sure you want to continue sending this information?
+
+...then I click "Continue", but I'm stuck on the first page of search results (it still says "1-10 of exactly 65 matches") and I have the following in my browser's address bar:
+
+https://example.com/ikiwiki.cgi?P=linux&DEFAULTOP=or&%253E=Next&DB=default&FMT=query&xP=Zlinux&xDB=default&xFILTERS=--O
+
+I noticed that if I change what's in the address bar to the following, I **can** advance to page 2 (it shows "11-20 of exactly 65 matches"). That is to say, I'm removing "25" from "%253E" as a workaround:
+
+https://example.com/ikiwiki.cgi?P=linux&DEFAULTOP=or&%3E=Next&DB=default&FMT=query&xP=Zlinux&xDB=default&xFILTERS=--O
+
+Based on this output, I might need to make a change to "searchquery.tmpl", which is under [[/templates]]...
+
+    [wikiuser@ikiwiki1 ~]$ grep -r DEFAULTOP /usr/share/ikiwiki
+    /usr/share/ikiwiki/templates/searchquery.tmpl: