From: Simon McVittie Date: Mon, 15 Sep 2014 08:44:51 +0000 (+0100) Subject: Merge remote-tracking branch 'jcflack/early-env' X-Git-Url: https://sipb.mit.edu/gitweb.cgi/ikiwiki.git/commitdiff_plain/9d928bd69496648cd7a2d4542a2d533992c01757?hp=29e80b4eedadc2afd3f9f36d215076c82982971b Merge remote-tracking branch 'jcflack/early-env' --- diff --git a/IkiWiki.pm b/IkiWiki.pm index cd4ac815d..49ac97196 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm @@ -2460,6 +2460,19 @@ sub pagespec_match ($$;@) { return $sub->($page, @params); } +# e.g. @pages = sort_pages("title", \@pages, reverse => "yes") +# +# Not exported yet, but could be in future if it is generally useful. +# Note that this signature is not the same as IkiWiki::SortSpec::sort_pages, +# which is "more internal". +sub sort_pages ($$;@) { + my $sort = shift; + my $list = shift; + my %params = @_; + $sort = sortspec_translate($sort, $params{reverse}); + return IkiWiki::SortSpec::sort_pages($sort, @$list); +} + sub pagespec_match_list ($$;@) { my $page=shift; my $pagespec=shift; diff --git a/IkiWiki/CGI.pm b/IkiWiki/CGI.pm index c0d8f598b..cb83319e6 100644 --- a/IkiWiki/CGI.pm +++ b/IkiWiki/CGI.pm @@ -110,11 +110,23 @@ sub decode_cgi_utf8 ($) { } } +sub safe_decode_utf8 ($) { + my $octets = shift; + # call decode_utf8 on >= 5.20 only if it's not already decoded, + # otherwise it balks, on < 5.20, always call it + if ($] < 5.02 || !Encode::is_utf8($octets)) { + return decode_utf8($octets); + } + else { + return $octets; + } +} + sub decode_form_utf8 ($) { if ($] >= 5.01) { my $form = shift; foreach my $f ($form->field) { - my @value=map { decode_utf8($_) } $form->field($f); + my @value=map { safe_decode_utf8($_) } $form->field($f); $form->field(name => $f, value => \@value, force => 1, diff --git a/IkiWiki/Plugin/comments.pm b/IkiWiki/Plugin/comments.pm index a0ca9f32e..98ae13810 100644 --- a/IkiWiki/Plugin/comments.pm +++ b/IkiWiki/Plugin/comments.pm @@ -438,6 +438,16 @@ sub editcomment ($$) { $page)); } + # There's no UI to get here, but someone might construct the URL, + # leading to a comment that exists in the repository but isn't + # shown + if (!pagespec_match($page, $config{comments_pagespec}, + location => $page)) { + error(sprintf(gettext( + "comments on page '%s' are not allowed"), + $page)); + } + if (pagespec_match($page, $config{comments_closed_pagespec}, location => $page)) { error(sprintf(gettext( diff --git a/IkiWiki/Plugin/conditional.pm b/IkiWiki/Plugin/conditional.pm index 0a3d7fb4c..b450f1a0a 100644 --- a/IkiWiki/Plugin/conditional.pm +++ b/IkiWiki/Plugin/conditional.pm @@ -33,11 +33,15 @@ sub preprocess_if (@) { # An optimisation to avoid needless looping over every page # for simple uses of some of the tests. 
$params{test} =~ /^([\s\!()]*((enabled|sourcepage|destpage|included)\([^)]*\)|(and|or))[\s\!()]*)+$/) { - add_depends($params{page}, "($params{test}) and $params{page}"); $result=pagespec_match($params{page}, $params{test}, location => $params{page}, sourcepage => $params{page}, destpage => $params{destpage}); + my $i = $result->influences; + foreach my $k (keys %$i) { + # minor optimization: influences are always simple dependencies + $IkiWiki::depends_simple{$params{page}}{lc $k} |= $i->{$k}; + } } else { $result=pagespec_match_list($params{page}, $params{test}, diff --git a/IkiWiki/Plugin/edittemplate.pm b/IkiWiki/Plugin/edittemplate.pm index e3ce5e3d9..c2a8da29f 100644 --- a/IkiWiki/Plugin/edittemplate.pm +++ b/IkiWiki/Plugin/edittemplate.pm @@ -139,6 +139,25 @@ sub filltemplate ($$) { $template->param(name => $page); + if ($template->query(name => 'uuid')) { + my $uuid; + if (open(my $fh, "<", "/proc/sys/kernel/random/uuid")) { + $uuid = <$fh>; + chomp $uuid; + close $fh; + } + else { + eval { + require UUID::Tiny; + $uuid = UUID::Tiny::create_uuid_as_string(UUID::Tiny::UUID_V4()); + }; + } + $template->param(uuid => $uuid); + } + + my $time = time(); + $template->param(time => IkiWiki::date_3339($time)); + return $template->output; } diff --git a/IkiWiki/Plugin/img.pm b/IkiWiki/Plugin/img.pm index b92e24cc0..54c13d069 100644 --- a/IkiWiki/Plugin/img.pm +++ b/IkiWiki/Plugin/img.pm @@ -65,82 +65,85 @@ sub preprocess (@) { my $dir = $params{page}; my $base = IkiWiki::basename($file); my $issvg = $base=~s/\.svg$/.png/i; + my $ispdf = $base=~s/\.pdf$/.png/i; + my $pagenumber = exists($params{pagenumber}) ? int($params{pagenumber}) : 0; + if ($pagenumber != 0) { + $base = "p$pagenumber-$base"; + } eval q{use Image::Magick}; error gettext("Image::Magick is not installed") if $@; - my $im = Image::Magick->new($issvg ? (magick => "png") : ()); + my $im = Image::Magick->new(); my $imglink; - my $r = $im->Read($srcfile); + my $imgdatalink; + my $r = $im->Read("$srcfile\[$pagenumber]"); error sprintf(gettext("failed to read %s: %s"), $file, $r) if $r; my ($dwidth, $dheight); - if ($params{size} ne 'full') { + if ($params{size} eq 'full') { + $dwidth = $im->Get("width"); + $dheight = $im->Get("height"); + } else { my ($w, $h) = ($params{size} =~ /^(\d*)x(\d*)$/); error sprintf(gettext('wrong size format "%s" (should be WxH)'), $params{size}) unless (defined $w && defined $h && (length $w || length $h)); - - if ((length $w && $w > $im->Get("width")) || - (length $h && $h > $im->Get("height"))) { - # resizing larger - $imglink = $file; - - # don't generate larger image, just set display size - if (length $w && length $h) { - ($dwidth, $dheight)=($w, $h); - } - # avoid division by zero on 0x0 image - elsif ($im->Get("width") == 0 || $im->Get("height") == 0) { - ($dwidth, $dheight)=(0, 0); - } - # calculate unspecified size from the other one, preserving - # aspect ratio - elsif (length $w) { - $dwidth=$w; - $dheight=$w / $im->Get("width") * $im->Get("height"); - } - elsif (length $h) { - $dheight=$h; - $dwidth=$h / $im->Get("height") * $im->Get("width"); - } + + if ($im->Get("width") == 0 || $im->Get("height") == 0) { + ($dwidth, $dheight)=(0, 0); + } elsif (! length $w || (length $h && $im->Get("height")*$w > $h * $im->Get("width"))) { + # using height because only height is given or ... + # because original image is more portrait than $w/$h + # ... slimness of $im > $h/w + # ... $im->Get("height")/$im->Get("width") > $h/$w + # ... 
$im->Get("height")*$w > $h * $im->Get("width") + + $dheight=$h; + $dwidth=$h / $im->Get("height") * $im->Get("width"); + } else { # (! length $h) or $w is what determines the resized size + $dwidth=$w; + $dheight=$w / $im->Get("width") * $im->Get("height"); + } + } + + if ($dwidth < $im->Get("width") || $ispdf) { + # resize down, or resize to pixels at all + + my $outfile = "$config{destdir}/$dir/$params{size}-$base"; + $imglink = "$dir/$params{size}-$base"; + + will_render($params{page}, $imglink); + + if (-e $outfile && (-M $srcfile >= -M $outfile)) { + $im = Image::Magick->new; + $r = $im->Read($outfile); + error sprintf(gettext("failed to read %s: %s"), $outfile, $r) if $r; } else { - # resizing smaller - my $outfile = "$config{destdir}/$dir/${w}x${h}-$base"; - $imglink = "$dir/${w}x${h}-$base"; - - will_render($params{page}, $imglink); - - if (-e $outfile && (-M $srcfile >= -M $outfile)) { - $im = Image::Magick->new; - $r = $im->Read($outfile); - error sprintf(gettext("failed to read %s: %s"), $outfile, $r) if $r; + $r = $im->Resize(geometry => "${dwidth}x${dheight}"); + error sprintf(gettext("failed to resize: %s"), $r) if $r; + + $im->set(($issvg || $ispdf) ? (magick => 'png') : ()); + my @blob = $im->ImageToBlob(); + # don't actually write resized file in preview mode; + # rely on width and height settings + if (! $params{preview}) { + writefile($imglink, $config{destdir}, $blob[0], 1); } else { - $r = $im->Resize(geometry => "${w}x${h}"); - error sprintf(gettext("failed to resize: %s"), $r) if $r; - - # don't actually write resized file in preview mode; - # rely on width and height settings - if (! $params{preview}) { - my @blob = $im->ImageToBlob(); - writefile($imglink, $config{destdir}, $blob[0], 1); - } - else { - $imglink = $file; - } + eval q{use MIME::Base64}; + error($@) if $@; + $imgdatalink = "data:image/".$im->Get("magick").";base64,".encode_base64($blob[0]); } - - # always get the true size of the resized image - $dwidth = $im->Get("width"); - $dheight = $im->Get("height"); } - } - else { - $imglink = $file; - $dwidth = $im->Get("width"); + + # always get the true size of the resized image (it could be + # that imagemagick did its calculations differently) + $dwidth = $im->Get("width"); $dheight = $im->Get("height"); + } else { + $imglink = $file; } if (! defined($dwidth) || ! defined($dheight)) { @@ -148,14 +151,9 @@ sub preprocess (@) { } my ($fileurl, $imgurl); - if (! $params{preview}) { - $fileurl=urlto($file, $params{destpage}); - $imgurl=urlto($imglink, $params{destpage}); - } - else { - $fileurl=urlto($file); - $imgurl=urlto($imglink); - } + my $urltobase = $params{preview} ? undef : $params{destpage}; + $fileurl=urlto($file, $urltobase); + $imgurl=$imgdatalink ? $imgdatalink : urlto($imglink, $urltobase); if (! exists $params{class}) { $params{class}="img"; diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm index 123dfd364..f578526cc 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm @@ -329,8 +329,12 @@ sub preprocess_inline (@) { my $ret=""; - if (length $config{cgiurl} && ! $params{preview} && (exists $params{rootpage} || - (exists $params{postform} && yesno($params{postform}))) && + my $postform = (exists $params{rootpage}); + if (exists $params{postform}) { + $postform = yesno($params{postform}); + } + + if (length $config{cgiurl} && ! $params{preview} && $postform && IkiWiki->can("cgi_editpage")) { # Add a blog post form, with feed buttons. 
my $formtemplate=template_depends("blogpost.tmpl", $params{page}, blind_cache => 1); diff --git a/IkiWiki/Plugin/linkmap.pm b/IkiWiki/Plugin/linkmap.pm index ac26e072e..b5ef1a137 100644 --- a/IkiWiki/Plugin/linkmap.pm +++ b/IkiWiki/Plugin/linkmap.pm @@ -5,6 +5,7 @@ use warnings; use strict; use IkiWiki 3.00; use IPC::Open2; +use HTML::Entities; sub import { hook(type => "getsetup", id => "linkmap", call => \&getsetup); @@ -22,6 +23,18 @@ sub getsetup () { my $mapnum=0; +sub pageescape { + my $item = shift; + # encoding explicitly in case ikiwiki is configured to accept <> or & + # in file names + my $title = pagetitle($item, 1); + # it would not be necessary to encode *all* the html entities (<> would + # be sufficient, &" probably a good idea), as dot accepts utf8, but it + # isn't bad either + $title = encode_entities($title); + return("<$title>"); +} + sub preprocess (@) { my %params=@_; @@ -63,7 +76,7 @@ sub preprocess (@) { my $show=sub { my $item=shift; if (! $shown{$item}) { - print OUT "\"$item\" [shape=box,href=\"$mapitems{$item}\"];\n"; + print OUT pageescape($item)." [shape=box,href=\"$mapitems{$item}\"];\n"; $shown{$item}=1; } }; @@ -74,7 +87,7 @@ sub preprocess (@) { foreach my $endpoint ($item, $link) { $show->($endpoint); } - print OUT "\"$item\" -> \"$link\";\n"; + print OUT pageescape($item)." -> ".pageescape($link).";\n"; } } print OUT "}\n"; diff --git a/IkiWiki/Plugin/trail.pm b/IkiWiki/Plugin/trail.pm index d5fb2b5d6..476db4dcb 100644 --- a/IkiWiki/Plugin/trail.pm +++ b/IkiWiki/Plugin/trail.pm @@ -319,10 +319,9 @@ sub prerender { } if (defined $pagestate{$trail}{trail}{sort}) { - # re-sort - @$members = pagespec_match_list($trail, 'internal(*)', - list => $members, - sort => $pagestate{$trail}{trail}{sort}); + @$members = IkiWiki::sort_pages( + $pagestate{$trail}{trail}{sort}, + $members); } if (IkiWiki::yesno $pagestate{$trail}{trail}{reverse}) { diff --git a/debian/changelog b/debian/changelog index 022fc7d05..611e51843 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,26 @@ +ikiwiki (3.20140912) UNRELEASED; urgency=medium + + * Don't double-decode CGI submissions with Encode.pm >= 2.53, + fixing "Error: Cannot decode string with wide characters". + Thanks, Antoine Beaupré + * Avoid making trails depend on everything in the wiki by giving them + a better way to sort the pages + * Don't let users post comments that won't be displayed + * Fix encoding of Unicode strings in Python plugins. + Thanks, chrysn + * Improve performance and correctness of the [[!if]] directive + * Let [[!inline rootpage=foo postform=no]] disable the posting form + * Switch default [[!man]] shortcut to manpages.debian.org. Closes: #700322 + * Add UUID and TIME variables to edittemplate. Closes: #752827 + Thanks, Jonathon Anderson + * Display pages in linkmaps as their pagetitle (no underscore escapes). + Thanks, chrysn + * Fix aspect ratio when scaling small images, and add support for + converting SVG and PDF graphics to PNG. + Thanks, chrysn + + -- Simon McVittie Fri, 12 Sep 2014 21:23:58 +0100 + ikiwiki (3.20140831) unstable; urgency=medium * Make --no-gettime work in initial build. 
Closes: #755075
diff --git a/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn b/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn
index 81a5abf28..59ca75435 100644
--- a/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn
+++ b/doc/bugs/CGI_wrapper_doesn__39__t_store_PERL5LIB_environment_variable.mdwn
@@ -26,3 +26,44 @@ This brutal patch implement your solution as a temporary fix.
As I am not sure that remembering `PERL5LIB` is a good idea, I think that a prettier solution will be to add a config variable (let's say `cgi_wrapper_perllib`) which, if fixed, contains the `PERL5LIB` value to include in the wrapper, or another (let's say `cgi_wrapper_remember_libdir`), which, if fixed, remember the current `PERL5LIB`.
-- Bruno
+
+**Update:** I had not seen this bug earlier, but I ran into the same issue and made a more general solution. You can already add stuff to `%config{ENV}` in the setup file, but it was being processed too late for `PERL5LIB` to do any good.
+[This change](https://github.com/jcflack/ikiwiki/compare/early-env) moves the `%config{ENV}` handling earlier in the wrapper, so anything specified there is placed back in the actual environment before Perl gets control. Problem solved!
+
+-- Chap
+
+> Thanks, this looks like a nicer solution than the above. Some review:
+>
+>     + $val =~ s/([\\"])/\\$1/g;
+>
+> This is *probably* OK, because the configuration is unlikely to include
+> non-ASCII, but I'd prefer something that covers all possibilities,
+> like this:
+>
+>     my $tmp = $val;
+>     utf8::encode($tmp) if utf8::is_utf8($tmp);
+>     $tmp =~ s/([^A-Za-z0-9])/sprintf "\\x%02x", ord $1/ge;
+>
+> and then passing $tmp to addenv.
+>
+>     + delete $config{ENV};
+>
+> I don't think this is particularly necessary: there doesn't seem any harm
+> in having it in the storable too?
+>
+> --[[smcv]]
+
+Happy to make the escaping change, thanks for the sharp eye.
+
+My thinking on `delete` is once it's handled, it's handled. The C code
+is going to put this straight into the real environment and then do
+a simple `exec` ... is there any way this hasn't been handled?
+
+It just takes up space twice in the generated wrapper otherwise.
+Admittedly it's not much space, but seems to be even less point ... ?
+
+-- Chap
+
+> That makes sense, as long as nothing else is going to read
+> `$config{ENV}` for purposes other than copying it into the actual
+> environment. --[[smcv]]
diff --git a/doc/bugs/Inlining_adds_newlines_which_can_break_markdown.mdwn b/doc/bugs/Inlining_adds_newlines_which_can_break_markdown.mdwn
new file mode 100644
index 000000000..eb71994e5
--- /dev/null
+++ b/doc/bugs/Inlining_adds_newlines_which_can_break_markdown.mdwn
@@ -0,0 +1,43 @@
+I'm trying to put a list of tags in a table, so I carefully make a newline-free taglist.tmpl and then do:
+
+    | \[[!inline pages="link(/category/env)" feeds=no archive=yes sort=title template=taglist]] |
+
+but there's a line in `inline.pm` that does:
+
+    return "<div class=\"inline\" id=\"$#inline\"></div>\n\n";
+
+And the extra newlines break the table. Can they be safely removed?
+
+> If you want an HTML table, I would suggest using an HTML table, which
+> should pass through Markdown without being interpreted further:
+>
+>     <table><tr>
+>     \[[!inline pages="link(/category/env)" feeds=no archive=yes sort=title template=tagtd]]
+>     </tr></table>
+>
+> where tagtd.tmpl is of the form `<td>your markup here</td>`; or even just
+>
+>     \[[!inline pages="link(/category/env)" feeds=no archive=yes sort=title template=tagtable]]
+>
+> where tagtable.tmpl looks like
+>
+>     <TMPL_IF FIRST>
+>     <table><tr>
+>     </TMPL_IF>
+>
+>     <td>your tag here</td>
+>
+>     <TMPL_IF LAST>
+>     </tr></table>
+>     </TMPL_IF>
+> +> I don't think you're deriving much benefit from Markdown's table syntax +> if you have to mix it with HTML::Template and ikiwiki directives, +> and be pathologically careful with whitespace. "Right tool for the job" +> and all that :-) +> +> When I edited this page I was amused to find that you used HTML, +> not Markdown, as its format. It seems oddly appropriate to my answer, but +> I've converted it to Markdown and adjusted the formatting, for easier +> commenting. +> --[[smcv]] diff --git a/doc/bugs/__91____91____33__inline_postform__61__no__93____93___doesn__39__t_disable_it.mdwn b/doc/bugs/__91____91____33__inline_postform__61__no__93____93___doesn__39__t_disable_it.mdwn index 7e7548657..7b97b40b3 100644 --- a/doc/bugs/__91____91____33__inline_postform__61__no__93____93___doesn__39__t_disable_it.mdwn +++ b/doc/bugs/__91____91____33__inline_postform__61__no__93____93___doesn__39__t_disable_it.mdwn @@ -21,3 +21,4 @@ not the actual inlining of pages, but it's a start. --[[smcv]] >> this looks simple, straightforward and good to me --[[chrysn]] +>>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/can__39__t_upload_a_simple_png_image:_prohibited_by_allowed__95__attachments___40__file_MIME_type_is_application__47__octet-stream....mdwn b/doc/bugs/can__39__t_upload_a_simple_png_image:_prohibited_by_allowed__95__attachments___40__file_MIME_type_is_application__47__octet-stream....mdwn index 4e3332748..627b2c827 100644 --- a/doc/bugs/can__39__t_upload_a_simple_png_image:_prohibited_by_allowed__95__attachments___40__file_MIME_type_is_application__47__octet-stream....mdwn +++ b/doc/bugs/can__39__t_upload_a_simple_png_image:_prohibited_by_allowed__95__attachments___40__file_MIME_type_is_application__47__octet-stream....mdwn @@ -56,16 +56,22 @@ Weird... --[[anarcat]] > > > > --[[anarcat]] +> > > [[!template id=gitbranch branch=ready/more-magic author="[[smcv]]" browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/ready/more-magic]] > > > If the regex match isn't necessary and it's just about deleting the -> > > parameters, I think I'd prefer something like +> > > parameters, I think I'd prefer > > > > > > if (! defined $mimetype) { > > > ... > > > } > > > $mimetype =~ s/;.*//; > > > -> > > but I'd be hesitant to do that without knowing why Joey implemented it -> > > the way it is. If it's about catching a result from file(1) that +> > > as done in my `ready/more-magic` branch. +> > > +> > > I'm a little hesitant to do that without knowing why Joey implemented it +> > > the way it is, but as far as I can tell it's just an oversight. +> > > +> > > Or, if the result of the s/// is checked for a reason, and it's +> > > about catching a result from file(1) that > > > is not, in fact, a MIME type at all (empty string or error message > > > or something), maybe something more like this? > > > @@ -74,3 +80,12 @@ Weird... --[[anarcat]] > > > (or whatever the allowed characters in MIME types are). --[[smcv]] > > > > I don't mind either way, but i feel this should be fixed for the next release, as I need to reapply this patch at every upgrade now. -- [[anarcat]] + +> > > > > This is still a problem in 3.20140831. -- [[anarcat]] + +> > > > > > I still don't think appending a semicolon is the right answer: +> > > > > > at best it's equivalent to what I suggested, and at worst it's +> > > > > > disabling a check that does have some reason behind it. +> > > > > > I've turned the version I suggested above into a proper branch. 
+> > > > > > Review by someone who can commit to ikiwiki.git would be appreciated. +> > > > > > --[[smcv]] diff --git a/doc/bugs/debwiki_shortcut_creates_buggy_URLs_to_subpages.mdwn b/doc/bugs/debwiki_shortcut_creates_buggy_URLs_to_subpages.mdwn new file mode 100644 index 000000000..f83f960ce --- /dev/null +++ b/doc/bugs/debwiki_shortcut_creates_buggy_URLs_to_subpages.mdwn @@ -0,0 +1,5 @@ +E.g. [[!debwiki Derivatives/Guidelines]]. + +Maybe we should use `%S` instead of `%s` in the shortcut definition? + +> seems reasonable, [[done]] --[[smcv]] diff --git a/doc/bugs/editing_gitbranch_template_is_really_slow.mdwn b/doc/bugs/editing_gitbranch_template_is_really_slow.mdwn index c7d0ffbe2..22733e6fe 100644 --- a/doc/bugs/editing_gitbranch_template_is_really_slow.mdwn +++ b/doc/bugs/editing_gitbranch_template_is_really_slow.mdwn @@ -63,3 +63,5 @@ browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/ > `bestlink` is still the single most expensive function in this refresh > at ~ 9.5s, with `match_glob` at ~ 5.2s as the runner-up. > --[[smcv]] + +>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/garbled_non-ascii_characters_in_body_in_web_interface.mdwn b/doc/bugs/garbled_non-ascii_characters_in_body_in_web_interface.mdwn new file mode 100644 index 000000000..657b86baa --- /dev/null +++ b/doc/bugs/garbled_non-ascii_characters_in_body_in_web_interface.mdwn @@ -0,0 +1,126 @@ +since my latest jessie upgrade here, charsets are all broken when editing a page. the page i'm trying to edit is [this wishlist](http://anarc.at/wishlist/), and it used to work fine. now, instead of: + +`Voici des choses que vous pouvez m'acheter si vous êtes le Père Nowel (yeah right):` + +... as we see in the rendered body right now, when i edit the page i see: + +`Voici des choses que vous pouvez m'acheter si vous �tes le P�re Nowel (yeah right):` + +... a typical double-encoding nightmare. The actual binary data is this for the word "Père" according to `hd`: + +~~~~ +anarcat@marcos:ikiwiki$ echo "Père" | hd +00000000 50 c3 a8 72 65 0a |P..re.| +00000006 +anarcat@marcos:ikiwiki$ echo "P�re" | hd +00000000 50 ef bf bd 72 65 0a |P...re.| +00000007 +~~~~ + +> I don't know what that is, but it isn't the usual double-UTF-8 encoding: +> +> >>> u'è'.encode('utf-8') +> '\xc3\xa8' +> >>> u'è'.encode('utf-8').decode('latin-1').encode('utf-8') +> '\xc3\x83\xc2\xa8' +> +> A packet capture of the incorrect HTTP request/response headers and body +> might be enlightening? --[[smcv]] +> +> > Here are the headers according to chromium: +> > +> > ~~~~ +> > GET /ikiwiki.cgi?do=edit&page=wishlist HTTP/1.1 +> > Host: anarc.at +> > Connection: keep-alive +> > Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8 +> > User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 +> > Referer: http://anarc.at/wishlist/ +> > Accept-Encoding: gzip,deflate,sdch +> > Accept-Language: fr,en-US;q=0.8,en;q=0.6 +> > Cookie: openid_provider=openid; ikiwiki_session_anarcat=XXXXXXXXXXXXXXXXXXXXXXX +> > +> > HTTP/1.1 200 OK +> > Date: Mon, 08 Sep 2014 21:22:24 GMT +> > Server: Apache/2.4.10 (Debian) +> > Set-Cookie: ikiwiki_session_anarcat=XXXXXXXXXXXXXXXXXXXXXXX; path=/; HttpOnly +> > Vary: Accept-Encoding +> > Content-Encoding: gzip +> > Content-Length: 4093 +> > Keep-Alive: timeout=5, max=100 +> > Connection: Keep-Alive +> > Content-Type: text/html; charset=utf-8 +> > ~~~~ +> > +> > ... which seem fairly normal... 
getting more data than this is a little inconvenient since the data is gzip-encoded and i'm kind of lazy extracting that from the stream. Chromium does seem to auto-detect it as utf8 according to the menus however... not sure what's going on here. I would focus on the following error however, since it's clearly emanating from the CGI... --[[anarcat]] + +Clicking on the Cancel button yields the following warning: + +~~~~ +Error: Cannot decode string with wide characters at /usr/lib/x86_64-linux-gnu/perl/5.20/Encode.pm line 215. +~~~~ + +> Looks as though you might be able to get a Python-style backtrace for this +> by setting `$Carp::Verbose = 1`. +> +> The error is that we're taking some string (which string? only a backtrace +> would tell you) that is already flagged as Unicode, and trying to decode +> it from byte-blob to Unicode again, analogous to this Python: +> +> some_bytes.decode('utf-8').decode('utf-8') +> +> --[[smcv]] +> > +> > I couldn't figure out where to set that Carp thing - it doesn't work simply by setting it in /usr/bin/ikiwiki - so i am not sure how to use this. However, with some debugging code in Encode.pm, i was able to find a case of double-encoding - in the left menu, for example, which is the source of the Encode.pm crash. +> > +> > It seems that some unicode semantics changed in Perl 5.20, or more precisely, in Encode.pm 2.53, according to [this](https://code.activestate.com/lists/perl-unicode/3314/). 5.20 does have significant Unicode changes, but I am not sure they are related (see [perldelta](https://metacpan.org/pod/distribution/perl/pod/perldelta.pod)). Doing more archeology, it seems that Encode.pm is indeed where the problem started, all the way back in [commit 8005a82](https://github.com/dankogai/p5-encode/commit/8005a82d8aa83024d72b14e66d9eb97d82029eeb#diff-f3330aa405ffb7e3fec2395c1fc953ac) (august 2013), taken from [pull request #11](https://github.com/dankogai/p5-encode/pull/11) which expressively forbids double-decoding, in effect failing like python does in the above example you gave (Perl used to silently succeed instead, a rather big change if you ask me). +> > +> > So stepping back, it seems that this would be a bug in Ikiwiki. It could be in any of those places: +> > +> > ~~~~ +> > anarcat@marcos:ikiwiki$ grep -r decode_utf8 IkiWiki* | wc -l +> > 31 +> > ~~~~ +> > +> > Now the fun part is to determine which one should be turned off... or should we duplicate the logic that was removed in decode_utf8, or make a safe_decode_utf8 for ourselves? --[[anarcat]] + +The apache logs yield: + +~~~~ +[Mon Sep 08 16:17:43.995827 2014] [cgi:error] [pid 2609] [client 192.168.0.3:47445] AH01215: Died at /usr/share/perl5/IkiWiki/CGI.pm line 467., referer: http://anarc.at/ikiwiki.cgi?do=edit&page=wishlist +~~~~ + +Interestingly enough, I can't reproduce the bug here (at least in this page). Also, editing the page through git works fine. + +I had put ikiwiki on hold during the last upgrade, so it was upgraded separately. The bug happens both with 3.20140613 and 3.20140831. The major thing that happened today is the upgrade from perl 5.18 to 5.20. 
Here's the output of `egrep '[0-9] (remove|purge|install|upgrade)' /var/log/dpkg.log | pastebinit -b paste.debian.net` to give an idea of what was upgraded today: + +http://paste.debian.net/plain/119944 + +This is a major bug which should probably be fixed before jessie, yet i can't seem to find a severity statement in reportbug that would justify blocking the release based on this - unless we consider non-english speakers as "most" users (i don't know the demographics well enough). It certainly makes ikiwiki completely unusable for my users that operate on the web interface in french... --[[anarcat]] + +Note that on this one page, i can't even get the textarea to display and i immediately get `Error: Cannot decode string with wide characters at /usr/lib/x86_64-linux-gnu/perl/5.20/Encode.pm line 215`: http://anarc.at/ikiwiki.cgi?do=edit&page=hardware%2Fserver%2Fmarcos. + +Also note that this is the same as [[forum/"Error: cannot decode string with wide characters" on Mageia Linux x86-64 Cauldron]], I believe. The backtrace I get here is: + +~~~~ +Error: Cannot decode string with wide characters at /usr/lib/x86_64-linux-gnu/perl/5.20/Encode.pm line 215. Encode::decode_utf8("**Menu**\x{d}\x{a}\x{d}\x{a} * [[\x{fffd} propos|index]]\x{d}\x{a} * [[Logiciels|software]]"...) +called at /usr/share/perl5/IkiWiki/CGI.pm line 117 IkiWiki::decode_form_utf8(CGI::FormBuilder=HASH(0x2ad63b8)) +called at /usr/share/perl5/IkiWiki/Plugin/editpage.pm line 90 IkiWiki::cgi_editpage(CGI=HASH(0xd514f8), CGI::Session=HASH(0x27797e0)) +called at /usr/share/perl5/IkiWiki/CGI.pm line 443 IkiWiki::__ANON__(CODE(0xfaa460)) +called at /usr/share/perl5/IkiWiki.pm line 2101 IkiWiki::run_hooks("sessioncgi", CODE(0x2520138)) +called at /usr/share/perl5/IkiWiki/CGI.pm line 443 IkiWiki::cgi() +called at /usr/bin/ikiwiki line 192 eval {...} +called at /usr/bin/ikiwiki line 192 IkiWiki::main() +called at /usr/bin/ikiwiki line 231 +~~~~ + +so this would explain the error on cancel, but doesn't explain the weird encoding i get when editing the page... ... + +... and that leads me to this crazy patch which fixes all the above issue, by avoiding double-decoding... go figure that shit out... + +[[!template id=gitbranch branch=anarcat/dev/safe_unicode author="[[anarcat]]"]] + +> [[Looks good to me|users/smcv/ready]] although I'm not sure how valuable +> the `$] < 5.02 || ` test is - I'd be tempted to just call `is_utf8`. --[[smcv]] + +>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/image_rescaling_distorts_with_small_pictures.mdwn b/doc/bugs/image_rescaling_distorts_with_small_pictures.mdwn index a8c8deebf..9ce091e15 100644 --- a/doc/bugs/image_rescaling_distorts_with_small_pictures.mdwn +++ b/doc/bugs/image_rescaling_distorts_with_small_pictures.mdwn @@ -41,4 +41,11 @@ If you use the rescaling feature of the directive [[ikiwiki/directive/img/]] wit >>> remains in read-entire-file mode afterwards. To avoid odd side-effects, >>> I would suggest using `readfile()` like `t/trail.t` does. >>> +>>> [[!template id=gitbranch branch=smcv/ready/imgforpdf-and-more author="[[chrysn]], [[smcv]]" + browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/imgforpdf-and-more]] +>>> I've used `readfile()` (but not done anything about the ImageMagick file type) +>>> in my copy of the branch. 
+>>> >>> --[[smcv]] + +>>>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/linkmap_displays_underscore_escapes.mdwn b/doc/bugs/linkmap_displays_underscore_escapes.mdwn index 14164d076..16080358b 100644 --- a/doc/bugs/linkmap_displays_underscore_escapes.mdwn +++ b/doc/bugs/linkmap_displays_underscore_escapes.mdwn @@ -33,3 +33,5 @@ the patch is stored in [[the patch.pl]] as created by git-format-patch, and can be pulled from the abovementioned branch. > update 2014-06-29: branch still merges cleanly and works. --[[chrysn]] + +>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn b/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn index ad52d780a..bae331f64 100644 --- a/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn +++ b/doc/bugs/listdirectives_doesn__39__t_register_a_link.mdwn @@ -112,3 +112,34 @@ The [[ikiwiki/directive/listdirectives]]` directive doesn't register a link betw >>>>> it doesn't inline. That's never going to end well :-) --[[smcv]] >>>>>> We have to differentiate between what users of ikiwiki consider first class links and what internally is happening. For the user any link contributing to the structured access tree is first class. The code on the other hand has to differentiate between the static links, then generated links, then orphan links. Three "passes", even your proposed solution could be seen as adding another pass since the orphan plugin has to run after all the plugins generating (first class user) links. -- [[holger]] +>>>>>>> I think the difference between your point of view, and what ikiwiki +>>>>>>> currently implements / what its design is geared towards, is this: +>>>>>>> ikiwiki says A links to B if the *source code* of A contains an +>>>>>>> explicit link to B. You say A links to B if the *compiled HTML* +>>>>>>> of A contains a link to B. +>>>>>>> +>>>>>>> Would you agree with that characterization? +>>>>>>> +>>>>>>> I suspect that "link in the source code" may be the more useful concept +>>>>>>> when using links for backlinks (I think the original implementation is +>>>>>>> ) and as pseudo-tags +>>>>>>> (). The fact that this is what +>>>>>>> `link()` and `backlink()` mean could be better-documented: it's +>>>>>>> entirely possible that the author of their documentation (Joey?) +>>>>>>> thought it was obvious that that's what they mean, because they +>>>>>>> were coming from a compiler/source-code mindset. +>>>>>>> +>>>>>>> Also, backlinks become rather all-engulfing if their presence in +>>>>>>> the compiled output counts as a link, since after a render pass, they +>>>>>>> would all become bidirectional; and as I noted previously, if pagespecs +>>>>>>> can match by linkedness (which we want) and plugins can generate lists +>>>>>>> of links according to pagespecs (which we also want), then links in the +>>>>>>> compiled output can certainly get into [[!wikipedia Russell's paradox]]-like +>>>>>>> situations, such as the page that links to every page to which it +>>>>>>> does not link. +>>>>>>> +>>>>>>> For the special case of deciding what is orphaned, sure, it's the +>>>>>>> compiled HTML that is the more relevant thing; +>>>>>>> that's why I talked about "reachability" rather than "links". 
+>>>>>>> +>>>>>>> --[[smcv]] diff --git a/doc/bugs/openid_login_fails_wirth_Could_not_determine_ID_provider_from_URL.mdwn b/doc/bugs/openid_login_fails_wirth_Could_not_determine_ID_provider_from_URL.mdwn index ec22f0c78..073c10d14 100644 --- a/doc/bugs/openid_login_fails_wirth_Could_not_determine_ID_provider_from_URL.mdwn +++ b/doc/bugs/openid_login_fails_wirth_Could_not_determine_ID_provider_from_URL.mdwn @@ -4,6 +4,37 @@ On some ikiwikis that I run, I get the following error on OpenID logins: > Is this fixed now that [[!debbug 738493]] has been fixed? --[[smcv]] +> > No, it isn't. I still get: `no_identity_server: Could not determine ID provider from URL.` from the latest ikiwiki in jessie (3.20140831), with liblwpx-paranoidagent-perl 1.10-3. Debugging tells me it's still related to the `500 Can't verify SSL peers without knowing which Certificate Authorities to trust` error, so probably because `Mozilla::CA` is not packaged ([[!debbug 702124]]). I still had to apply the patch to disable SSL verification at the end of this file. However, setting `$ENV{PERL_LWP_SSL_CA_PATH} = '/etc/ssl/certs';` seems to work now, so the following dumb patch works: +> > +> > ~~~~ +> > --- /usr/bin/ikiwiki.orig 2014-09-08 15:48:35.715868902 -0400 +> > +++ /usr/bin/ikiwiki 2014-09-08 15:50:29.666779878 -0400 +> > @@ -225,4 +225,5 @@ +> > } +> > } +> > +> > +$ENV{PERL_LWP_SSL_CA_PATH} = '/etc/ssl/certs'; +> > main; +> > ~~~~ +> > +> > may not be the best place to fiddle around with this, but then again it makes sense that it applies to the whole program. it should probably be reported upstream as well. also in my git repo. -- [[anarcat]] +> > +> > > This seems Debian-specific. I would be inclined to consider this to be +> > > a packaging/system-integration (i.e. non-upstream) bug in +> > > `liblwpx-paranoidagent-perl` rather than an upstream bug in IkiWiki; +> > > it certainly seems inappropriate to put this Debian-specific path +> > > in upstream IkiWiki. If it can't be fixed in LWPX::ParanoidAgent for +> > > whatever reason, applying it via some sort of sed in ikiwiki's +> > > `debian/rules` might be more reasonable? --[[smcv]] +> > > +> > > > by "upstream", i did mean `liblwpx-paranoidagent-perl`. so yeah, maybe this should be punted back into that package's court again. :( --[[anarcat]] +> > > > +> > > > done, by bumping the severity of [[!debbug 744404]] to release-criticial. --[[anarcat]] +> > > > +> > > > > ooh cool, the bug was fixed already with an upload, so this should probably be considered [[done]] at this point, even without the patch below! great! -- [[anarcat]] + +[[!template id=gitbranch branch=anarcat/dev/ssl_ca_path author="[[anarcat]]"]] + I seem recall having that error before, and fixing it, but it always seems to come back and I forget how to fix it. So I'll just open this bug and document it if i can figure it out... -- [[users/anarcat]] The Perl module manual says: @@ -157,18 +188,13 @@ Workaround - disable error checking: > My only workaround for now was to fix `PERL_LWP_SSL_VERIFY_HOSTNAME` to 0 directly in `ikiwiki` :-( -- [[users/bbb]] ~~~~ -*** /home/bruno/opt/ikiwiki/bin/ikiwiki.bad 2014-04-17 15:41:38.868972152 +0200 ---- /home/bruno/opt/ikiwiki/bin/ikiwiki 2014-04-17 15:04:56.524996905 +0200 -*************** sub main () { -*** 226,229 **** - } - } - -! main; ---- 226,229 ---- +--- /usr/bin/ikiwiki.orig 2014-09-08 15:48:35.715868902 -0400 ++++ /usr/bin/ikiwiki 2014-09-08 15:48:38.895947911 -0400 +@@ -225,4 +225,5 @@ } - } - -! 
$ENV{PERL_LWP_SSL_VERIFY_HOSTNAME} = 0 ; main; + } + ++$ENV{PERL_LWP_SSL_VERIFY_HOSTNAME} = 0; + main; ~~~~ diff --git a/doc/bugs/possible_to_post_comments_that_will_not_be_displayed.mdwn b/doc/bugs/possible_to_post_comments_that_will_not_be_displayed.mdwn index bb6cd17d3..83d662cbf 100644 --- a/doc/bugs/possible_to_post_comments_that_will_not_be_displayed.mdwn +++ b/doc/bugs/possible_to_post_comments_that_will_not_be_displayed.mdwn @@ -30,3 +30,5 @@ to comments_pagespec && !comments_closed_pagespec && check_canedit --[[smcv]] + +> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/pythonproxy-utf8_again.mdwn b/doc/bugs/pythonproxy-utf8_again.mdwn index fa702a22c..f068782b4 100644 --- a/doc/bugs/pythonproxy-utf8_again.mdwn +++ b/doc/bugs/pythonproxy-utf8_again.mdwn @@ -1,4 +1,6 @@ [[!template id=gitbranch branch=chrysn/more-proxy-utf8-fail author="[[chrysn]]"]] +[[!template id=gitbranch author="[[chrysn]], [[smcv]]" branch=smcv/ready/more-proxy-utf8-fail + browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/more-proxy-utf8-fail]] the recently introduced fixes for [[crashes in the python proxy even if disabled]] caused the typical python2 implicit conversion failures ("'ascii' codec @@ -52,3 +54,17 @@ patch. >>> a `unicode`. (i'd happily ditch python2 and port all plugins to python3, >>> where this is all easier, but my [[todo/vCard rendering]] still uses an >>> ancient module.) --[[chrysn]] + +>>>> You were right about this, `encode` is appropriate to go from `unicode` +>>>> to `str` under Python 2. However, Python 3 is still broken. +>>>> +>>>> My `ready/more-proxy-utf8-fail` branch, based on yours, +>>>> [[fixes the `rst` test when run under Python 3|bugs/rst_plugin_hangs_when_used_with_Python_3]] +>>>> and hopefully also fixes this one. Please check that it still +>>>> fixes your test-case too. +>>>> +>>>> Joey, I think this is [[ready for merge|users/smcv/ready]] even if it +>>>> doesn't fix chrysn's bug - it does fix Python 3 support +>>>> in general. --[[smcv]] + +>>>>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/redirect.mdwn b/doc/bugs/redirect.mdwn deleted file mode 100644 index 6296c3df1..000000000 --- a/doc/bugs/redirect.mdwn +++ /dev/null @@ -1,26 +0,0 @@ -I suppose this isn't technically a bug, but whetever. - -I want symbolic links to be rendered as HTTP redirects. For example, -if we do this, - - touch foo.mkdwn - ln -s foo.mkdwn bar.mkdwn - git push baz.branchable.com - -then the following command should print 302 - - curl -o /dev/null -s -w "%{http_code}" http://baz.thomaslevine.com/bar/ - -> An interesting idea, but it conflicts somewhat with wanting symlinks to be -> treated as the referenced file when it's safe to do so, which would be -> great for [[todo/git-annex support]], and also good to avoid duplication -> for files in system-wide underlays. -> -> Also, I don't think this is possible without help from the web server -> configuration: for instance, under Apache, I believe the only way to get -> an HTTP 302 redirect is via Apache-specific `.htaccess` files or -> system-level Apache configuration. -> -> In current ikiwiki, you can get a broadly similar effect by either -> using \[[!meta redir=foo]] (which does a HTML `` redirect) -> or reconfiguring the web server. 
--[[smcv]] diff --git a/doc/bugs/rst_fails_on_file_containing_only_a_number.mdwn b/doc/bugs/rst_fails_on_file_containing_only_a_number.mdwn index 99e46aac9..57e0cf6aa 100644 --- a/doc/bugs/rst_fails_on_file_containing_only_a_number.mdwn +++ b/doc/bugs/rst_fails_on_file_containing_only_a_number.mdwn @@ -27,3 +27,5 @@ throwing code..): > On second thought, this was a bug in ikiwiki, it should be transmitting > that as a string. Fixed in external.pm --[[Joey]] + +>> [[done]] a while ago, then. I've added a regression test now. --[[smcv]] diff --git a/doc/bugs/rst_plugin_fails_with___34__uncaught_exception:___39__ascii__39___codec_can__39__t_encode_character__34__.mdwn b/doc/bugs/rst_plugin_fails_with___34__uncaught_exception:___39__ascii__39___codec_can__39__t_encode_character__34__.mdwn index ff2a41c07..1893e7089 100644 --- a/doc/bugs/rst_plugin_fails_with___34__uncaught_exception:___39__ascii__39___codec_can__39__t_encode_character__34__.mdwn +++ b/doc/bugs/rst_plugin_fails_with___34__uncaught_exception:___39__ascii__39___codec_can__39__t_encode_character__34__.mdwn @@ -36,5 +36,5 @@ name, repr(ret))` (which should not hurt since it's a message for debugging purposes only). -> this is fixed in commit [154c4ea9](http://source.ikiwiki.branchable.com/?p=source.git;a=commit;h=154c4ea9e65d033756330a7f8c5c0fa285380bf0) +> this is [[fixed|done]] in commit [154c4ea9](http://source.ikiwiki.branchable.com/?p=source.git;a=commit;h=154c4ea9e65d033756330a7f8c5c0fa285380bf0) > (november 2013), which is included in 3.20140227. --[[chrysn]] diff --git a/doc/bugs/rst_plugin_hangs_when_used_with_Python_3.mdwn b/doc/bugs/rst_plugin_hangs_when_used_with_Python_3.mdwn new file mode 100644 index 000000000..001d990ca --- /dev/null +++ b/doc/bugs/rst_plugin_hangs_when_used_with_Python_3.mdwn @@ -0,0 +1,37 @@ +During ikiwiki make phase the rst process hangs: +[ps output](http://dpaste.com/21TQQKT) +[gdb backtrace 1](http://dpaste.com/0VQBW6D) +[gdb backtrace 1](http://dpaste.com/1VHS88Y) + +working with python 2.7 +[http://dpaste.com/0985A91](http://dpaste.com/0985A91) +not working with python3.3~3.4 +[http://dpaste.com/0ACNK3W](http://dpaste.com/0ACNK3W) + +> Retitled this bug report since it seems to be specific to Python 3. +> +> The `rst` plugin is probably more commonly used with Python 2. +> It seems likely that there is some Python-3-specific bug in `proxy.py`, +> perhaps introduced by [commit 154c4ea + "properly encode and decode from/to utf8 when sending rpc to ikiwiki"]( +http://source.ikiwiki.branchable.com/?p=source.git;a=commitdiff;h=154c4ea9e65d033756330a7f8c5c0fa285380bf0). +> +> I can reproduce this on Debian by installing `python3-docutils` +> and changing the first line of `plugins/proxy.py`, the first +> line of `plugins/pythondemo`, the first line of `plugins/rst` +> and the `system()` call in `t/rst.t` to use `python3` instead +> of `python`. --[[smcv]] + +looks like the problem is in proxy.py +ml = _IkiWikiExtPluginXMLRPCHandler._read(in_fd).decode('utf8') + +without decode('utf8') is working + +> That call was introduced +> [[to fix a bug under Python 2|bugs/crashes_in_the_python_proxy_even_if_disabled]] +> so it cannot just be removed, but I've put a proposed branch on +> [[this related bug|bugs/pythonproxy-utf8_again]]. 
[[!tag patch]] --smcv + +tested and fixed with patch [http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/38bd51bc1bab0cabd97dfe3cb598220a2c02550a](http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/38bd51bc1bab0cabd97dfe3cb598220a2c02550a) and patch [http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/81506fae8a6d5360f6d830b0e07190e60a7efd1c](http://git.pseudorandom.co.uk/smcv/ikiwiki.git/commitdiff/81506fae8a6d5360f6d830b0e07190e60a7efd1c) + +> [[done]], pending release --[[smcv]] diff --git a/doc/bugs/svg_and_pdf_conversion_fails.mdwn b/doc/bugs/svg_and_pdf_conversion_fails.mdwn index ac18fe8aa..9910959f9 100644 --- a/doc/bugs/svg_and_pdf_conversion_fails.mdwn +++ b/doc/bugs/svg_and_pdf_conversion_fails.mdwn @@ -56,3 +56,5 @@ should be safe for inclusion. >>> which works, so my biggest fear about the all-to-png change is unwarranted. >>> i'll have a look at that some time, but i think as things are, this is >>> ready now, please review again. --[[chrysn]] + +>>>> [[merged|done]] --[[smcv]] diff --git a/doc/bugs/trails_depend_on_everything.mdwn b/doc/bugs/trails_depend_on_everything.mdwn index babb1e361..8e9edcf43 100644 --- a/doc/bugs/trails_depend_on_everything.mdwn +++ b/doc/bugs/trails_depend_on_everything.mdwn @@ -12,3 +12,5 @@ list of pages. They should just sort the pages instead; they'll already have all the dependencies they need. My branch adds `IkiWiki::sort_pages` but does not make it plugin API just yet. --[[smcv]] + +> [[merged|done]] --[[smcv]] diff --git a/doc/forum/Using_reverse_proxy__59___base_URL_is_http_instead_of_https/comment_5_674f56100c0682eba36cc5327fbdae4a._comment b/doc/forum/Using_reverse_proxy__59___base_URL_is_http_instead_of_https/comment_5_674f56100c0682eba36cc5327fbdae4a._comment new file mode 100644 index 000000000..1546c67a0 --- /dev/null +++ b/doc/forum/Using_reverse_proxy__59___base_URL_is_http_instead_of_https/comment_5_674f56100c0682eba36cc5327fbdae4a._comment @@ -0,0 +1,61 @@ +[[!comment format=mdwn + username="https://www.google.com/accounts/o8/id?id=AItOawk6z7Jsfi_XWfzFJNZIjYUcjgrthg4aPUU" + nickname="Alejandro" + subject="Same Trick in Apache" + date="2014-09-10T18:58:24Z" + content=""" +I got it working with Apache 2.4 and Virtual Hosts on both HTTP 1.1 and HTTPS (SNI). The procedure is somewhat analogous to the nginx procedure above. So here is my set-up in the hopes will help other avoid this pain. + +## Set-up + + CLIENT <---- HTTPS ----> REVERSE PROXY <---- HTTP ----> IKIWIKI + + +## The HTTP to HTTPS Redirect + +To assure that all your HTTP requests are being redirected to HTTPS I chose to use mod_rewrite because simple Redirect does not pass query parameters. You will want an HTTP VHost that will redirect with something like the one below (notice the subtle ? before query string). **Note: This will NOT re-write ikiwiki's http:// URLs (base tag, etc.)**. For that I use a content filter like you will see below. This HTTP to HTTPS redirect is required though for both security and for the /foo/?updated URI form in this set-up. + +
+
+<pre>
+    ServerName imass.name
+    RewriteEngine On
+    RewriteCond %{HTTPS} off
+    RewriteRule (.*) https://%{HTTP_HOST}%{REQUEST_URI}?%{QUERY_STRING}
+    ErrorLog /var/log/imass.name-error.log
+    LogLevel warn
+    CustomLog /var/log/imass.name-access.log combined
+</VirtualHost>
+
+</pre>
+ +## The SSL Virtual Host + +This part is a bit more tricky. First I am using SNI as I don't care for non-SNI user agents. Second, you need to use a filter that replaces all http:// to https:// before the response is set. Note that this alone won't deal with ?update so you will need the HTTP to HTTPS set-up above anyway. Third, I use HTTP Auth so I don't know if this will work with your particular Auth set-up (although it should IMHO), YMMV: + +
+
+<pre>
+    ServerName imass.name
+    ProxyHTMLEnable On
+    ProxyHTMLExtended On
+    SSLEngine on
+    SSLCertificateFile XXX
+    SSLCertificateKeyFile XXX
+    SSLCertificateChainFile XXX
+    SSLOptions +StdEnvVars
+    ProxyPreserveHost On
+    ProxyHTMLURLMap http:// https://
+    ProxyPass / http://192.168.101.101/
+    ProxyPassReverse / http://192.168.101.101/
+    LogLevel warn
+    ErrorLog /var/log/imass.name-ssl-error.log
+    TransferLog \"/var/log/imass.name-ssl-access.log\"
+    CustomLog \"/var/log/imass.name-ssl-request.log\" \"%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \\"%r\\" %b\"
+</VirtualHost>
+
+</pre>
+ + + +"""]] diff --git a/doc/forum/__34__Error:_cannot_decode_string_with_wide_characters__34___on_Mageia_Linux_x86-64_Cauldron.mdwn b/doc/forum/__34__Error:_cannot_decode_string_with_wide_characters__34___on_Mageia_Linux_x86-64_Cauldron.mdwn index 8f9225968..c5a91be06 100644 --- a/doc/forum/__34__Error:_cannot_decode_string_with_wide_characters__34___on_Mageia_Linux_x86-64_Cauldron.mdwn +++ b/doc/forum/__34__Error:_cannot_decode_string_with_wide_characters__34___on_Mageia_Linux_x86-64_Cauldron.mdwn @@ -18,3 +18,6 @@ Can anyone shed any light on this problem and guide me what I need to do to fix Regards, -- [Shlomi Fish](http://www.shlomifish.org/) + +> [[Merged anarcat's fix for +this|bugs/garbled non-ascii characters in body in web interface]] --[[smcv]] diff --git a/doc/forum/__34__Error:_cannot_decode_string_with_wide_characters__34___on_Mageia_Linux_x86-64_Cauldron/comment_1_abf7ec7c378ab0908685d72d159e9fd2._comment b/doc/forum/__34__Error:_cannot_decode_string_with_wide_characters__34___on_Mageia_Linux_x86-64_Cauldron/comment_1_abf7ec7c378ab0908685d72d159e9fd2._comment new file mode 100644 index 000000000..8b066b391 --- /dev/null +++ b/doc/forum/__34__Error:_cannot_decode_string_with_wide_characters__34___on_Mageia_Linux_x86-64_Cauldron/comment_1_abf7ec7c378ab0908685d72d159e9fd2._comment @@ -0,0 +1,8 @@ +[[!comment format=mdwn + username="https://id.koumbit.net/anarcat" + ip="72.0.72.144" + subject="comment 1" + date="2014-09-10T03:00:22Z" + content=""" +i had a similar issue, reported in [[bugs/garbled_non-ascii_characters_in_body_in_web_interface]]. +"""]] diff --git a/doc/git.mdwn b/doc/git.mdwn index e71fa57d7..55cc9c16e 100644 --- a/doc/git.mdwn +++ b/doc/git.mdwn @@ -81,6 +81,7 @@ think about merging them. This is recommended. :-) * [[cbaines]] `git://git.cbaines.net/ikiwiki` * [[mhameed]] `git://github.com/mhameed/ikiwiki.git` * [[spalax]] `git://github.com/paternal/ikiwiki.git` ([[browse|https://github.com/paternal/ikiwiki]]) +* [[jcflack]] `git://github.com/jcflack/ikiwiki.git` ## branches diff --git a/doc/ikiwiki/directive/edittemplate.mdwn b/doc/ikiwiki/directive/edittemplate.mdwn index a6f301dd3..6269f5dd8 100644 --- a/doc/ikiwiki/directive/edittemplate.mdwn +++ b/doc/ikiwiki/directive/edittemplate.mdwn @@ -27,8 +27,20 @@ something like: Details: The template page can also contain [[!cpan HTML::Template]] directives, -like other ikiwiki [[templates]]. Currently only one variable is -set: `` is replaced with the name of the page being -created. +like other ikiwiki [[templates]]. + +These variables might be set: + +* `` is replaced with the name of the page being + created. + +* `` is replaced with a version 4 (random) UUID + suitable for use in `\[[!meta guid="urn:uuid:"]]`. + (Requires the `UUID::Tiny` Perl module if not running on Linux.) + +* `` is replaced with the current (template generation) + time using a fixed format (RFC 3339, `%Y-%m-%dT%H:%M:%SZ`), + suitable for use in `\[[!meta date=""]]` + (see [[meta]]) or `\[[!date ""]]` (see [[date]]). [[!meta robots="noindex, follow"]] diff --git a/doc/ikiwiki/directive/img.mdwn b/doc/ikiwiki/directive/img.mdwn index cda62b58f..08d158987 100644 --- a/doc/ikiwiki/directive/img.mdwn +++ b/doc/ikiwiki/directive/img.mdwn @@ -28,6 +28,9 @@ to the full size version. By default it does; set "link=somepage" to link to another page instead, or "link=no" to disable the link, or "link=http://url" to link to a given url. 
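+
+For example, a small illustrative use of these parameters (hypothetical
+file name):
+
+    \[[!img photos/cover.jpg size=200x200 link=no]]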
+The `pagenumber` parameter selects which of multiple images should be rendered; +this is relevant mainly for GIF and PDF source images. + You can also set default values that will be applied to all later images on the page, unless overridden. Useful when including many images on a page. diff --git a/doc/ikiwiki/markdown/discussion.mdwn b/doc/ikiwiki/markdown/discussion.mdwn new file mode 100644 index 000000000..7c0d75858 --- /dev/null +++ b/doc/ikiwiki/markdown/discussion.mdwn @@ -0,0 +1,48 @@ +There is an ongoing [effort to standardise Markdown][sm]; I think it would be nice to check whether this implementation is compliant with it. + +[sm]: http://standardmarkdown.com/ + +http://standardmarkdown.com/ + +> IkiWiki's [[plugins/mdwn]] plugin does not contain an implementation +> of Markdown: it relies on external libraries. It can currently use +> any of these, most-preferred first: +> +> * [[!cpan Text::MultiMarkdown]], only if explicitly requested via +> `$config{multimarkdown}` +> * [[!cpan Text::Markdown::Discount]], if not explicitly disabled +> via `$config{nodiscount}` +> * [[!cpan Text::Markdown]] +> * [[!cpan Markdown]] +> * `/usr/bin/markdown` +> +> In practice, Discount is the implementation pulled in by the +> Debian package dependencies, and (I suspect) the most +> commonly used with IkiWiki. +> +> If the selected external library (whatever it happens to be) +> complies with a particular interpretation of Markdown, then +> IkiWiki will too. If not, it won't. The only influence +> IkiWiki has over its level of compliance with a particular +> interpretation is in how we choose which external library +> we prefer. +> +> As such, if you want IkiWiki to change its interpretation of +> Markdown, the way to do that is to either change Discount's +> interpretation of Markdown, or contribute a patch to make +> `mdwn.pm` prefer a different (and presumably "more compliant") +> Markdown implementation. +> +> IkiWiki has one syntax extension beyond Markdown, which is +> that text enclosed in double-square-brackets is an IkiWiki +> [[ikiwiki/wikilink]] or [[ikiwiki/directive]]. This applies +> to any markup language used with IkiWiki, not just Markdown. +> +> (There also doesn't seem to be any consensus that labelling +> any particular fork of Markdown as "standard" can make it the +> truth, or that this particular fork is the Correct™ fork and not +> just ; but that's between the authors of +> Markdown implementations and those who want to standardize +> Markdown, and it isn't IkiWiki's job to police that.) +> +> --[[smcv]] diff --git a/doc/news/openid/discussion.mdwn b/doc/news/openid/discussion.mdwn index d8c83f022..e1a7ef075 100644 --- a/doc/news/openid/discussion.mdwn +++ b/doc/news/openid/discussion.mdwn @@ -121,3 +121,8 @@ I'm worried, at least until the issue is cleared. This poll is now 8 years old. Do we have enough data to make a decision? Can we consider adding `open=no` to the poll? -- [[Jon]] + +---- + +I vote against disabling password logins until my OpenID will work on [ikiwiki.info](/)! +See [[/plugins/openid/troubleshooting]]. -- Chap diff --git a/doc/news/version_3.20140102.mdwn b/doc/news/version_3.20140102.mdwn deleted file mode 100644 index e10164625..000000000 --- a/doc/news/version_3.20140102.mdwn +++ /dev/null @@ -1,24 +0,0 @@ -ikiwiki 3.20140102 released with [[!toggle text="these changes"]] -[[!toggleable text=""" - * aggregate: Improve display of post author. - * poll: Fix behavior of poll buttons when inlined. 
- * Fixed unncessary tight loop hash copy in saveindex where a pointer - can be used instead. Can speed up refreshes by nearly 50% in some - circumstances. - * Optimized loadindex by caching the page name in the index. - * Added only\_committed\_changes config setting, which speeds up wiki - refresh by querying git to find the files that were changed, rather - than looking at the work tree. Not enabled by default as it can - break some setups where not all files get committed to git. - * comments: Write pending moderation comments to the transient underlay - to avoid conflict with only\_committed\_changes. - * search: Added google\_search option, which makes it search google - rather than using the internal xapain database. - (googlesearch plugin is too hard to turn on when xapain databases - corrupt themselves, which happens all too frequently). - * osm: Remove invalid use of charset on embedded javascript tags. - Closes: #[731197](http://bugs.debian.org/731197) - * style.css: Add compatibility definitions for more block-level - html5 elements. Closes: #[731199](http://bugs.debian.org/731199) - * aggregrate: Fix several bugs in handling of empty and colliding - titles when generating filenames."""]] \ No newline at end of file diff --git a/doc/news/version_3.20140831.mdwn b/doc/news/version_3.20140831.mdwn new file mode 100644 index 000000000..c8ea1a237 --- /dev/null +++ b/doc/news/version_3.20140831.mdwn @@ -0,0 +1,3 @@ +ikiwiki 3.20140831 released with [[!toggle text="these changes"]] +[[!toggleable text=""" + * Make --no-gettime work in initial build. Closes: #[755075](http://bugs.debian.org/755075)"""]] \ No newline at end of file diff --git a/doc/plugins/openid.mdwn b/doc/plugins/openid.mdwn index d56d1a396..82c23fc4f 100644 --- a/doc/plugins/openid.mdwn +++ b/doc/plugins/openid.mdwn @@ -30,3 +30,8 @@ certain setups. to be used when doing openid authentication. The `openid_cgiurl` must point to an ikiwiki [[CGI]], and it will need to match the `openid_realm` to work. + +## troubleshooting + +See [[plugins/openid/troubleshooting]] for a number of issues that may +need to be addressed when setting up ikiwiki to accept OpenID logins reliably. diff --git a/doc/plugins/openid/troubleshooting.mdwn b/doc/plugins/openid/troubleshooting.mdwn new file mode 100644 index 000000000..c59f7346a --- /dev/null +++ b/doc/plugins/openid/troubleshooting.mdwn @@ -0,0 +1,197 @@ +**TL;DR** + +[[!toc levels=3]] + +# An odyssey through lots of things that have to be right before OpenID works + +Having just (at last) made an ikiwiki installation accept my +OpenID, I have learned many of the things that may have to be checked +when getting the [[plugins/openid]] plugin to work. (These are probably +the reasons why [ikiwiki.info](/) itself won't accept my OpenID!) + +Just to describe my OpenID setup a bit (and why it makes a good stress-test +for the OpenID plugin :). + +I'm using my personal home page URL as my OpenID. My page lives at +a shared-hosting service I have hired. It contains links that delegate +my OpenID processing to [indieauth.com](https://indieauth.com). + +IndieAuth, in turn, uses +[rel-me authentication](http://microformats.org/wiki/RelMeAuth) to find +an [OAuth](http://microformats.org/wiki/OAuth) provider that can authenticate +me. (At present, I am using [github](http://github.com) for that, which +is an OAuth provider but not an OpenID provider, so the gatewaying provided +by IndieAuth solves that problem.) 
As far as ikiwiki is concerned, +IndieAuth is my OpenID provider; the details beyond that are transparent. + +So, what were the various issues I had to sort out before my first successful +login with the [[plugins/openid]] plugin? + +## no_identity_server: Could not determine ID provider from URL. + +This is the message [ikiwiki.info](/) shows as soon as I enter my home URL +as an OpenID. It is also the first one I got on my own ikiwiki installation. + +### various possible causes ... + +There could be lots of causes. Maybe: + +* the offered OpenID is an `https:` URL and there is an issue in checking + the certificate, so the page can't be retrieved? +* the page can be retrieved, but it isn't well-formed HTML and the library + can't parse it for the needed OpenID links? +* ...? + +### make a luckier setting of useragent ?! + +In my case, it was none of the above. It turns out my shared-hosting provider +has a rule that refuses requests with `User-Agent: libwww-perl/6.03` (!!). +This is the sort of problem that's really hard to anticipate or plan around. +I could fix it (_for this case!_) by changing `useragent:` in `ikiwiki.setup` +to a different string that my goofy provider lets through. + +__Recommendation:__ set `useragent:` in `ikiwiki.setup` to some +unlikely-to-be-blacklisted value. I can't guess what the best +unlikely-to-be-blacklisted value is; if there is one, it's probably the +next one all the rude bots will be using anyway, and some goofy provider +like mine will blacklist it. + +## Error: OpenID failure: naive_verify_failed_network: Could not contact ID provider to verify response. + +Again, this could have various causes. It was helpful to bump the debug level +and get some logging, to see: + + 500 Can't connect to indieauth.com:443 (Net::SSL from Crypt-SSLeay can't + verify hostnames; either install IO::Socket::SSL or turn off verification + by setting the PERL_LWP_SSL_VERIFY_HOSTNAME environment variable to 0) + +I don't belong to the camp that solves every verification problem by turning +verification off, so this meant finding out how to get verification to be done. +It turns out there are two different Perl modules that can be used for SSL: + +* `IO::Socket::SSL` (verifies hostnames) +* `Net::SSL` (_does not_ verify hostnames) + +Both were installed on my hosted server. How was Perl deciding which one +to use? + +### set `PERL_NET_HTTPS_SSL_SOCKET_CLASS` appropriately + +It turns out +[there's an environment variable](https://rt.cpan.org/Public/Bug/Display.html?id=71599). +So just set `PERL_NET_HTTPS_SSL_SOCKET_CLASS` to `IO::Socket::SSL` and the +right module gets used, right? + +[Wrong](https://github.com/csirtgadgets/LWPx-ParanoidAgent/commit/fed6f7d7df8619df0754e8883cfad2ac15703a38#diff-2). +That change was made to `ParanoidAgent.pm` back in November 2013 because of an +unrelated [bug](https://github.com/csirtgadgets/LWPx-ParanoidAgent/issues/4) +in `IO::Socket::SSL`. 
Essentially, _hmm, something goes wrong in +`IO::Socket::SSL` when reading certain large documents, so we'll fix it by +forcing the use of `Net::SSL` instead (the one that never verifies hostnames!), +no matter what the admin has set `PERL_NET_HTTPS_SSL_SOCKET_CLASS` to!_ + +### undo change that broke `PERL_NET_HTTPS_SSL_SOCKET_CLASS` + +Plenty of [comments](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=738493) +quickly appeared about how good an idea that wasn't, and it was corrected in +June 2014 with [one commit](https://github.com/csirtgadgets/LWPx-ParanoidAgent/commit/a92ed8f45834a6167ff62d3e7330bb066b307a35) +to fix the original reading-long-documents issue in `IO::Socket::SSL` and +[another commit](https://github.com/csirtgadgets/LWPx-ParanoidAgent/commit/815c691ad5554a219769a90ca5f4001ae22a4019) +that reverts the forcing of `Net::SSL` no matter how the environment is set. + +Unfortunately, there isn't a release in CPAN yet that includes those two +commits, but they are only a few lines to edit into your own locally-installed +module. + +## Still naive_verify_failed_network, new improved reason + + 500 Can't connect to indieauth.com:443 (SSL connect attempt failed + with unknown error error:14090086:SSL + routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed) + +Yay, at least it's trying to verify! Now why can't it verify IndieAuth's +certificate? + +[Here's why](https://tools.ietf.org/html/rfc6066#section-3). As it turns out, +[indieauth.com](https://indieauth.com/) is itself a virtual host on a shared +server. If you naively try + + openssl s_client -connect indieauth.com:443 + +you get back a certificate for [indieweb.org](https://indieweb.org/) +instead, so the hostname won't verify. If you explicitly indicate what server +name you're connecting to: + + openssl s_client -connect indieauth.com:443 -servername indieauth.com + +then, magically, the correct certificate comes back. + +### ensure `OpenSSL`, `Net::SSLeay`, `IO::Socket::SSL` new enough for SNI + +If your `openssl` doesn't recognize the `-servername` option, it is too old +to do SNI, and a newer version needs to be built and installed. In fact, +even though SNI support was reportedly backported into OpenSSL 0.9.8f, it will +not be used by `IO::Socket::SSL` unless it is +[1.0 or higher](http://search.cpan.org/~sullr/IO-Socket-SSL-1.998/lib/IO/Socket/SSL.pod#SNI_Support). + +Then a recent `Net::SSLeay` perl module needs to be built and linked against it. + +### Local OpenSSL installation will need certs to trust + +Bear in mind that the OpenSSL distribution doesn't come with a collection +of trusted issuer certs. If a newer version is built and installed locally +(say, on a shared server where the system locations can't be written), it will +need to be given a directory of trusted issuer certs, say by linking to the +system-provided ones. However, a change to the certificate hash algorithm used +for the symlinks in that directory was [reportedly](http://www.cilogon.org/openssl1) +made with OpenSSL 1.0.0. So if the system-provided trusted certificate directory +was set up for an earlier OpenSSL version, all the certificates in it will be +fine but the hash symlinks will be wrong. That can be fixed by linking only the +named certificate files from the system directory into the newly-installed one, +and then running the new version of `c_rehash` there. 
+ +## Still certificate verify failed + +Using [SNI](https://tools.ietf.org/html/rfc6066#section-3)-supporting versions +of `IO::Socket::SSL`, `Net::SSLeay`, and `OpenSSL` doesn't do any good if an +upper layer hasn't passed down the name of the host being connected to so the +SSL layer can SNI for it. + +### ensure that `LWPx::ParanoidAgent` passes server name to SSL layer for SNI + +That was fixed in `LWPx::ParanoidAgent` with +[this commit](https://github.com/csirtgadgets/LWPx-ParanoidAgent/commit/df6df19ccdeeb717c709cccb011af35d3713f546), +which needs to be backported by hand if it hasn't made it into a CPAN release +yet. + +Only that still doesn't end the story, because that hand didn't know what +[this hand](https://github.com/noxxi/p5-io-socket-ssl/commit/4f83a3cd85458bd2141f0a9f22f787174d51d587#diff-1) +was doing. What good is passing the name in +`PeerHost` if the SSL code looks in `PeerAddr` first ... and then, if that +doesn't match a regex for a hostname, decides you didn't supply one at all, +without even looking at `PeerHost`? + +Happily, it is possible to assign a key that _explicitly_ supplies the +server name for SNI: + + --- LWPx/Protocol/http_paranoid.pm 2014-09-08 03:33:00.000000000 -0400 + +++ LWPx/Protocol/http_paranoid.pm 2014-09-08 03:33:27.000000000 -0400 + @@ -73,6 +73,7 @@ + close($el); + $sock = $self->socket_class->new(PeerAddr => $addr, + PeerHost => $host, + + SSL_hostname => $host, + PeerPort => $port, + Proto => 'tcp', + Timeout => $conn_timeout, + +... not submitted upstream yet, so needs to be applied by hand. + +# Success!! + +And with that, ladies and gents, I got my first successful OpenID login! +I'm pretty sure that if the same fixes can be applied to +[ikiwiki.info](/) itself, a wider range of OpenID logins (like mine, for +example :) will work here too. + +-- Chap diff --git a/doc/sandbox.mdwn b/doc/sandbox.mdwn index a53eeb3ae..f9fa321b3 100644 --- a/doc/sandbox.mdwn +++ b/doc/sandbox.mdwn @@ -1,3 +1,4 @@ + This is the [[SandBox]], a page anyone can edit to try out ikiwiki (version [[!version ]]). @@ -100,7 +101,7 @@ This is an email link: Send Mail

-This is some preformatted text. Each line is proceeded by four spaces. +What follows is some preformatted text. Each line is preceded by four spaces. Test diff --git a/doc/shortcuts.mdwn b/doc/shortcuts.mdwn index b4f6d8ef4..ca529c296 100644 --- a/doc/shortcuts.mdwn +++ b/doc/shortcuts.mdwn @@ -27,7 +27,7 @@ This page controls what shortcut links the wiki supports. * [[!shortcut name=debrt url="https://rt.debian.org/Ticket/Display.html?id=%s"]] * [[!shortcut name=debss url="http://snapshot.debian.org/package/%s/"]] * Usage: `\[[!debss package]]` or `\[[!debss package/version]]`. See for details. -* [[!shortcut name=debwiki url="https://wiki.debian.org/%s"]] +* [[!shortcut name=debwiki url="https://wiki.debian.org/%S"]] * [[!shortcut name=fdobug url="https://bugs.freedesktop.org/show_bug.cgi?id=%s" desc="freedesktop.org bug #%s"]] * [[!shortcut name=fdolist url="http://lists.freedesktop.org/mailman/listinfo/%s" desc="%s@lists.freedesktop.org"]] * [[!shortcut name=gnomebug url="https://bugzilla.gnome.org/show_bug.cgi?id=%s" desc="GNOME bug #%s"]] @@ -55,7 +55,7 @@ This page controls what shortcut links the wiki supports. * [[!shortcut name=whois url="http://reports.internic.net/cgi/whois?whois_nic=%s&type=domain"]] * [[!shortcut name=cve url="https://cve.mitre.org/cgi-bin/cvename.cgi?name=%s"]] * [[!shortcut name=flickr url="https://secure.flickr.com/photos/%s"]] -* [[!shortcut name=man url="http://linux.die.net/man/%s"]] +* [[!shortcut name=man url="http://manpages.debian.org/%s"]] * [[!shortcut name=ohloh url="https://www.ohloh.net/p/%s"]] * [[!shortcut name=cpanrt url="https://rt.cpan.org/Ticket/Display.html?id=%s" desc="CPAN RT#%s"]] * [[!shortcut name=novellbug url="https://bugzilla.novell.com/show_bug.cgi?id=%s" desc="bug %s"]] diff --git a/doc/tips/Hosting_Ikiwiki_and_master_git_repository_on_different_machines.mdwn b/doc/tips/Hosting_Ikiwiki_and_master_git_repository_on_different_machines.mdwn index 6bbaf3e6e..e6277d338 100644 --- a/doc/tips/Hosting_Ikiwiki_and_master_git_repository_on_different_machines.mdwn +++ b/doc/tips/Hosting_Ikiwiki_and_master_git_repository_on_different_machines.mdwn @@ -38,6 +38,8 @@ it on a remote machine, and tell Ikiwiki to use it instead of its local one. We will also ensure that the wiki is rendered whenever a commit is done to the git repository. +[[!img separate-web-git-servers.svg size=400x]] + # Conventions - We are building a wiki called *SITE*. diff --git a/doc/tips/Hosting_Ikiwiki_and_master_git_repository_on_different_machines/separate-web-git-servers.svg b/doc/tips/Hosting_Ikiwiki_and_master_git_repository_on_different_machines/separate-web-git-servers.svg new file mode 100644 index 000000000..b6095a2b7 --- /dev/null +++ b/doc/tips/Hosting_Ikiwiki_and_master_git_repository_on_different_machines/separate-web-git-servers.svg @@ -0,0 +1,783 @@ [783 lines of SVG markup, not readable here: a diagram of the separate web and git servers, with elements labelled "working clones", "repository", "srcdir", "destdir", ".git", "<html>", "ikiwiki.cgi", "post-update hook", "ikiwiki.cgi push", "web-side edit", "automatic rebuild", "git pull", "git push" and "pingee"] diff --git a/doc/tips/distributed_wikis.mdwn b/doc/tips/distributed_wikis.mdwn index 1d8a8f5a9..2547a2e9f 100644 --- a/doc/tips/distributed_wikis.mdwn +++ b/doc/tips/distributed_wikis.mdwn @@ -37,6 +37,8 @@ the git server.
### Separate webserver and git repository, the git srcdir being hosted on the webserver +[[!img Hosting_Ikiwiki_and_master_git_repository_on_different_machines/separate-web-git-servers.svg size=400x]] + This is the configuration described in [[tips/Hosting_Ikiwiki_and_master_git_repository_on_different_machines]]. One server hosts the web server (and the [[Ikiwiki cgi|cgi]]) and the git source dir; a second server hosts the git bare repository. This can be used when you have very limited access to the git server. diff --git a/doc/todo/Option_linktext_for_pagestats_directive.mdwn b/doc/todo/Option_linktext_for_pagestats_directive.mdwn index 8bbb7c2cf..ab5eb229b 100644 --- a/doc/todo/Option_linktext_for_pagestats_directive.mdwn +++ b/doc/todo/Option_linktext_for_pagestats_directive.mdwn @@ -194,3 +194,10 @@ Regards, > like `limit` (by analogy with SQL) or `max` as the canonical name for the > "number of things to match" parameter, at which point a non-numeric > `show` could mean this thing. --[[smcv]] + +>> [[!template id=gitbranch branch=smcv/pagestats-show +author="[[Louis|spalax]], [[smcv]]" +browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/pagestats-show]] +>> Here's a branch. It depends on my `ready/limit` branch +>> from [[todo/pick a new canonical name for equivalent of SQL limit]]. +>> --[[smcv]] diff --git a/doc/todo/calendar_autocreate.mdwn b/doc/todo/calendar_autocreate.mdwn index fc0b5c0c2..2a7350b79 100644 --- a/doc/todo/calendar_autocreate.mdwn +++ b/doc/todo/calendar_autocreate.mdwn @@ -177,3 +177,49 @@ sub gencalendaryear { > --[[smcv]] > > > Thank you for this review. -- [[Louis|spalax]] + +--- + +[[smcv]], can you please go on reviewing this? + +> I don't think I'm really the reviewer you want, since I don't have commit +> access (as you might be able to tell from the number of pending branches +> I have)... but nobody with commit access seems to be available to do +> reviews at the moment, so I'm probably the best you're going to get. +> +> + 0 0 * * * ikiwiki ~/ikiwiki.setup --refresh +> +> I think that should be `ikiwiki --setup ~/ikiwiki.setup --refresh` +> +> The indentation of some of the new code in `IkiWiki/Plugin/calendar.pm` +> is weird. Please use one hard tab (U+0009) per indent step: you seem +> to have used a mixture of one hard tab per indent or two spaces +> per indent, which looks bizarre for anyone whose tab size is not +> 2 spaces. +> +> + return unless $config{calendar_autocreate}; +> +> This is checked in `gencalendaryear` but not in `gencalendarmonth`. +> Shouldn't `gencalendarmonth` do it too? Alternatively, do the check +> in `scan`, which calls `gencalendarmonth` directly. +> +> + my $year = $date[5] + 1900; +> +> You calculate this, but you don't seem to do anything with it? +> +> + if (not exists $changed{$params{year}}) { +> + $changed{$params{year}} = (); +> + } +> + $changed{$params{year}}{$params{month}} = 1; +> +> `$changed{$params{year}}` is a scalar (you can tell because it starts with the +> `$` sigil) but `()` is a list. I think you want `{}` +> (a scalar that is a reference to an empty anonymous hash). +> +> However, that whole `if` block can be omitted, and you can just use +> `$changed{$params{year}}{$params{month}} = 1;`, because Perl will automatically +> create `$changed{$params{year}}` as a reference to an empty hash if necessary, +> in order to put the pair `$params{month} => 1` in it (the term to look +> up if you're curious is "autovivification"). 
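+> For example, this two-line sketch (not from the branch; literal values
+> stand in for `$params{year}` and `$params{month}`) behaves identically
+> to the explicit `if (not exists ...)` form:
+>
+>     my %changed;
+>     $changed{2014}{9} = 1;   # autovivifies $changed{2014} as a hash reference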
+ +> --[[smcv]] diff --git a/doc/todo/do_not_make_links_backwards.mdwn b/doc/todo/do_not_make_links_backwards.mdwn index 4059d8e2a..50720fed0 100644 --- a/doc/todo/do_not_make_links_backwards.mdwn +++ b/doc/todo/do_not_make_links_backwards.mdwn @@ -30,6 +30,35 @@ Discussion > > > > Originally, I named that parameter `backwards_links`, but then it wouldn't make sense in the long term, and isn't exactly neutral: it assumes the current way is backwards! Your suggestion is interesting however, but I don't think the rtl/ltr nomenclature is problematic, with proper documentation of course... --[[anarcat]] +> > > I still don't think `rtl`/`ltr` is the right terminology here. I think +> > > the "API" should say what you mean: the distinction being made is +> > > "text first" vs. "link first", so, say that. +> > > +> > > As far as I understand it, RTL languages like Arabic typically write +> > > text files "in logical order" (i.e. reading/writing order - first +> > > letter is first in the bytestream) and only apply RTL rendering on +> > > display. IkiWiki is UTF-8-only, and Unicode specifies that all +> > > Unicode text should be in logical order. The opposite of logical +> > > order is "display order", which is how you would have to mangle +> > > the file for it to appear correctly on a naive terminal that expects +> > > LTR; that can only work correctly for hard-wrapped text, I think. +> > > +> > > IkiWiki will parse files +> > > in logical order too; so if a link's text and destination are both +> > > written in Arabic, in text-before-link order in the source code, an +> > > Arabic reader starting from the right would still see the text +> > > before the link. Similarly, in your proposed link-before-text +> > > order, an Arabic reader would still see the link before the text +> > > (which in their case means further to the right). So I don't think +> > > it would make sense to suggest that +> > > one order was more appropriate for RTL languages than the other: if +> > > it's "more correct" (for whatever opinion of "correct") in English, then +> > > it's "more correct" in Arabic too. +> > > +> > > (If the destination is written in Latin then it gets +> > > more complicated, because the destination will be rendered LTR within an +> > > otherwise RTL document. I think the order still works though.) --[[smcv]] + There's a caveat: we can't have a per-wiki backwards_links option, because of the underlay, common to all wikis, which needs to be converted. So the option doesn't make much sense. Not sure how to deal with this... Maybe this needs to be at the package level? --[[anarcat]] > I've thought about adding a direction-neutral `\[[!link]]` directive - @@ -80,6 +109,7 @@ I think we can approach this rationally: 1. left to right (text then link) can be considered more natural, and should therefore be supported 2. it is supported in markdown using regular markdown links. in the proposed branch, the underlay wikilinks are converted to use regular markdown links + > Joey explicitly rejected this for a valid reason (it breaks inlining). See above. --[[smcv]] 3. ikiwiki links break other markup plugins, like mediawiki and creole, as those work right to left. 4. 
those are recognized "standards" used by other popular sites, like Wikipedia, or any wiki supporting the Creole markup, which is [most wikis](http://www.wikicreole.org/wiki/Engines) diff --git a/doc/todo/edittemplate_should_support_uuid__44___date_variables.mdwn b/doc/todo/edittemplate_should_support_uuid__44___date_variables.mdwn index 73f04adf0..6d702fedf 100644 --- a/doc/todo/edittemplate_should_support_uuid__44___date_variables.mdwn +++ b/doc/todo/edittemplate_should_support_uuid__44___date_variables.mdwn @@ -47,8 +47,8 @@ Changes to the structure of `$pagestate{$registering_page}{edittemplate}{$pagesp >> better way based on that, maybe global configuration in `$config`. >> --[[smcv]] ->>> [[!template id=gitbranch branch=smcv/ready/edittemplate - browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/edittemplate +>>> [[!template id=gitbranch branch=smcv/ready/edittemplate2 + browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/edittemplate2 author="Jonathon Anderson, [[smcv]]"]] >>> Here is a version of that branch that I [[would merge|users/smcv/ready]] if I could. >>> Changes since Jonathon's version: @@ -77,3 +77,11 @@ Changes to the structure of `$pagestate{$registering_page}{edittemplate}{$pagesp >>>> html5 would leave old evaluations of displaytime around in the repository. >>>> (example template: `\[[!meta date=""]]I wrote this post at >>>> \[[!displaytime ""]]`). --[[chrysn]] + +>>>>> That's a very good point; and Joey added `\[[!date "]]`, +>>>>> which does the same thing as your hypothetical `\[[!displaytime]]`, +>>>>> almost 5 years ago. Branch replaced by `smcv/ready/edittemplate2` +>>>>> which drops `formatted_time` and `html_time`, and adds a suggestion +>>>>> to use `\[[!date]]`. --[[smcv]] + +>>>>>> [[merged|done]] --[[smcv]] diff --git a/doc/todo/pick_a_new_canonical_name_for_equivalent_of_SQL_limit.mdwn b/doc/todo/pick_a_new_canonical_name_for_equivalent_of_SQL_limit.mdwn index daa520d71..4e70f8179 100644 --- a/doc/todo/pick_a_new_canonical_name_for_equivalent_of_SQL_limit.mdwn +++ b/doc/todo/pick_a_new_canonical_name_for_equivalent_of_SQL_limit.mdwn @@ -35,4 +35,10 @@ Which of those do Joey/other contributors prefer? Or if keeping `show=10` is preferred, what should be the conventional name for functionality like `\[[!map show=title]]`? -I personally like the idea of `\[[!inline limit=10]]`. --[[smcv]] +> [[!template id=gitbranch branch=smcv/ready/limit +author="[[Simon McVittie|smcv]]" +browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/limit]] +> [[!tag patch users/smcv/ready]] + +I personally prefer `\[[!inline limit=10]]` so I have put that in a branch. +Agreement/objections/better ideas welcome. --[[smcv]] diff --git a/doc/todo/redirect.mdwn b/doc/todo/redirect.mdwn new file mode 100644 index 000000000..87f6a67e7 --- /dev/null +++ b/doc/todo/redirect.mdwn @@ -0,0 +1,53 @@ +I suppose this isn't technically a bug, but whatever. + +I want symbolic links to be rendered as HTTP redirects. For example, +if we do this, + + touch foo.mkdwn + ln -s foo.mkdwn bar.mkdwn + git push baz.branchable.com + +then the following command should print 302 + + curl -o /dev/null -s -w "%{http_code}" http://baz.thomaslevine.com/bar/ + +> An interesting idea, but it conflicts somewhat with wanting symlinks to be +> treated as the referenced file when it's safe to do so, which would be +> great for [[todo/git-annex support]], and also good to avoid duplication +> for files in system-wide underlays. 
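+>
+> For reference, the mechanism being requested is just the standard CGI
+> redirect response from RFC 3875 (a sketch with a hypothetical target
+> URL, not part of ikiwiki; as discussed below, ordinary ikiwiki page
+> views are static files, so no CGI runs for them):
+>
+>     #!/usr/bin/perl
+>     # minimal CGI response producing the requested 302
+>     print "Status: 302 Found\r\n";
+>     print "Location: http://baz.thomaslevine.com/foo/\r\n";
+>     print "\r\n";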
+ +> Also, I don't think this is possible without help from the web server +> configuration: for instance, under Apache, I believe the only way to get +> an HTTP 302 redirect is via Apache-specific `.htaccess` files or +> system-level Apache configuration. +> +> In current ikiwiki, you can get a broadly similar effect by either +> using \[[!meta redir=foo]] (which does an HTML `<meta>` redirect) +> or reconfiguring the web server. --[[smcv]] + +>> The CGI spec (http://www.ietf.org/rfc/rfc3875) says that a CGI can cause a redirect by returning a Location: header. +>> So it's possible; desirable (due to your point about conflicting with git-annex support) is a different matter. + +>>> One of the major things that separates ikiwiki from other wiki software +>>> is that ikiwiki is a wiki compiler: ordinary page-views are purely +>>> static HTML, and the CGI only gets involved when you do something +>>> that really has to be dynamic (like an edit). +>>> +>>> However, there is no server-independent static content that ikiwiki +>>> could write out to the destdir that would result in that redirect. +>>> +>>> If you're OK with requiring the [[plugins/404]] plugin (and a +>>> web server where it works, which I think still means Apache) then +>>> it would be possible to write a plugin that detected symlinks, +>>> stored them in the `%wikistate`, and used them to make the +>>> [[plugins/404]] plugin (or its own hook similar to the one +>>> in that plugin) do a 302 redirect instead of a 404. +>>> Similarly, a plugin that assumed a suitable Apache +>>> configuration with fairly broad `AllowOverride`, +>>> and wrote out `.htaccess` files, would be a feasible thing +>>> for someone to write. +>>> +>>> I don't think this is a bug; I think it's a request for a +>>> feature that not everyone will want. The solution to those +>>> is for someone who wants the feature to +>>> [[write a plugin|plugins/write]]. --[[smcv]] diff --git a/doc/todo/should_use_a_standard_encoding_for_utf_chars_in_filenames.mdwn b/doc/todo/should_use_a_standard_encoding_for_utf_chars_in_filenames.mdwn index a454d7da5..5a55fcce5 100644 --- a/doc/todo/should_use_a_standard_encoding_for_utf_chars_in_filenames.mdwn +++ b/doc/todo/should_use_a_standard_encoding_for_utf_chars_in_filenames.mdwn @@ -5,7 +5,7 @@ I hope it's a bug, not a feature and you fix it soon :) --[[Paweł|ptecza]] > ikiwiki only allows a very limited set of characters raw in page names, > this is done as a deny-by-default security thing. All other characters -> need to be encoded in __code__ format, where "code" is the character +> need to be encoded in `__code__` format, where "code" is the character > number. This is normally done for you, but if you're adding a page > manually, you need to handle it yourself. --[[Joey]] @@ -48,6 +48,11 @@ I hope it's a bug, not a feature and you fix it soon :) --[[Paweł|ptecza]] >>>>>> What's your locale? I have both pl\_PL (ISO-8859-2) and pl\_PL.UTF-8, >>>>>> but I use pl\_PL. Is it wrong? --[[Paweł|ptecza]] +>>>>>>> IkiWiki assumes UTF-8 throughout, so escaped filename characters +>>>>>>> should be `__x____y____z__` where x, y, z are the bytes of the +>>>>>>> UTF-8 encoding of the character. I don't know how to achieve that +>>>>>>> from a non-UTF-8 locale. --[[smcv]] + >>>> Now, as to UTF7, in retrospect, using a standard encoding might be a >>>> better idea than coming up with my own encoding for filenames. Can >>>> you provide a pointer to a description to modified-UTF7? 
--[[Joey]] @@ -58,4 +63,38 @@ I hope it's a bug, not a feature and you fix it soon :) --[[Paweł|ptecza]] >>>>> There is a Perl [Unicode::IMAPUtf7](http://search.cpan.org/~fabpot/Unicode-IMAPUtf7-2.01/lib/Unicode/IMAPUtf7.pm) >>>>> module at the CPAN, but probably it hasn't been debianized yet :( --[[Paweł|ptecza]] +> Note: [libencode-imaputf7-perl][1] has made it into debian. +> +>> "IMAP UTF-7" uses & as an escape character, which seems like a recipe +>> for shell injection vulnerabilities... so I would not recommend it +>> for this particular use. --[[smcv]] + +> I would value some clarification, in the ikiwiki setup file I have +> +> wiki_file_chars: -[:alnum:][\p{Arabic}()]+/.:_ +> +> Ikiwiki doesn't seem to produce any errors on the commandline for this, but +> when I attempt to create a new post with Arabic characters from the web I get the following error: +> +> Error: Cannot decode string with wide characters at /usr/lib/x86_64-linux-gnu/perl/5.20/Encode.pm line 215. +> +> Should the modified regexp not be sufficient? +> Ikiwiki 3.20140815. +> --[[mhameed]] + +>> This seems like a bug: in principle non-ASCII in `wiki_file_chars` should work, +>> in practice it does not. I would suggest either using the default +>> `wiki_file_chars`, or digging into the code to find what is wrong. +>> Solving this sort of bug usually requires having a clear picture of +>> which "strings" are bytestrings, and which "strings" are Unicode. --[[smcv]] + +>>> mhameed confirmed on IRC that anarcat's [[patch]] from +>>> [[bugs/garbled_non-ascii_characters_in_body_in_web_interface]] fixes this. +>>> --[[smcv]] + +>>>> Merged that patch. Not marking this page as done, because the todo +>>>> about using a standard encoding still stands (although I'm not at +>>>> all sure there's an encoding that would be better). --[[smcv]] + [[wishlist]] +[1]: https://packages.debian.org/search?suite=all&section=all&arch=any&searchon=names&keywords=libencode-imaputf7-perl diff --git a/doc/todo/upload__95__figure.mdwn b/doc/todo/upload__95__figure.mdwn index d8dd65921..a63e183e8 100644 --- a/doc/todo/upload__95__figure.mdwn +++ b/doc/todo/upload__95__figure.mdwn @@ -18,3 +18,5 @@ Unfortunately, Github shows [[raw code|https://github.com/paternal/ikiwiki/blob/ > > This particular SVG [[looks good to me|users/smcv/ready]] and I've > mirrored it in my own git repo. 
--[[smcv]] + +>> [[merged|done]] --[[smcv]] diff --git a/plugins/proxy.py b/plugins/proxy.py index d70a967a5..b61eb466c 100755 --- a/plugins/proxy.py +++ b/plugins/proxy.py @@ -158,15 +158,24 @@ class _IkiWikiExtPluginXMLRPCHandler(object): def send_rpc(self, cmd, in_fd, out_fd, *args, **kwargs): xml = _xmlrpc_client.dumps(sum(kwargs.items(), args), cmd) self._debug_fn( - "calling ikiwiki procedure `{0}': [{1}]".format(cmd, xml)) - _IkiWikiExtPluginXMLRPCHandler._write(out_fd, xml.encode('utf8')) + "calling ikiwiki procedure `{0}': [{1}]".format(cmd, repr(xml))) + # ensure that encoded is a str (bytestring in Python 2, Unicode in 3) + if str is bytes and not isinstance(xml, str): + encoded = xml.encode('utf8') + else: + encoded = xml + _IkiWikiExtPluginXMLRPCHandler._write(out_fd, encoded) self._debug_fn('reading response from ikiwiki...') - xml = _IkiWikiExtPluginXMLRPCHandler._read(in_fd).decode('utf8') + response = _IkiWikiExtPluginXMLRPCHandler._read(in_fd) + if str is bytes and not isinstance(response, str): + xml = response.encode('utf8') + else: + xml = response self._debug_fn( 'read response to procedure {0} from ikiwiki: [{1}]'.format( - cmd, xml)) + cmd, repr(xml))) if xml is None: # ikiwiki is going down self._debug_fn('ikiwiki is going down, and so are we...') diff --git a/t/img.t b/t/img.t new file mode 100755 index 000000000..9a48cb1c5 --- /dev/null +++ b/t/img.t @@ -0,0 +1,82 @@ +#!/usr/bin/perl +# +# unit test that creates test images (png, svg, multi-page pdf), runs ikiwiki +# on them, checks the resulting images for plausibility based on their image +# sizes, and checks if they vanish when not required in the build process any +# more +# +# if you have trouble here, be aware that there are three debian packages that +# can provide Image::Magick: perlmagick, libimage-magick-perl and +# graphicsmagick-libmagick-dev-compat +# +package IkiWiki; + +use warnings; +use strict; +use Test::More; + +BEGIN { use_ok("IkiWiki"); } +BEGIN { use_ok("Image::Magick"); } + +ok(! system("rm -rf t/tmp; mkdir -p t/tmp/in")); + +ok(! system("cp t/img/redsquare.png t/tmp/in/redsquare.png")); +writefile("emptysquare.svg", "t/tmp/in", '<svg width="30" height="30"/>'); +# using different image sizes for different pages, so the pagenumber selection can be tested easily +ok(! system("cp t/img/twopages.pdf t/tmp/in/twopages.pdf")); + +writefile("imgconversions.mdwn", "t/tmp/in", <<EOF +[[!img redsquare.png size=10x]] +[[!img redsquare.png size=30x]] expecting 30x30 +[[!img emptysquare.svg size=10x]] +[[!img twopages.pdf size=12x]] +[[!img twopages.pdf size=16x pagenumber=1]] +EOF +); + +ok(! system("make -s ikiwiki.out")); + +my $command = "perl -I. ./ikiwiki.out -set usedirs=0 -plugin img -underlaydir=underlays/basewiki -set underlaydirbase=underlays -templatedir=templates t/tmp/in t/tmp/out -verbose"; + +ok(! system($command)); + +sub size { + my $filename = shift; + my $im = Image::Magick->new(); + my $r = $im->Read($filename); + return "no image" if $r; + my $w = $im->Get("width"); + my $h = $im->Get("height"); + return "${w}x${h}"; +} + +my $outpath = "t/tmp/out/imgconversions"; +my $outhtml = readfile("$outpath.html"); + +is(size("$outpath/10x-redsquare.png"), "10x10"); +ok(! -e "$outpath/30x-redsquare.png"); +ok($outhtml =~ /width="30" height="30".*expecting 30x30/); +# if this fails, you need libmagickcore-6.q16-2-extra installed +is(size("$outpath/10x-emptysquare.png"), "10x10"); +is(size("$outpath/12x-twopages.png"), "12x12"); +is(size("$outpath/16x-p1-twopages.png"), "16x2"); + +# now let's remove them again + +if (1) { # for easier testing + writefile("imgconversions.mdwn", "t/tmp/in", "nothing to see here"); + + ok(! system("$command --refresh")); + + ok(! -e "$outpath/10x-simple.png"); + ok(! -e "$outpath/10x-simple-svg.png"); + ok(! -e "$outpath/10x-simple-pdf.png"); + ok(! -e "$outpath/10x-p1-simple-pdf.png"); + + # cleanup + ok(! 
system("rm -rf t/tmp")); +} +done_testing; + +1; diff --git a/t/img/redsquare.png b/t/img/redsquare.png new file mode 100644 index 000000000..0033932aa Binary files /dev/null and b/t/img/redsquare.png differ diff --git a/t/img/twopages.pdf b/t/img/twopages.pdf new file mode 100644 index 000000000..8be9b6539 Binary files /dev/null and b/t/img/twopages.pdf differ diff --git a/t/inline.t b/t/inline.t new file mode 100755 index 000000000..726227b8f --- /dev/null +++ b/t/inline.t @@ -0,0 +1,68 @@ +#!/usr/bin/perl +use warnings; +use strict; +use Test::More; +use IkiWiki; + +my $blob; + +ok(! system("rm -rf t/tmp")); +ok(! system("mkdir t/tmp")); + +sub write_old_file { + my $name = shift; + my $content = shift; + + writefile($name, "t/tmp/in", $content); + ok(utime(333333333, 333333333, "t/tmp/in/$name")); +} + +write_old_file("protagonists.mdwn", + '[[!inline pages="protagonists/*" rootpage="protagonists/new"]]'); +write_old_file("friends.mdwn", + '[[!inline pages="friends/*" postform=yes sort=title show=2]]'); +write_old_file("antagonists.mdwn", + '[[!inline pages="antagonists/*"]]'); +write_old_file("enemies.mdwn", + '[[!inline pages="enemies/*" postform=no rootpage=enemies sort=title reverse=yes show=2]]'); +foreach my $page (qw(protagonists/shepard protagonists/link + antagonists/saren antagonists/ganondorf + friends/garrus friends/liara friends/midna friends/telma + enemies/benezia enemies/geth enemies/rachni + enemies/zant)) { + write_old_file("$page.mdwn", "this page is {$page}"); +} + +ok(! system("make -s ikiwiki.out")); + +my $command = "perl -I. ./ikiwiki.out -set usedirs=0 -plugin inline -url=http://example.com -cgiurl=http://example.com/ikiwiki.cgi -rss -atom -underlaydir=underlays/basewiki -set underlaydirbase=underlays -templatedir=templates t/tmp/in t/tmp/out -verbose"; + +ok(! system($command)); + +ok(! system("$command -refresh")); + +$blob = readfile("t/tmp/out/protagonists.html"); +like($blob, qr{Add a new post}, 'rootpage=yes gives postform'); +like($blob, qr{ 2; +use Test::More tests => 3; BEGIN { use_ok("IkiWiki"); } @@ -19,4 +19,8 @@ $config{add_plugins}=[qw(rst)]; IkiWiki::loadplugins(); IkiWiki::checkconfig(); -ok(IkiWiki::htmlize("foo", "foo", "rst", "foo\n") =~ m{\s*
-<div class="document">
-<p>foo</p>
-</div>
-\s*});
+like(IkiWiki::htmlize("foo", "foo", "rst", "foo\n"), qr{\s*
+<div class="document">
+<p>foo</p>
+</div>
+\s*});
+# regression test for [[bugs/rst fails on file containing only a number]]
+my $html = IkiWiki::htmlize("foo", "foo", "rst", "11");
+$html =~ s/<[^>]*>//g;
+like($html, qr{\s*11\s*});
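(One nice property of the `like()` form adopted above, shown here as a
standalone sketch rather than anything from the patch: on failure it reports
the actual string and the pattern, which a bare `ok(... =~ ...)` does not.)

    use Test::More tests => 2;
    my $html = "<p>11</p>";
    (my $text = $html) =~ s/<[^>]*>//g;   # same tag-stripping as the test above
    ok($text =~ m{\s*11\s*});             # failure would say only "not ok 1"
    like($text, qr{\s*11\s*});            # failure would also show $text and the regex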