my %old=map { $_ => 1 }
split("\n", readfile(srcfile($pagesources{$params{page}})));
foreach my $line (split("\n", $params{content})) {
- push @diff, $line if ! exists $old{$_};
+ push @diff, $line if ! exists $old{$line};
}
$params{diff}=join("\n", @diff);
}
showform($form, $buttons, $session, $q);
}
-sub cgi_custom_failure ($$) {
- my $header=shift;
+sub cgi_custom_failure ($$$) {
+ my $q=shift;
+ my $httpstatus=shift;
my $message=shift;
- print $header;
+ print $q->header(
+ -status => $httpstatus,
+ -charset => 'utf-8',
+ );
print $message;
# Internet Explod^Hrer won't show custom 404 responses
$session->delete();
cgi_savesession($session);
cgi_custom_failure(
- $q->header(-status => "403 Forbidden"),
+ $q, "403 Forbidden",
gettext("You are banned."));
}
}
if (exists $ENV{REDIRECT_STATUS} &&
$ENV{REDIRECT_STATUS} eq '404') {
- my $page = cgi_page_from_404($ENV{REDIRECT_URL},
+ my $page = cgi_page_from_404(
+ Encode::decode_utf8($ENV{REDIRECT_URL}),
$config{url}, $config{usedirs});
IkiWiki::Plugin::goto::cgi_goto($cgi, $page);
}
return if ! defined $form->field("do") || ($form->field("do") ne "edit" && $form->field("do") ne "create") ;
- my $filename=$q->param('attachment');
+ my $filename=Encode::decode_utf8($q->param('attachment'));
if (defined $filename && length $filename &&
($form->submitted eq "Upload Attachment" || $form->submitted eq "Save Page")) {
my $session=$params{session};
IkiWiki::saveindex();
}
elsif ($form->submitted eq "Insert Links") {
- my $page=quotemeta($q->param("page"));
+ my $page=quotemeta(Encode::decode_utf8($q->param("page")));
my $add="";
foreach my $f ($q->param("attachment_select")) {
+ $f=Encode::decode_utf8($f);
$f=~s/^$page\///;
$add.="[[$f]]\n";
}
link => htmllink($page, $page, $f, noimageinline => 1),
size => IkiWiki::Plugin::filecheck::humansize((stat(_))[7]),
mtime => displaytime($IkiWiki::pagemtime{$f}),
+ mtime_raw => $IkiWiki::pagemtime{$f},
};
}
}
IkiWiki::check_canedit($page, $cgi, $session);
$postcomment=0;
- my $location=unique_comment_location($page, $config{srcdir});
-
my $content = "[[!comment format=$type\n";
# FIXME: handling of double quotes probably wrong?
my $subject = $form->field('subject');
if (defined $subject && length $subject) {
$subject =~ s/"/"/g;
- $content .= " subject=\"$subject\"\n";
}
+ else {
+ $subject = "comment ".(num_comments($page, $config{srcdir}) + 1);
+ }
+ $content .= " subject=\"$subject\"\n";
$content .= " date=\"" . decode_utf8(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime)) . "\"\n";
$editcontent =~ s/"/\\"/g;
$content .= " content=\"\"\"\n$editcontent\n\"\"\"]]\n";
+ my $location=unique_comment_location($page, $content, $config{srcdir});
+
# This is essentially a simplified version of editpage:
# - the user does not control the page that's created, only the parent
# - it's always a create operation, never an edit
if (! $ok) {
my $penddir=$config{wikistatedir}."/comments_pending";
- $location=unique_comment_location($page, $penddir);
+ $location=unique_comment_location($page, $content, $penddir);
writefile("$location._comment", $penddir, $content);
IkiWiki::printheader($session);
print IkiWiki::misctemplate(gettext(gettext("comment stored for moderation")),
if ($action eq 'Accept') {
my $content=eval { readfile($file) };
next if $@; # file vanished since form was displayed
- my $dest=unique_comment_location($page, $config{srcdir})."._comment";
+ my $dest=unique_comment_location($page, $content, $config{srcdir})."._comment";
writefile($dest, $config{srcdir}, $content);
if ($config{rcs} and $config{comments_commit}) {
IkiWiki::rcs_add($dest);
}
}
-sub unique_comment_location ($) {
+sub num_comments ($$) {
my $page=shift;
my $dir=shift;
+ my @comments=glob("$dir/$page/$config{comments_pagename}*._comment");
+ return @comments;
+}
+
+sub unique_comment_location ($$$) {
+ my $page=shift;
+
+ eval q{use Digest::MD5 'md5_hex'};
+ error($@) if $@;
+ my $content_md5=md5_hex(shift);
+
+ my $dir=shift;
+
my $location;
- my $i = 0;
+ my $i = num_comments($page, $dir);
do {
$i++;
- $location = "$page/$config{comments_pagename}$i";
+ $location = "$page/$config{comments_pagename}${i}_${content_md5}";
} while (-e "$dir/$location._comment");
return $location;
if (! exists $pagesources{$page}) {
IkiWiki::cgi_custom_failure(
- $cgi->header(-status => "404 Not Found"),
+ $cgi,
+ "404 Not Found",
IkiWiki::misctemplate(gettext("missing page"),
"<p>".
sprintf(gettext("The page %s does not exist."),
if (! length $link) {
IkiWiki::cgi_custom_failure(
- $q->header(-status => "404 Not Found"),
+ $q,
+ "404 Not Found",
IkiWiki::misctemplate(gettext("missing page"),
"<p>".
sprintf(gettext("The page %s does not exist."),
if ($params{show}) {
$num=$params{show};
}
- if ($params{feedshow} && $num < $params{feedshow}) {
+ if ($params{feedshow} && $num < $params{feedshow} && $num > 0) {
$num=$params{feedshow};
}
if ($params{skip} && $num) {
my $mapnum=shift;
return "" unless exists $maps{$mapnum};
my %params=%{$maps{$mapnum}};
+ my $connected=IkiWiki::yesno($params{connected});
# Get all the items to map.
my %mapitems = map { $_ => urlto($_, $params{destpage}) }
print OUT "charset=\"utf-8\";\n";
print OUT "ratio=compress;\nsize=\"".($params{width}+0).", ".($params{height}+0)."\";\n"
if defined $params{width} and defined $params{height};
+ my %shown;
+ my $show=sub {
+ my $item=shift;
+ if (! $shown{$item}) {
+ print OUT "\"$item\" [shape=box,href=\"$mapitems{$item}\"];\n";
+ $shown{$item}=1;
+ }
+ };
foreach my $item (keys %mapitems) {
- print OUT "\"$item\" [shape=box,href=\"$mapitems{$item}\"];\n";
+ $show->($item) unless $connected;
foreach my $link (map { bestlink($item, $_) } @{$links{$item}}) {
- print OUT "\"$item\" -> \"$link\";\n"
- if $mapitems{$link};
+ next unless length $link and $mapitems{$link};
+ foreach my $endpoint ($item, $link) {
+ $show->($endpoint);
+ }
+ print OUT "\"$item\" -> \"$link\";\n";
}
}
print OUT "}\n";
- close OUT;
+ close OUT || error gettext("failed to run dot");
local $/=undef;
my $ret="<object data=\"".urlto($dest, $params{destpage}).
"\" type=\"image/png\" usemap=\"#linkmap$mapnum\">\n".
<IN>.
"</object>";
- close IN;
+ close IN || error gettext("failed to run dot");
waitpid $pid, 0;
+ if ($?) {
+ error gettext("failed to run dot");
+ }
$SIG{PIPE}="DEFAULT";
error gettext("failed to run dot") if $sigpipe;
my @ret;
my $line = $results[0];
- while ($line =~ m/\s+key\s"(.*?)"\nsignature\s"(ok|bad|unknown)"\n\s+name\s"(.*?)"\n\s+value\s"(.*?)"\n\s+trust\s"(trusted|untrusted)"\n/sg) {
+ while ($line =~ m/\s+key\s["\[](.*?)[\]"]\nsignature\s"(ok|bad|unknown)"\n\s+name\s"(.*?)"\n\s+value\s"(.*?)"\n\s+trust\s"(trusted|untrusted)"\n/sg) {
push @ret, {
key => $1,
signature => $2,
$max = $counts{$page} if $counts{$page} > $max;
}
+ if (exists $params{show}) {
+ my $i=0;
+ my %show;
+ foreach my $key (sort { $counts{$b} <=> $counts{$a} } keys %counts) {
+ last if ++$i > $params{show};
+ $show{$key}=$counts{$key};
+ }
+ %counts=%show;
+ }
+
if ($style eq 'table') {
return "<table class='pageStats'>\n".
join("\n", map {
removal_confirm($q, $session, 0, $form->field("page"));
}
elsif ($form->submitted eq "Remove Attachments") {
- my @selected=$q->param("attachment_select");
+ my @selected=map { Encode::decode_utf8($_) } $q->param("attachment_select");
if (! @selected) {
error(gettext("Please select the attachments to remove."));
}
postremove($session);
}
elsif ($form->submitted eq 'Remove' && $form->validate) {
- my @pages=$q->param("page");
+ my @pages=$form->field("page");
# Validate removal by checking that the page exists,
# and that the user is allowed to edit(/remove) it.
}
}
else {
- removal_confirm($q, $session, 0, $q->param("page"));
+ removal_confirm($q, $session, 0, $form->field("page"));
}
exit 0;
if (defined $form->field("do") && ($form->field("do") eq "edit" ||
$form->field("do") eq "create")) {
+ IkiWiki::decode_form_utf8($form);
my $q=$params{cgi};
my $session=$params{session};
rename_start($q, $session, 0, $form->field("page"));
}
elsif ($form->submitted eq "Rename Attachment") {
- my @selected=$q->param("attachment_select");
+ my @selected=map { Encode::decode_utf8($_) } $q->param("attachment_select");
if (@selected > 1) {
error(gettext("Only one attachment can be renamed at a time."));
}
if ($q->param("do") eq 'rename') {
my $session=shift;
- my ($form, $buttons)=rename_form($q, $session, $q->param("page"));
+ my ($form, $buttons)=rename_form($q, $session, Encode::decode_utf8($q->param("page")));
IkiWiki::decode_form_utf8($form);
if ($form->submitted eq 'Cancel') {
# These untaints are safe because of the checks
# performed in check_canrename later.
- my $src=$q->param("page");
+ my $src=$form->field("page");
my $srcfile=IkiWiki::possibly_foolish_untaint($pagesources{$src});
- my $dest=IkiWiki::possibly_foolish_untaint(titlepage($q->param("new_name")));
+ my $dest=IkiWiki::possibly_foolish_untaint(titlepage($form->field("new_name")));
my $destfile=$dest;
if (! $q->param("attachment")) {
my $type=$q->param('type');
my $page="";
my $index="";
my %anchors;
- my $curlevel;
- my $startlevel=0;
+ my $startlevel=($params{startlevel} ? $params{startlevel} : 0);
+ my $curlevel=$startlevel-1;
my $liststarted=0;
my $indent=sub { "\t" x $curlevel };
$p->handler(start => sub {
my $anchor="index".++$anchors{$level}."h$level";
$page.="$text<a name=\"$anchor\"></a>";
- # Take the first header level seen as the topmost level,
+ # Unless we're given startlevel as a parameter,
+ # take the first header level seen as the topmost level,
# even if there are higher levels seen later on.
if (! $startlevel) {
$startlevel=$level;
$curlevel=$startlevel-1;
}
+ elsif (defined $params{startlevel} &&
+ $level < $params{startlevel}) {
+ return;
+ }
elsif ($level < $startlevel) {
$level=$startlevel;
}
-ikiwiki (3.20091203) UNRELEASED; urgency=low
+ikiwiki (3.20091219) UNRELEASED; urgency=low
+
+ * pagestats: Add show parameter. Closes: #562129 (David Paleino)
+ * toc: Add startlevel parameter. (kerravonsen)
+ * Remove example ikiwiki setup file from the Debian package. This
+ re-enables linking to /usr/share/ikiwiki/examples which has the
+ example sites also used by auto-blog.setup. The example setup file
+ can be generated at any time using ikiwiki --dumpsetup so I do
+ not see a reason to ship it. Closes: #562183
+ * Use env hack in python scripts.
+ * comments: Add a checksum to the name of comment pages, to
+ avoid merge conflicts when comments are posted to two branches of a
+ site.
+ * linkmap: Add option to omit disconnected pages from the map.
+ * inline: Fix bug that limited displayed pages when feedshow was
+ specified w/o show.
+
+ -- Joey Hess <joeyh@debian.org> Fri, 25 Dec 2009 14:31:22 -0500
+
+ikiwiki (3.20091218) unstable; urgency=low
- * cvs: Add missing bit to Automator.
* Add complete Spanish basewiki translation done by Fernando Gonzalez de
Requena.
+ * Improve javascript onload handling.
+ * monotone: Deal with format change in version 0.45.
+ (Thanks, Richard Levitte)
+ * cvs: Add missing bit to Automator.
+ * attachment: Fix reversion in attachment sorting by age.
+ * Fix utf-8 problems in rename, remove, attachment, 404, sourcepage, and
+ goto.
- -- Joey Hess <joeyh@debian.org> Wed, 02 Dec 2009 17:22:21 -0500
+ -- Joey Hess <joeyh@debian.org> Fri, 18 Dec 2009 20:47:24 -0500
ikiwiki (3.20091202) unstable; urgency=low
+++ /dev/null
-ikiwiki.setup
--- /dev/null
+I've got a wiki where editing requires [[plugins/httpauth]] (with
+`cgiauthurl` working nicely). I now want to let the general public
+edit Discussion subpages, so I enabled [[plugins/anonok]] and set
+`anonok_pagespec` to `'*/Discussion'`, but HTTP auth is still being
+required for those.
+
+(Actually, what I'll really want to do is probably [[plugins/lockedit]]
+and a whitelist of OpenIDs in `locked_pages`...)
+
+--[[schmonz]]
> Discussion pages should clearly be a special case that don't get Discussion
> links put at the top... aaand.. [[bugs/done]]! --[[Joey]]
+>> This bug appears to have returned. For example,
+>> [[plugins/contrib/unixauth/discussion]] has a Discussion link. -- [[schmonz]]
+
>>> Joey, I've just seen that you closed that bug in ikiwiki 1.37, but it seems
>>> you fixed it only for English "discussion" page. The bug still occurs
>>> for the international "discussion" pages. I have backported ikiwiki 1.40
--- /dev/null
+Current the rst plugin uses this shebang line:
+
+ #!/usr/bin/python
+
+The problem is that rst plugin uses some feature (for example, iterator comprehension) which is unavailable on old version of Python.
+
+So the rst plugin will not work on a machine which has an old version of python in the system path even though
+the user has installed a new version of python in another place. For example, I am using ikiwiki with the rst plugin on Mac OS X 10.4 which ships python 2.3 but I do have python2.6 installed on /opt/local/bin/python (via macports).
+
+Thus I suggest to change the shebang line to:
+
+ #!/usr/bin/env python
+
+> [[done]], although the irony of all the perl hashbangs in ikiwiki
+> being hardcoded doesn't escape me. --[[Joey]]
the action list (Edit, RecentChanges, etc.) as tabs.
[[!meta stylesheet="actiontabs"]]
+* **[wiki.css](http://cyborginstitute.net/includes/wiki.css)** by [[tychoish]].
+ I typically throw this in as `local.css` in new wikis as a slightly more clear and readable
+ layout for wikis that need to be functional and elegant, but not necessarily uniquely designed.
+ Currently in use by the [the outeralliance wiki](http://oa.criticalfutures.com/).
+
+
If your web browser allows selecting between multiple stylesheets, this
page can be viewed using many of the stylesheets above. For example, if
using Epiphany with the Select Stylesheet extension enabled, use View ->
+++ /dev/null
-This is a new bug report!
FooBar does not have enough bugs, which suggests that it's not a real Free
Software project. Please help create more bugs by adding code to FooBar!
:-)
-And what now?
* [[kaizer]] `git://github.com/engla/ikiwiki.git`
* [[bbb]] `http://git.boulgour.com/bbb/ikiwiki.git`
* [[KathrynAndersen]] `git://github.com/rubykat/ikiplugins.git`
+* [[ktf]] `git://github.com/ktf/ikiwiki.git`
+
## branches
Reading the documentation I would think that `feedshow` does not
influence `show`.
- [[!inline pages="./blog/*" archive=yes quick=yes feedshow=10 sort=title reverse=yes]]
+ \[[!inline pages="./blog/*" archive=yes quick=yes feedshow=10 sort=title reverse=yes]]
Only ten pages are listed in this example although `archive` is set to
yes. Removing `feedshow=10` all matching pages are shown.
Is that behaviour intended?
+> Is something going wrong because `quick="yes"` [[»turns off generation of any feeds«|inline]]? --[[PaulePanter]]
+
--[[PaulePanter]]
+
+>> Bug was that if feedshow was specified without show it limited to it incorrectly. Fixed. --[[Joey]]
Only links between mapped pages will be shown; links pointing to or from
unmapped pages will be omitted. If the pages to include are not specified,
-the links between all pages (and other files) in the wiki are mapped. For
-best results, only a small set of pages should be mapped, since otherwise
-the map can become very large, unwieldy, and complicated.
+the links between all pages (and other files) in the wiki are mapped.
Here are descriptions of all the supported parameters to the `linkmap`
directive:
* `height`, `width` - Limit the size of the map to a given height and width,
in inches. Both must be specified for the limiting to take effect, otherwise
the map's size is not limited.
+* `connected` - Controls whether to include pages on the map that link to
+ no other pages (connected=no, the default), or to only show pages that
+ link to others (connected=yes).
+
+For best results, only a small set of pages should be mapped, since
+otherwise the map can become very large, unwieldy, and complicated.
+If too many pages are included, the map may get so large that graphviz
+cannot render it. Using the `connected` parameter is a good way to prune
+out pages that clutter the map.
[[!meta robots="noindex, follow"]]
\[[!pagestats pages="tags/* and not tags/linux" among="tagged(linux)"]]
+The optional `show` parameter limits display to the specified number of
+pages. For instance, to show a table of the top ten pages with the most
+links:
+
+ \[[!pagestats style="table" show="10"]]
+
[[!meta robots="noindex, follow"]]
Thanks
-----
+> Hello unknown person.
+
+> I think it would require a different approach to what "tags" are, and/or what "pagestats" are. The pagestats plugin gives statistical information about *pages*, so it requires the pages in question to exist before it can get information about them. The tags plugin creates links to tag *pages*, with the expectation that a human being will create said pages and put whatever content they want on them (such as describing what the tag is about, and a map linking back to the tagged pages).
+
+> The approach that [PmWiki](http://www.pmwiki.org) takes is that it enables the optional auto-creation of (empty) pages which match a particular "group" (set of sub-pages); thus one could set all the "tags/*" pages to be auto-created, creating a new tags/foo page the first time the \[[!tag foo]] directive is used. See [[todo/auto-create_tag_pages_according_to_a_template]] for more discussion on this idea.
+> -- [[KathrynAndersen]]
The toc directive will take the level of the first header as the topmost
level, even if there are higher levels seen later in the file.
+To create a table of contents that only shows headers starting with a given
+level, use the `startlevel` parameter. For example, to show only h2 and
+smaller headers:
+
+ \[[!toc startlevel=2]]
+
The table of contents will be created as an ordered list. If you want
an unordered list instead, you can change the list-style in your local
style sheet.
========================
* [This wiki](http://ikiwiki.info) (of course!)
+<!-- * [NetBSD wiki](http://wiki.netbsd.org) -->
* The [GNU Hurd](http://www.gnu.org/software/hurd/)
* [DragonFly BSD](http://www.dragonflybsd.org/)
* [Monotone](http://monotone.ca/wiki/FrontPage/)
[[!meta title="server move"]]
-The ikiwiki.info domain has been moved to a new server. If you can see
-this, though, your DNS has not caught up and you're using the old server
-still. Any changes made will be synced over to the new server, so don't
-worry. --[[Joey]]
+The ikiwiki.info domain has been moved to a new server. If you can see
+this, your DNS has already caught up and you are using the new server.
+By the way, the new server should be somewhat faster.
+--[[Joey]]
+++ /dev/null
-ikiwiki 3.20091022 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * edittemplate: Allow template page name to be specified using anything
- legal for a wikilink (including eg, leading slashes).
- * edittemplate: Work around bug #551499 in CGI::FormBuilder.
- * Fix a bug introduced in the last version that caused ikiwiki
- to skip all files if a sourcedir of "./" was specified.
- * Support CFLAGS when building wrapper.
- * meta: Gather permalink info on scan pass so it is available
- to inline when using a template that does not include page content."""]]
\ No newline at end of file
--- /dev/null
+ikiwiki 3.20091218 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * Add complete Spanish basewiki translation done by Fernando Gonzalez de
+ Requena.
+ * Improve javascript onload handling.
+ * monotone: Deal with format change in version 0.45.
+ (Thanks, Richard Levitte)
+ * cvs: Add missing bit to Automator.
+ * attachment: Fix reversion in attachment sorting by age.
+ * Fix utf-8 problems in rename, remove, attachment, 404, sourcepage, and
+ goto."""]]
\ No newline at end of file
[[!template id=plugin name=field author="[[rubykat]]"]]
+[[!tag type/meta]]
[[!toc]]
## NAME
[[!template id=plugin name=pod author="[[rubykat]]"]]
+[[!tag type/format]]
## NAME
IkiWiki::Plugin::pod - process pages written in POD format.
* browse at GitHub: <http://github.com/rubykat/ikiplugins/blob/master/IkiWiki/Plugin/pod.pm>
* git repo at git://github.com/rubykat/ikiplugins.git
+
[[!template id=plugin name=xslt author="[[rubykat]]"]]
+[[!tag type/chrome]]
## NAME
IkiWiki::Plugin::xslt - ikiwiki directive to process an XML file with XSLT
Google is asked to search for pages in the domain specified in the wiki's
`url` configuration parameter. Results will depend on whether google has
-indexed the site, and how recently. Also, if the same domain has other
-content, outside the wiki's content, it will be searched as well.
+indexed the site, and how recently.
The [[search]] plugin offers full text search of only the wiki, but
requires that a search engine be installed on your site.
tweaked to do this? --[[schmonz]]
> Done. --[[Joey]]
+
+The main page said:
+
+> Also, if the same domain has other content, outside the wiki's
+> content, it will be searched as well.
+
+Is it still true now? (Or this statement is out of date?) --[weakish]
+
+[weakish]: http://weakish.pigro.net
+
+> I checked, and it's never been true; google is given the url to the top
+> of the wiki and only searches things in there. --[[Joey]]
<pre>
\[[!acl user=* page=/subsite/* acl=/subsite/acl.mdwn]]
</pre>
+
+Any idea when this is going to be finished? If you want, I am happy to beta test.
+
+> It's already done, though that is sorta hidden in the above. :-)
+> Example of use to only allow two users to edit the tipjar page:
+> locked_pages => 'tipjar and !(user(joey) or user(bob))',
+> --[[Joey]]
>> However, I think that if the cache does not work for a big load, it should
>> not work at all; small loads are small so they don't matter. --ulrik
+-----
+
+Another possibility is using an empty url for wikilinks (gitit uses this approach), for example:
+
+ `SomePage <>`_
+
+Since it uses *empty* url, I would like to call it *proposal 0* :-) --[weakish]
+
+[weakish]: http://weakish.pigro.net
--- /dev/null
+Currently, new comments are named with an incrementing ID (comment_N). So
+if a wiki has multiple disconnected servers, and comments are made to the
+same page on both, merging is guaranteed to result in conflicts.
+
+I propose avoiding such merge problems by naming a comment with a sha1sum
+of its (full) content. (Keep the incrementing ID too, so there is an
+ordering. And so duplicate comments are allowed.)
+So, "comment_N_SHA1".
+
+Note: The comment body will need to use meta title in the case where no
+title is specified, to retain the current behavior of the default title
+being "comment N".
+
+What do you think [[smcv]]? --[[Joey]]
+
+> I had to use md5sums, as sha1sum perl module may not be available and I
+> didn't want to drag it in. But I think that's ok; this doesn't need to be
+> cryptographically secure and even the chances of being able to
+> purposefully cause a md5 collision and thus an undesired merge conflict
+> are quite low since it modifies the input text and adds a date stamp to
+> it.
+>
+> Anyway, I think it's good, [[done]] --[[Joey]]
> >
> > I no longer have the original wiki for which I wanted this feature, but I can
> > see using it on future ones. -- [[DonMarti]]
+
+>>> FWIW, for the case where one wants to update a site offline,
+>>> using an ikiwiki instance on a laptop, and include some deferred
+>>> posts in the push, the ad-hoc cron job type approach will be annoying.
+>>>
+>>> In modern ikiwiki, I guess the way to accomplish this would be to
+>>> add a pagespec that matches only pages posted in the present or past.
+>>> Then a page can have its post date set to the future, using meta date,
+>>> and only show up when its post date rolls around.
+>>>
+>>> Ikiwiki will need to somehow notice that a pagespec began matching
+>>> a page it did not match previously, despite said page not actually
+>>> changing. I'm not sure what the best way is.
+>>>
+>>> * One way could be to
+>>> use a needsbuild hook and some stored data about which pagespecs
+>>> exclude pages in the future. (But I'm not sure how evaluating the
+>>> pagespec could lead to that metadata and hook being set up.)
+>>> * Another way would be to use an explicit directive to delay a
+>>> page being posted. Then the directive stores the metadata and
+>>> sets up the needsbuild hook.
+>>> * Another way would be for ikiwiki to remember the last
+>>> time it ran. It could then easily find pages that have a post
+>>> date after that time, and treat them the same as it treats actually
+>>> modified files. Or a plugin could do this via a needsbuild hook,
+>>> probably. (Only downside to this is it would probably need to do
+>>> a O(n) walk of the list of pages -- but only running an integer
+>>> compare per page.)
+>>>
+>>> You'd still need a cron job to run ikiwiki -refresh every hour, or
+>>> whatever, so it can update. --[[Joey]]
> > projects prefer to receive changes as unified diffs (or as
> > branches in their chosen VCS, which is [[git]] here). --[[smcv]]
- 56,57c56,57
- < my $curlevel;
- < my $startlevel=0;
- ---
- > my $startlevel=($params{startlevel} ? $params{startlevel} : 0);
- > my $curlevel=$startlevel-1;
- 69a70
- > # unless we're given startlevel as a parameter
- 73a75,79
- > elsif (defined $params{startlevel}
- > and $level < $params{startlevel})
- > {
- > return;
- > }
+> > > Done. -- [[KathrynAndersen]]
+
+> > > > Looks like Joey has now [[merged|done]] this. Thanks! --[[smcv]]
+
+ --- /files/git/other/ikiwiki/IkiWiki/Plugin/toc.pm 2009-11-16 12:44:00.352050178 +1100
+ +++ toc.pm 2009-12-26 06:36:06.686512552 +1100
+ @@ -53,8 +53,8 @@
+ my $page="";
+ my $index="";
+ my %anchors;
+ - my $curlevel;
+ - my $startlevel=0;
+ + my $startlevel=($params{startlevel} ? $params{startlevel} : 0);
+ + my $curlevel=$startlevel-1;
+ my $liststarted=0;
+ my $indent=sub { "\t" x $curlevel };
+ $p->handler(start => sub {
+ @@ -67,10 +67,16 @@
+
+ # Take the first header level seen as the topmost level,
+ # even if there are higher levels seen later on.
+ + # unless we're given startlevel as a parameter
+ if (! $startlevel) {
+ $startlevel=$level;
+ $curlevel=$startlevel-1;
+ }
+ + elsif (defined $params{startlevel}
+ + and $level < $params{startlevel})
+ + {
+ + return;
+ + }
+ elsif ($level < $startlevel) {
+ $level=$startlevel;
+ }
[[!tag patch]]
Makes ikiwiki look in the specified directory first, before the regular
locations when loading library files and plugins. For example, if you set
- libdir to "/home/you/.ikiwiki/", you can install a Foo.pm plugin as
- "/home/you/.ikiwiki/IkiWiki/Plugin/Foo.pm".
+ libdir to "/home/you/.ikiwiki/", you can install a foo.pm plugin as
+ "/home/you/.ikiwiki/IkiWiki/Plugin/foo.pm".
* --discussion, --no-discussion
It's possible to use all of ikiwiki's web features (page editing, etc) in
-the `w3m` web browser without using a web server. `w3m` supports local CGI
+the [`w3m`](http://w3m.sourceforge.net/) web browser without using a web server. `w3m` supports local CGI
scripts, and ikiwiki can be set up to run that way. This requires some
special configuration:
Name: ikiwiki
-Version: 3.20091202
+Version: 3.20091218
Release: 1%{?dist}
Summary: A wiki compiler
-#!/usr/bin/python
+#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# pythondemo — demo Python ikiwiki plugin
-#!/usr/bin/python
+#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# rst — xml-rpc-based ikiwiki plugin to process RST files
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2009-12-02 13:10-0500\n"
+"POT-Creation-Date: 2009-12-18 20:48-0500\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
msgid "login failed, perhaps you need to turn on cookies?"
msgstr ""
-#: ../IkiWiki/CGI.pm:169 ../IkiWiki/CGI.pm:314
+#: ../IkiWiki/CGI.pm:169 ../IkiWiki/CGI.pm:318
msgid "Your login session has expired."
msgstr ""
msgid "Preferences saved."
msgstr ""
-#: ../IkiWiki/CGI.pm:278
+#: ../IkiWiki/CGI.pm:282
msgid "You are banned."
msgstr ""
-#: ../IkiWiki/CGI.pm:405 ../IkiWiki/CGI.pm:406 ../IkiWiki.pm:1281
+#: ../IkiWiki/CGI.pm:409 ../IkiWiki/CGI.pm:410 ../IkiWiki.pm:1281
msgid "Error"
msgstr ""
msgid "fortune failed"
msgstr ""
-#: ../IkiWiki/Plugin/getsource.pm:62 ../IkiWiki/Plugin/goto.pm:55
+#: ../IkiWiki/Plugin/getsource.pm:63 ../IkiWiki/Plugin/goto.pm:56
msgid "missing page"
msgstr ""
-#: ../IkiWiki/Plugin/getsource.pm:64 ../IkiWiki/Plugin/goto.pm:57
+#: ../IkiWiki/Plugin/getsource.pm:65 ../IkiWiki/Plugin/goto.pm:58
#, perl-format
msgid "The page %s does not exist."
msgstr ""
-#: ../IkiWiki/Plugin/getsource.pm:73
+#: ../IkiWiki/Plugin/getsource.pm:74
msgid "not a page"
msgstr ""
-#: ../IkiWiki/Plugin/getsource.pm:75
+#: ../IkiWiki/Plugin/getsource.pm:76
#, perl-format
msgid "%s is an attachment, not a page."
msgstr ""
msgid "Also rename SubPages and attachments"
msgstr ""
-#: ../IkiWiki/Plugin/rename.pm:247
+#: ../IkiWiki/Plugin/rename.pm:248
msgid "Only one attachment can be renamed at a time."
msgstr ""
-#: ../IkiWiki/Plugin/rename.pm:250
+#: ../IkiWiki/Plugin/rename.pm:251
msgid "Please select the attachment to rename."
msgstr ""
-#: ../IkiWiki/Plugin/rename.pm:347
+#: ../IkiWiki/Plugin/rename.pm:348
#, perl-format
msgid "rename %s to %s"
msgstr ""
-#: ../IkiWiki/Plugin/rename.pm:571
+#: ../IkiWiki/Plugin/rename.pm:572
#, perl-format
msgid "update for rename of %s to %s"
msgstr ""
msgid "you must enter a wikiname (that contains alphanumerics)"
msgstr ""
-#: ../IkiWiki/Setup/Automator.pm:76
+#: ../IkiWiki/Setup/Automator.pm:79
#, perl-format
msgid "unsupported revision control system %s"
msgstr ""
-#: ../IkiWiki/Setup/Automator.pm:102
+#: ../IkiWiki/Setup/Automator.pm:105
msgid "failed to set up the repository with ikiwiki-makerepo"
msgstr ""
-#: ../IkiWiki/Setup/Automator.pm:120
+#: ../IkiWiki/Setup/Automator.pm:123
#, perl-format
msgid "** Disabling plugin %s, since it is failing with this message:"
msgstr ""
<div class="pageheader">
<div class="header">
<span>
-<TMPL_VAR INDEXLINK>/ <TMPL_VAR TITLE>
+<TMPL_VAR INDEXLINK>/
+<span class="title">
+<TMPL_VAR TITLE>
+</span>
</span>
</div>
</div> <!-- .pageheader -->
// other browsers
window.onload = run_hooks_onload;
+var onload_done = 0;
+
function run_hooks_onload() {
// avoid firing twice
- if (arguments.callee.done)
+ if (onload_done)
return;
- arguments.callee.done = true;
+ onload_done = true;
run_hooks("onload");
}