ikiwiki-3.20130904.1ubuntu1/0000755000000000000000000000000012211730003012115 5ustar ikiwiki-3.20130904.1ubuntu1/ikiwiki-update-wikilist0000755000000000000000000000263212211727745016646 0ustar #!/usr/bin/perl -t # Add a user to the system wide wikilist. # This script can safely be made suid or put in /etc/sudoers. use warnings; use strict; use English; my $remove=(@ARGV && $ARGV[0] eq '-r'); my $username=getpwuid($REAL_USER_ID); if (! defined $username || ! length $username) { die "unable to determine user name for UID $REAL_USER_ID\n"; } my $wikilist="/etc/ikiwiki/wikilist"; if (! -e $wikilist) { die "$wikilist does not exist\n"; } my $changed=0; my $seen=0; my @lines; open (my $list, "<$wikilist") || die "read $wikilist: $!"; while (<$list>) { chomp; if (/^\s*([^\s]+)\s*$/) { my $user=$1; if ($user eq $username) { if (! $remove) { $seen=1; push @lines, $_; } else { $changed=1; } } else { push @lines, $_; } } else { push @lines, $_; } } if (! $seen && ! $remove) { push @lines, $username; $changed=1; } if ($changed) { close $list || die "ikiwiki-update-wikilist: error reading $list: $!\n"; open ($list, ">$wikilist") || die "ikiwiki-update-wikilist: cannot write to $wikilist\n"; foreach (@lines) { print $list "$_\n"; } if ($remove) { print "ikiwiki-update-wikilist: removed user $username from $wikilist\n"; } else { print "ikiwiki-update-wikilist: added user $username to $wikilist\n"; } close $list || die "ikiwiki-update-wikilist: error writing $wikilist: $!\n"; } else { print "ikiwiki-update-wikilist: no changes need to be made\n"; } ikiwiki-3.20130904.1ubuntu1/docwiki.setup0000644000000000000000000000171312211727745014655 0ustar #!/usr/bin/perl # Configuration file for ikiwiki to build its documentation wiki. # Use git during the build, if it's available and if we're building # from a git checkout. This ensures ikiwiki gets the right mtimes and # ctimes for files in the doc wiki. 
our $rcs="norcs"; BEGIN { my $git=`which git 2>&1`; chomp $git; if (-x $git && -d ".git") { $rcs="git"; } } use IkiWiki::Setup::Standard { wikiname => "ikiwiki", srcdir => "doc", destdir => "html", templatedir => "templates", underlaydirbase => "underlays", underlaydir => "underlays/basewiki", discussion => 0, exclude => qr/\/discussion|bugs\/*|todo\/*|forum\/*/, # save space locale => '', verbose => 1, syslog => 0, userdir => "users", usedirs => 0, prefix_directives => 1, add_plugins => [qw{goodstuff version haiku polygen fortune table}], # not appropriate for doc dir disable_plugins => [qw{recentchanges openid}], rcs => $rcs, gitorigin_branch => '', # don't pull during build } ikiwiki-3.20130904.1ubuntu1/html/0000755000000000000000000000000012211730003013061 5ustar ikiwiki-3.20130904.1ubuntu1/ikiwiki-transition.in0000755000000000000000000002043512211727745016327 0ustar #!/usr/bin/perl use warnings; use strict; use lib '.'; # For use in nonstandard directory, munged by Makefile. use IkiWiki; use HTML::Entities; my $regex = qr{ (\\?) # 1: escape? \[\[(!?) # directive open; 2: optional prefix ([-\w]+) # 3: command ( # 4: the parameters (including initial whitespace) \s+ (?: (?:[-\w]+=)? # named parameter key? (?: """.*?""" # triple-quoted value | "[^"]+" # single-quoted value | [^\s\]]+ # unquoted value ) \s* # whitespace or end # of directive ) *) # 0 or more parameters \]\] # directive closed }sx; sub handle_directive { my $escape = shift; my $prefix = shift; my $directive = shift; my $args = shift; if (length $escape) { return "${escape}[[${prefix}${directive}${args}]]" } if ($directive =~ m/^(if|more|table|template|toggleable)$/) { $args =~ s{$regex}{handle_directive($1, $2, $3, $4)}eg; } return "[[!${directive}${args}]]" } sub prefix_directives { loadsetup(shift); IkiWiki::loadplugins(); IkiWiki::checkconfig(); IkiWiki::loadindex(); if (! 
%pagesources) { error "ikiwiki has not built this wiki yet, cannot transition"; } foreach my $page (values %pagesources) { next unless defined pagetype($page) && -f $config{srcdir}."/".$page; my $content=readfile($config{srcdir}."/".$page); my $oldcontent=$content; $content=~s{$regex}{handle_directive($1, $2, $3, $4)}eg; if ($oldcontent ne $content) { writefile($page, $config{srcdir}, $content); } } } sub indexdb { setstatedir(shift); # Note: No lockwiki here because ikiwiki already locks it # before calling this. if (! IkiWiki::oldloadindex()) { die "failed to load index\n"; } if (! IkiWiki::saveindex()) { die "failed to save indexdb\n" } if (! IkiWiki::loadindex()) { die "transition failed, cannot load new indexdb\n"; } if (! unlink("$config{wikistatedir}/index")) { die "unlink failed: $!\n"; } } sub hashpassword { setstatedir(shift); eval q{use IkiWiki::UserInfo}; eval q{use Authen::Passphrase::BlowfishCrypt}; if ($@) { error("ikiwiki-transition hashpassword: failed to load Authen::Passphrase, passwords not hashed"); } IkiWiki::lockwiki(); IkiWiki::loadplugin("passwordauth"); my $userinfo = IkiWiki::userinfo_retrieve(); foreach my $user (keys %{$userinfo}) { if (ref $userinfo->{$user} && exists $userinfo->{$user}->{password} && length $userinfo->{$user}->{password} && ! 
exists $userinfo->{$user}->{cryptpassword}) { IkiWiki::Plugin::passwordauth::setpassword($user, $userinfo->{$user}->{password}); } } } sub aggregateinternal { loadsetup(shift); require IkiWiki::Plugin::aggregate; IkiWiki::checkconfig(); IkiWiki::Plugin::aggregate::migrate_to_internal(); } sub setupformat { my $setup=shift; loadsetup($setup); IkiWiki::checkconfig(); # unpack old-format wrappers setting into new fields my $cgi_seen=0; my $rcs_seen=0; foreach my $wrapper (@{$config{wrappers}}) { if ($wrapper->{cgi}) { if ($cgi_seen) { die "don't know what to do with second cgi wrapper ".$wrapper->{wrapper}."\n"; } $cgi_seen++; print "setting cgi_wrapper to ".$wrapper->{wrapper}."\n"; $config{cgi_wrapper}=$wrapper->{wrapper}; $config{cgi_wrappermode}=$wrapper->{wrappermode} if exists $wrapper->{wrappermode}; } elsif ($config{rcs}) { if ($rcs_seen) { die "don't know what to do with second rcs wrapper ".$wrapper->{wrapper}."\n"; } $rcs_seen++; print "setting $config{rcs}_wrapper to ".$wrapper->{wrapper}."\n"; $config{$config{rcs}."_wrapper"}=$wrapper->{wrapper}; $config{$config{rcs}."_wrappermode"}=$wrapper->{wrappermode} if exists $wrapper->{wrappermode}; } else { die "don't know what to do with wrapper ".$wrapper->{wrapper}."\n"; } } IkiWiki::Setup::dump($setup); } sub moveprefs { my $setup=shift; loadsetup($setup); IkiWiki::checkconfig(); eval q{use IkiWiki::UserInfo}; error $@ if $@; foreach my $field (qw{allowed_attachments locked_pages}) { my $orig=$config{$field}; foreach my $admin (@{$config{adminuser}}) { my $a=IkiWiki::userinfo_get($admin, $field); if (defined $a && length $a && # might already have been moved (! 
defined $orig || $a ne $orig)) { if (defined $config{$field} && length $config{$field}) { $config{$field}=IkiWiki::pagespec_merge($config{$field}, $a); } else { $config{$field}=$a; } } } } my %banned=map { $_ => 1 } @{$config{banned_users}}, IkiWiki::get_banned_users(); $config{banned_users}=[sort keys %banned]; IkiWiki::Setup::dump($setup); } sub deduplinks { loadsetup(shift); IkiWiki::loadplugins(); IkiWiki::checkconfig(); IkiWiki::loadindex(); foreach my $page (keys %links) { my %l; $l{$_}=1 foreach @{$links{$page}}; $links{$page}=[keys %l] } IkiWiki::saveindex(); } sub setstatedir { my $dirorsetup=shift; if (! defined $dirorsetup) { usage(); } if (-d $dirorsetup) { $config{wikistatedir}=$dirorsetup."/.ikiwiki"; } elsif (-f $dirorsetup) { loadsetup($dirorsetup); } else { error("ikiwiki-transition: $dirorsetup does not exist"); } if (! -d $config{wikistatedir}) { error("ikiwiki-transition: $config{wikistatedir} does not exist"); } } sub loadsetup { my $setup=shift; if (! defined $setup) { usage(); } require IkiWiki::Setup; %config = IkiWiki::defaultconfig(); IkiWiki::Setup::load($setup); } sub usage { print STDERR "Usage: ikiwiki-transition type ...\n"; print STDERR "Currently supported transition subcommands:\n"; print STDERR "\tprefix_directives setupfile ...\n"; print STDERR "\taggregateinternal setupfile\n"; print STDERR "\tsetupformat setupfile\n"; print STDERR "\tmoveprefs setupfile\n"; print STDERR "\thashpassword setupfile|srcdir\n"; print STDERR "\tindexdb setupfile|srcdir\n"; print STDERR "\tdeduplinks setupfile\n"; exit 1; } usage() unless @ARGV; my $mode=shift; if ($mode eq 'prefix_directives') { prefix_directives(@ARGV); } elsif ($mode eq 'hashpassword') { hashpassword(@ARGV); } elsif ($mode eq 'indexdb') { indexdb(@ARGV); } elsif ($mode eq 'aggregateinternal') { aggregateinternal(@ARGV); } elsif ($mode eq 'setupformat') { setupformat(@ARGV); } elsif ($mode eq 'moveprefs') { moveprefs(@ARGV); } elsif ($mode eq 'deduplinks') { deduplinks(@ARGV); } 
else { usage(); } package IkiWiki; # A slightly modified version of the old loadindex function. sub oldloadindex { %oldrenderedfiles=%pagectime=(); if (! $config{rebuild}) { %pagesources=%pagemtime=%oldlinks=%links=%depends= %destsources=%renderedfiles=%pagecase=%pagestate=(); } open (my $in, "<", "$config{wikistatedir}/index") || return; while (<$in>) { chomp; my %items; $items{link}=[]; $items{dest}=[]; foreach my $i (split(/ /, $_)) { my ($item, $val)=split(/=/, $i, 2); push @{$items{$item}}, decode_entities($val); } next unless exists $items{src}; # skip bad lines for now my $page=pagename($items{src}[0]); if (! $config{rebuild}) { $pagesources{$page}=$items{src}[0]; $pagemtime{$page}=$items{mtime}[0]; $oldlinks{$page}=[@{$items{link}}]; $links{$page}=[@{$items{link}}]; $depends{$page}={ $items{depends}[0] => $IkiWiki::DEPEND_CONTENT } if exists $items{depends}; $destsources{$_}=$page foreach @{$items{dest}}; $renderedfiles{$page}=[@{$items{dest}}]; $pagecase{lc $page}=$page; foreach my $k (grep /_/, keys %items) { my ($id, $key)=split(/_/, $k, 2); $pagestate{$page}{decode_entities($id)}{decode_entities($key)}=$items{$k}[0]; } } $oldrenderedfiles{$page}=[@{$items{dest}}]; $pagectime{$page}=$items{ctime}[0]; } # saveindex relies on %hooks being populated, else it won't save # the page state owned by a given hook. But no plugins are loaded # by this program, so populate %hooks with all hook ids that # currently have page state. foreach my $page (keys %pagemtime) { foreach my $id (keys %{$pagestate{$page}}) { $hooks{_dummy}{$id}=1; } } return close($in); } # Used to be in IkiWiki/UserInfo, but only used here now. sub get_banned_users () { my @ret; my $userinfo=userinfo_retrieve(); foreach my $user (keys %{$userinfo}) { push @ret, $user if $userinfo->{$user}->{banned}; } return @ret; } # Used to be in IkiWiki, but only used here (to migrate admin prefs into the # setup file) now. 
sub pagespec_merge ($$) { my $a=shift; my $b=shift; return $a if $a eq $b; return "($a) or ($b)"; } 1 ikiwiki-3.20130904.1ubuntu1/auto-blog.setup0000644000000000000000000000404512211727745015116 0ustar #!/usr/bin/perl # Ikiwiki setup automator -- blog version. # # This setup file causes ikiwiki to create a wiki, containing a blog, # check it into revision control, generate a setup file for the new # wiki, and set everything up. # # Just run: ikiwiki -setup /etc/ikiwiki/auto-blog.setup # # By default, it asks a few questions, and confines itself to the user's home # directory. You can edit it to change what it asks questions about, or to # modify the values to use site-specific settings. require IkiWiki::Setup::Automator; our $wikiname=IkiWiki::Setup::Automator::ask( gettext("What will the blog be named?"), gettext("blog")); our $wikiname_short=IkiWiki::Setup::Automator::sanitize_wikiname($wikiname); our $rcs=IkiWiki::Setup::Automator::ask( gettext("What revision control system to use?"), "git"); our $admin=IkiWiki::Setup::Automator::ask( gettext("What wiki user (or openid) will be admin?"), $ENV{USER}); use Net::Domain q{hostfqdn}; our $domain=hostfqdn() || IkiWiki::Setup::Automator::ask( gettext("What is the domain name of the web server?"), ""); IkiWiki::Setup::Automator->import( wikiname => $wikiname, adminuser => [$admin], rcs => $rcs, srcdir => "$ENV{HOME}/$wikiname_short", destdir => "$ENV{HOME}/public_html/$wikiname_short", repository => "$ENV{HOME}/$wikiname_short.".($rcs eq "monotone" ? 
"mtn" : $rcs), dumpsetup => "$ENV{HOME}/$wikiname_short.setup", url => "http://$domain/~$ENV{USER}/$wikiname_short", cgiurl => "http://$domain/~$ENV{USER}/$wikiname_short/ikiwiki.cgi", cgi_wrapper => "$ENV{HOME}/public_html/$wikiname_short/ikiwiki.cgi", adminemail => "$ENV{USER}\@$domain", add_plugins => [qw{goodstuff websetup comments blogspam calendar sidebar trail}], disable_plugins => [qw{}], libdir => "$ENV{HOME}/.ikiwiki", rss => 1, atom => 1, syslog => 1, example => "blog", comments_pagespec => "posts/* and !*/Discussion", blogspam_pagespec => "postcomment(*)", archive_pagespec => "page(posts/*) and !*/Discussion", global_sidebars => 0, discussion => 0, locked_pages => "* and !postcomment(*)", tagbase => "tags", ) ikiwiki-3.20130904.1ubuntu1/README0000644000000000000000000000217612211727745013026 0ustar Use ./Makefile.PL to generate a Makefile, "make" will build the documentation wiki and a man page, and "make install" will install ikiwiki. All other documentation is in the ikiwiki documentation wiki, which is also available online at A few special variables you can set while using the Makefile.PL: PROFILE=1 turns on profiling for the build of the doc wiki. (Uses Devel::NYTProf) NOTAINT=0 turns on the taint flag in the ikiwiki program. (Not recommended unless your perl is less buggy than mine -- see http://bugs.debian.org/411786) MAKE, FIND, and SED can be used to specify where you have the GNU versions of those tools installed, if the normal make, find, and sed are not GNU. There are also other variables supported by MakeMaker, including PREFIX, INSTALL_BASE, and DESTDIR. See ExtUtils::MakeMaker(3). In particular, INSTALL_BASE is very useful if you want to install ikiwiki to some other location, as it configures it to see the perl libraries there. See `doc/tips/nearlyfreespeech.mdwn` for an example of using this to install ikiwiki and its dependencies in a home directory. 
ikiwiki-3.20130904.1ubuntu1/auto.setup0000644000000000000000000000331312211727745014172 0ustar #!/usr/bin/perl # Ikiwiki setup automator. # # This setup file causes ikiwiki to create a wiki, check it into revision # control, generate a setup file for the new wiki, and set everything up. # # Just run: ikiwiki -setup /etc/ikiwiki/auto.setup # # By default, it asks a few questions, and confines itself to the user's home # directory. You can edit it to change what it asks questions about, or to # modify the values to use site-specific settings. require IkiWiki::Setup::Automator; our $wikiname=IkiWiki::Setup::Automator::ask( gettext("What will the wiki be named?"), gettext("wiki")); our $wikiname_short=IkiWiki::Setup::Automator::sanitize_wikiname($wikiname); our $rcs=IkiWiki::Setup::Automator::ask( gettext("What revision control system to use?"), "git"); our $admin=IkiWiki::Setup::Automator::ask( gettext("Which user (wiki account or openid) will be admin?"), $ENV{USER}); use Net::Domain q{hostfqdn}; our $domain=hostfqdn() || IkiWiki::Setup::Automator::ask( gettext("What is the domain name of the web server?"), ""); IkiWiki::Setup::Automator->import( wikiname => $wikiname, adminuser => [$admin], rcs => $rcs, srcdir => "$ENV{HOME}/$wikiname_short", destdir => "$ENV{HOME}/public_html/$wikiname_short", repository => "$ENV{HOME}/$wikiname_short.".($rcs eq "monotone" ? 
"mtn" : $rcs), dumpsetup => "$ENV{HOME}/$wikiname_short.setup", url => "http://$domain/~$ENV{USER}/$wikiname_short", cgiurl => "http://$domain/~$ENV{USER}/$wikiname_short/ikiwiki.cgi", cgi_wrapper => "$ENV{HOME}/public_html/$wikiname_short/ikiwiki.cgi", adminemail => "$ENV{USER}\@$domain", add_plugins => [qw{goodstuff websetup}], disable_plugins => [qw{}], libdir => "$ENV{HOME}/.ikiwiki", rss => 1, atom => 1, syslog => 1, ) ikiwiki-3.20130904.1ubuntu1/templates/0000755000000000000000000000000012211730003014113 5ustar ikiwiki-3.20130904.1ubuntu1/templates/inlinepage.tmpl0000644000000000000000000000412312211727775017152 0ustar ikiwiki-3.20130904.1ubuntu1/templates/revert.tmpl0000644000000000000000000000066112211727775016351 0ustar

Diff being reverted:

ikiwiki-3.20130904.1ubuntu1/templates/autoindex.tmpl0000644000000000000000000000007512211727775017041 0ustar [[!map pages="/* and ! /*/*"]] ikiwiki-3.20130904.1ubuntu1/templates/recentchanges.tmpl0000644000000000000000000000015312211727775017647 0ustar
ikiwiki-3.20130904.1ubuntu1/templates/editpagegone.tmpl0000644000000000000000000000031612211727775017472 0ustar

The page you were editing has disappeared.

Perhaps someone else has deleted it or moved it. If you want to recreate this page with your text, click "Save Page" again.

ikiwiki-3.20130904.1ubuntu1/templates/editfailedsave.tmpl0000644000000000000000000000041112211727775020004 0ustar

Failed to save your changes.

Your changes were not able to be saved to disk. The system gave the error:

Your changes are preserved below, and you can try again to save them.

ikiwiki-3.20130904.1ubuntu1/templates/editcreationconflict.tmpl0000644000000000000000000000044612211727775021237 0ustar

While you were creating this page, someone else independently created a page with the same name.

The edit box below contains the page's current content, followed by the content you entered previously, to allow you to merge the two together before saving.

ikiwiki-3.20130904.1ubuntu1/templates/passwordmail.tmpl0000644000000000000000000000066612211727775017554 0ustar Someone[1], possibly you, requested that the password for on [2] be reset. To change your password, visit the following url, and enter a new password: This url can only be used once to change your password, and it will also stop working the next time you successfully log in. -- ikiwiki [1] Their IP address was [2] Located at ikiwiki-3.20130904.1ubuntu1/templates/titlepage.tmpl0000644000000000000000000000023212211727775017012 0ustar

ikiwiki-3.20130904.1ubuntu1/templates/calendaryear.tmpl0000644000000000000000000000011112211727775017462 0ustar [[!calendar type=year year= pages=""]] ikiwiki-3.20130904.1ubuntu1/templates/blogpost.tmpl0000644000000000000000000000131712211727775016672 0ustar
id="" action="" method="get"> ikiwiki-3.20130904.1ubuntu1/templates/openid-selector.tmpl0000644000000000000000000000231112211727775020130 0ustar

ikiwiki-3.20130904.1ubuntu1/templates/archivepage.tmpl0000644000000000000000000000065412211727775017322 0ustar


Posted by
ikiwiki-3.20130904.1ubuntu1/templates/rsspage.tmpl0000644000000000000000000000072612211727775016510 0ustar <TMPL_VAR TITLE> ikiwiki ikiwiki-3.20130904.1ubuntu1/templates/notifyemail.tmpl0000644000000000000000000000035512211727775017362 0ustar A comment has been posted atchange has been made to ---- ---- To stop these notifications, visit ikiwiki-3.20130904.1ubuntu1/templates/editpage.tmpl0000644000000000000000000000531612211727775016626 0ustar


Attachments
" />

Page preview:

Diff:
ikiwiki-3.20130904.1ubuntu1/templates/editcomment.tmpl0000644000000000000000000000226312211727775017352 0ustar

(optional, or signin)
(optional)

(You might want to Signin first?)




Comment preview:
ikiwiki-3.20130904.1ubuntu1/templates/rssitem.tmpl0000644000000000000000000000146212211727775016530 0ustar <TMPL_VAR TITLE> ikiwiki-3.20130904.1ubuntu1/templates/feedlink.tmpl0000644000000000000000000000061212211727775016617 0ustar
id="" class="feedlink"> RSS Atom
ikiwiki-3.20130904.1ubuntu1/templates/trails.tmpl0000644000000000000000000000127612211727775016343 0ustar
ikiwiki-3.20130904.1ubuntu1/templates/microblog.tmpl0000644000000000000000000000056012211727775017015 0ustar
ikiwiki-3.20130904.1ubuntu1/templates/googleform.tmpl0000644000000000000000000000045012211727775017176 0ustar
placeholder="search" />
ikiwiki-3.20130904.1ubuntu1/templates/atompage.tmpl0000644000000000000000000000164612211727775016643 0ustar <TMPL_VAR TITLE> ikiwiki ikiwiki-3.20130904.1ubuntu1/templates/searchquery.tmpl0000644000000000000000000001115112211727775017371 0ustar $setmap{prefix,title,S} $setmap{prefix,link,XLINK} $set{thousand,$.}$set{decimal,.}$setmap{BN,,Any Country,uk,England,fr,France} ${ $def{PREV, $if{$ne{$topdoc,0},, } } $def{NEXT, $if{$ne{$last,$msize},, } } $def{P,} $def{PAGE,$if{$gt{$1,9},$if{$gt{$1,99},$P{$1,$div{$1,100}}}$P{$1,$mod{$div{$1,10},10}}}$P{$1,$mod{$1,10}}} $def{S,$1} $def{SPAGE,$if{$gt{$1,9},$if{$gt{$1,99},$S{$1,$div{$1,100}}}$S{$1,$mod{$div{$1,10},10}}}$S{$1,$mod{$1,10}}} } $def{PREV,$if{$ne{$topdoc,0},}} $def{PAGE,} $def{SPAGE,} $def{NEXT,$if{$ne{$last,$msize},}}

$env{HELPLINK}
$or{$html{$error}, $if{$eq{$msize,0}, $if{$query,No documents match your query,
Searching $nice{$dbsize} documents }, $if{$not{$msizeexact}, $nice{$add{$topdoc,1}}-$nice{$last} of about $nice{$msize} matches, $if{$and{$eq{$last,$msize},$eq{$topdoc,0}}, All $nice{$msize} matches, $nice{$add{$topdoc,1}}$if{$ne{$add{$topdoc,1},$last},-$nice{$last}} of exactly $nice{$msize} matches} }
$list{$map{$queryterms,$list{$html{$uniq{$unstem{$_}}},,/,}: $nice{$freq{$_}}},Term frequencies: ,$. ,}
Search took $time seconds $hitlist{}
${$percentage%}
Modified:
$html{$date{$field{modtime},%Y-%m-%d}}

$if{$field{language},Language: $html{$field{language}}
} $if{$field{size},Size: $html{$filesize{$field{size}}}
}
$html{$or{$field{caption},$field{title},$field{url},Untitled}}
$highlight{$field{sample},$terms}$if{$field{sample},...}
$percentage% relevant$. matching: $list{$map{$terms,$html{$prettyterm{$_}}},$. , and }${for lynx:}


${suppress next, prev, and page links if there's only one page} $if{$ne{$lastpage,1}, $set{a,$if{$opt{pagelink_height}, HEIGHT=$opt{pagelink_height}}$if{$opt{pagelink_width}, WIDTH=$opt{pagelink_width}}} ${1-W ... X-(this)-Y ...} $set{w,$min{3,$add{$thispage,-1}}} $set{x,$max{$add{$opt{w},1},$add{$thispage,-3}}} $set{y,$min{$lastpage,$add{$thispage,8}}} $PREV $map{$range{1,$opt{w}},$PAGE{$_}} $if{$ne{$add{$opt{w},1},$opt{x}},...} $map{$range{$opt{x},$add{$thispage,-1}},$PAGE{$_}} $SPAGE{$thispage} $map{$range{$add{$thispage,1},$opt{y}},$PAGE{$_}} $if{$ne{$opt{y},$lastpage},...} $NEXT } }}

$if{$dbname,} $if{$ne{$topdoc,0},} $if{$ne{$hitsperpage,10},} $if{$fmt,} $if{$cgi{COLLAPSE},} $if{$queryterms,} $list{$relevants,} $if{$cgi{THRESHOLD},}

$html{$version}
ikiwiki-3.20130904.1ubuntu1/templates/commentmoderation.tmpl0000644000000000000000000000222412211727775020563 0ustar


No comments need moderation at this time.

ikiwiki-3.20130904.1ubuntu1/templates/change.tmpl0000644000000000000000000000322612211727775016267 0ustar [[!meta author=""""""]] [[!meta authorurl=""""""]] [[!meta title="""change to on """]] [[!meta permalink=""]]


ikiwiki-3.20130904.1ubuntu1/templates/editconflict.tmpl0000644000000000000000000000033312211727775017505 0ustar

Your changes conflict with other changes made to the page.

Conflict markers have been inserted into the page content. Reconcile the conflict and commit again to save your changes.

ikiwiki-3.20130904.1ubuntu1/templates/calendarmonth.tmpl0000644000000000000000000000041312211727775017654 0ustar [[!sidebar content=""" [[!calendar type=month month= year= pages=""]] """]] [[!inline pages="creation_month() and creation_year() and " show=0 feeds=no reverse=yes]] ikiwiki-3.20130904.1ubuntu1/templates/searchform.tmpl0000644000000000000000000000031112211727775017163 0ustar
placeholder="search" />
ikiwiki-3.20130904.1ubuntu1/templates/renamesummary.tmpl0000644000000000000000000000112612211727775017724 0ustar

Failed to rename to :

Successfully renamed to .

The following pages have been automatically modified to update their links to :

The following pages still link to :

ikiwiki-3.20130904.1ubuntu1/templates/comment.tmpl0000644000000000000000000000370012211727775016501 0ustar
Comment by Comment by
ikiwiki-3.20130904.1ubuntu1/templates/aggregatepost.tmpl0000644000000000000000000000102512211727775017671 0ustar [[!tag ]] [[!meta title=""]] [[!meta permalink=""]] [[!meta copyright=""]] [[!meta author=": "]] [[!meta author=""]] [[!meta authorurl=""]] ikiwiki-3.20130904.1ubuntu1/templates/pocreatepage.tmpl0000644000000000000000000000010612211727775017473 0ustar

WARNING this page must be written in .

ikiwiki-3.20130904.1ubuntu1/templates/page.tmpl0000644000000000000000000001365512211727775015765 0ustar <TMPL_VAR TITLE> ikiwiki-3.20130904.1ubuntu1/templates/atomitem.tmpl0000644000000000000000000000241412211727775016657 0ustar <TMPL_VAR TITLE> ikiwiki-3.20130904.1ubuntu1/templates/autotag.tmpl0000644000000000000000000000020512211727775016500 0ustar [[!meta title="pages tagged "]] [[!inline pages="tagged()" actions="no" archive="yes" feedshow=10]] ikiwiki-3.20130904.1ubuntu1/ikiwiki-calendar.in0000755000000000000000000000324312211727745015704 0ustar #!/usr/bin/perl use warnings; use strict; use lib '.'; # For use in nonstandard directory, munged by Makefile. use IkiWiki; use IkiWiki::Setup; use Getopt::Long; sub usage () { die gettext("usage: ikiwiki-calendar [-f] your.setup [pagespec] [startyear [endyear]]"), "\n"; } my $force=0; GetOptions( "force" => \$force, ) || usage(); my $setup=shift || usage(); my $pagespec; if (@ARGV && $ARGV[0] !~ /^\d+$/) { $pagespec=shift; } my $startyear=shift || 1900+(localtime(time))[5]; my $endyear=shift || $startyear; %config=IkiWiki::defaultconfig(); IkiWiki::Setup::load($setup); IkiWiki::loadplugins(); IkiWiki::checkconfig(); my $archivebase = 'archives'; $archivebase = $config{archivebase} if defined $config{archivebase}; if (! defined $pagespec) { $pagespec=$config{archive_pagespec} || "*"; } sub writearchive ($$;$) { my $template=template(shift); my $year=shift; my $month=shift; my $page=defined $month ? "$year/$month" : $year; my $pagefile=newpagefile("$archivebase/$page", $config{default_pageext}); $template->param(pagespec => $pagespec); $template->param(year => $year); $template->param(month => $month) if defined $month; if ($force || ! 
-e "$config{srcdir}/$pagefile") { writefile($pagefile, $config{srcdir}, $template->output); IkiWiki::rcs_add($pagefile) if $config{rcs}; } } foreach my $y ($startyear..$endyear) { writearchive("calendaryear.tmpl", $y); foreach my $m (qw{01 02 03 04 05 06 07 08 09 10 11 12}) { writearchive("calendarmonth.tmpl", $y, $m); } } IkiWiki::rcs_commit_staged(message => gettext("calendar update")) if $config{rcs}; exec("ikiwiki", "-setup", $setup, "-refresh"); die "failed to run ikiwiki -setup $setup -refresh\n"; ikiwiki-3.20130904.1ubuntu1/mdwn2man0000755000000000000000000000114012211727745013605 0ustar #!/usr/bin/perl # Warning: hack my $prog=shift; my $section=shift; print ".TH $prog $section\n"; while (<>) { s{(\\?)\[\[([^\s\|\]]+)(\|[^\s\]]+)?\]\]}{$1 ? "[[$2]]" : $2}eg; s/\`//g; s/^\s*\./\\&./g; if (/^#\s/) { s/^#\s/.SH /; <>; # blank; } s/^[ \n]+//; s/^\t/ /; s/-/\\-/g; s/^Warning:.*//g; s/^$/.PP\n/; s/^\*\s+(.*)/.IP "$1"/; next if $_ eq ".PP\n" && $skippara; if (/^.IP /) { $inlist=1; $spippara=0; } elsif (/.SH/) { $skippara=0; $inlist=0; } elsif (/^\./) { $skippara=1; } else { $skippara=0; } if ($inlist && $_ eq ".PP\n") { $_=".IP\n"; } print $_; } ikiwiki-3.20130904.1ubuntu1/CHANGELOG0000777000000000000000000000000012211730002016417 2debian/changelogustar ikiwiki-3.20130904.1ubuntu1/Bundle/0000755000000000000000000000000012211730003013326 5ustar ikiwiki-3.20130904.1ubuntu1/Bundle/IkiWiki/0000755000000000000000000000000012211730003014666 5ustar ikiwiki-3.20130904.1ubuntu1/Bundle/IkiWiki/Extras.pm0000644000000000000000000000107312211730002016472 0ustar package Bundle::IkiWiki::Extras; $VERSION = '0.01'; 1; __END__ =head1 NAME Bundle::IkiWiki - modules used by ikiwiki plugins =head1 SYNOPSIS perl -MCPAN -e 'install Bundle::IkiWiki::Extras' =head1 CONTENTS Authen::Passphrase Search::Xapian File::MimeInfo Locale::gettext Net::OpenID::Consumer LWPx::ParanoidAgent Crypt::SSLeay Text::CSV Text::Typography Text::Textile Text::WikiFormat XML::Feed Net::Amazon::S3 
Text::WikiCreole Term::ReadLine::Gnu HTML::Tree Sort::Naturally Gravatar::URL Net::INET6Glue XML::Writer =head1 AUTHOR Joey Hess ikiwiki-3.20130904.1ubuntu1/Bundle/IkiWiki.pm0000644000000000000000000000065712211730002015233 0ustar package Bundle::IkiWiki; $VERSION = '0.01'; 1; __END__ =head1 NAME Bundle::IkiWiki - core modules that ikiwiki needs =head1 SYNOPSIS perl -MCPAN -e 'install Bundle::IkiWiki' =head1 CONTENTS Text::Markdown::Discount HTML::Scrubber HTML::Template HTML::Parser URI 1.36 XML::Simple Date::Parse CGI::FormBuilder CGI::Session Mail::Sendmail CGI Data::Dumper YAML::XS JSON RPC::XML =head1 AUTHOR Joey Hess ikiwiki-3.20130904.1ubuntu1/NEWS0000777000000000000000000000000012211730002014531 2debian/NEWSustar ikiwiki-3.20130904.1ubuntu1/pm_filter0000755000000000000000000000135312211727745014051 0ustar #!/usr/bin/perl -i -p BEGIN { $prefix=shift; $ver=shift; $libdir=shift; } if (/INSTALLDIR_AUTOREPLACE/) { $_=qq{our \$installdir="$prefix";}; } elsif (/VERSION_AUTOREPLACE/) { $_=qq{our \$version="$ver";}; } elsif (/^use lib/) { # The idea here is to figure out if the libdir the Makefile.PL # was configured to use is in perl's normal search path. # If not, hard code it into ikiwiki. if ((grep { $_ eq $libdir } @INC) && (! exists $ENV{PERL5LIB} || ! length $ENV{PERL5LIB} || $ENV{PERL5LIB} ne $libdir)) { $_=""; } else { $_="use lib '$libdir';\n"; } } elsif ($. 
== 1 && ($ENV{NOTAINT}=0) && m{^(#!.*perl.*?)$}) { $_=qq{$1 -T\n}; } elsif (/^\$ENV{PATH}="(.*)";/) { $_="\$ENV{PATH}=\"$1:$prefix/bin\";\n"; } ikiwiki-3.20130904.1ubuntu1/debian/0000755000000000000000000000000012235473050013353 5ustar ikiwiki-3.20130904.1ubuntu1/debian/copyright0000644000000000000000000002356312211727775015331 0ustar Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Source: native package Files: * Copyright: © 2006-2011 Joey Hess License: GPL-2+ Files: templates/* underlays/basewiki/* doc/ikiwiki/directive/* ikiwiki.setup po/underlay/* Copyright: © 2006-2010 Joey Hess License: other Redistribution and use in source and compiled forms, with or without modification, are permitted under any circumstances. No warranty. Files: IkiWiki/Plugin/bzr.pm Copyright: © 2008 Jelmer Vernooij © 2006 Emanuele Aina License: GPL-2+ Files: IkiWiki/Plugin/git.pm Copyright: © 2006-2007 Recai Oktaş License: GPL-2+ Files: IkiWiki/Plugin/mercurial.pm Copyright: © 2006 Emanuele Aina License: GPL-2+ Files: IkiWiki/Plugin/monotone.pm IkiWiki/Plugin/listdirectives.pm IkiWiki/Plugin/progress.pm Copyright: © 2007, 2008 William Uther License: GPL-2+ Files: IkiWiki/Plugin/tla.pm Copyright: © 2006 Clint Adams License: GPL-2+ Files: IkiWiki/Plugin/darcs.pm Copyright: © 2006 Thomas Schwinge 2007 Benjamin A'Lee Tuomo Valkonen 2008 Simon Michael Petr Ročkai Sven M. 
Hallberg License: GPL-2+ Files: IkiWiki/Plugin/teximg.pm Copyright: © 2007 Patrick Winnertz License: GPL-2+ Files: IkiWiki/Plugin/editdiff.pm Copyright: © 2007 Jeremie Koenig License: GPL-2+ Files: IkiWiki/Plugin/graphviz.pm Copyright: © 2007 Josh Triplett License: GPL-2+ Files: IkiWiki/Plugin/table.pm Copyright: © 2007 Victor Moral License: GPL-2+ Files: IkiWiki/Plugin/more.pm Copyright: © 2007 Ben License: GPL-2+ Files: IkiWiki/Plugin/textile.pm Copyright: © 2006 mazirian License: GPL-2+ Files: IkiWiki/Plugin/img.pm Copyright: © 2006 Christian Mock License: GPL-2+ Files: IkiWiki/Plugin/topography.pm Copyright: © 2006 Recai Oktaş License: GPL-2+ Files: IkiWiki/Plugin/map.pm Copyright: © 2006 Alessandro Dotti Contra License: GPL-2+ Files: IkiWiki/Plugin/sidebar.pm Copyright: © 2006 Tuomo Valkonen License: GPL-2+ Files: IkiWiki/Plugin/htmltidy.pm Copyright: © 2006 Faidon Liambotis License: GPL-2+ Files: IkiWiki/Plugin/htmlbalance.pm IkiWiki/Plugin/underlay.pm Copyright: © 2008 Simon McVittie License: GPL-2+ Files: IkiWiki/Plugin/polygen.pm IkiWiki/Plugin/pagestats.pm IkiWiki/Plugin/cutpaste.pm Copyright: © 2006 Enrico Zini License: GPL-2+ Files: plugins/rst plugins/proxy.py plugins/pythondemo Copyright: © martin f. 
krafft License: BSD-2-clause Files: IkiWiki/Plugin/calendar.pm Copyright: (c) 2006, 2007 Manoj Srivastava License: GPL-2+ Files: IkiWiki/Plugin/hnb.pm Copyright: Copyright (C) 2008 Axel Beckert License: GPL-2+ Files: IkiWiki/Plugin/creole.pm Copyright: Copyright (C) 2008 Bernd Zeimetz License: GPL-2+ Files: IkiWiki/Plugin/txt.pm Copyright: Copyright (C) 2008 Gabriel McManus License: GPL-2+ Files: IkiWiki/Plugin/color.pm Copyright: Copyright (C) 2008 Paweł Tęcza License: GPL-2+ Files: IkiWiki/Plugin/google.pm Copyright: Copyright (C) 2008 Peter Simons License: GPL-2+ Files: IkiWiki/Plugin/comments.pm Copyright: © 2006-2008 Joey Hess © 2008 Simon McVittie License: GPL-2+ Files: IkiWiki/Plugin/po.pm Copyright: © 2008-2009 intrigeri License: GPL-2+ Files: IkiWiki/Plugin/404.pm Copyright: © 2009 Simon McVittie License: GPL-2+ Files: IkiWiki/Plugin/transient.pm Copyright: © 2011 Simon McVittie License: GPL-2+ Files: IkiWiki/Plugin/wmd.pm IkiWiki/Plugin/getsource.pm Copyright: © 2009 William Uther License: GPL-2+ Files: IkiWiki/Plugin/cvs.pm Copyright: © 2009 Amitai Schlair License: BSD-2-clause Files: IkiWiki/Plugin/rsync.pm Copyright: © 2009 Amitai Schlair License: BSD-2-clause Files: IkiWiki/Plugin/osm.pm Copyright: © 2011 Blars Blarson, Antoine Beaupré License: GPL-2 Files: IkiWiki/Plugin/trail.pm Copyright: 2009-2012 Simon McVittie License: GPL-2+ Files: doc/logo/* Copyright: © 2006 Recai Oktaş License: GPL-2+ Files: po/* Copyright: 2007 Damyan Ivanov 2007-2009 Miroslav Kure 2008 Jonas Smedegaard 2008 Kai Wasserbäch 2008-2009 Kurt Gramlich 2007 Kartik Mistry 2007 Pawel Tecza 2007, 2009 Víctor Moral 2007 Jean-Luc Coulon (f5ibh) 2008 Christian Perrier 2007 Cyril Brulebois 2007 Daniel Nylander 2009 Fernando González de Requena 2010 Sebastian Kuhnert License: other Redistribution and use in source and compiled forms, with or without modification, are permitted under any circumstances. No warranty. 
Files: doc/smileys/* Copyright: (c) 2000 - 2004 by Jürgen Hermann (c) 1999, 2000 Martin Pool License: GPL-2+ Smileys were copied from Moin Moin. Files: doc/smileys/neutral.png doc/smileys/question.pn Copyright: (c) 2002 phpBB Group License: GPL-2 These smileys were copied from phpBB. Files: doc/tips/integrated_issue_tracking_with_ikiwiki.mdwn Copyright: © 2007 Joey Hess , LinuxWorld.com Comment: First published on LinuxWorld.com, a publication of Network World Inc., 118 Turnpike Rd., Southboro, MA 01772. . Republished with permission. License: GPL-2+ Files: underlays/openid-selector/ikiwiki/openid/* Copyright: © 2008-2010 andyjm, david.j.boden Comment: From http://code.google.com/p/openid-selector/ License: BSD-2-clause Files: underlays/jquery/* Copyright: © 2005-2011 by John Resig, Branden Aaron & Jörn Zaefferer © 2011 The Dojo Foundation License: GPL-2 Files: underlays/attachments/ikiwiki/jquery-ui* Copyright: © 2008 Paul Bakaus © 2011 the jQuery UI Authors (http://jqueryui.com/about) License: GPL-2 Files: underlays/attachments/ikiwiki/jquery.tmpl* Copyright: © Boris Moore License: GPL-2 Files: underlays/attachments/ikiwiki/ Copyright: 2010, 2011 Sebastian Tschan Comment: blueimp / jQuery-File-Upload widget, from https://github.com/blueimp/jQuery-File-Upload License: Expat Files: underlays/themes/blueview/style.css Copyright: © 2009,2010 Bernd Zeimetz © 2008 Yahoo! Inc. Comment: Parts derived from BSD-3-clause licensed YUI library. http://developer.yahoo.com/yui/license.html License: GPL-2+ Files: underlays/themes/blueview/* Copyright: © 2009,2010 Bernd Zeimetz License: GPL-2+ Files: underlays/themes/goldtype/* Copyright: © Lars Wirzenius License: GPL-2+ Files: underlays/themes/monochrome/* Copyright: © 2012 Jon Dowland License: GPL-2+ License: BSD-2-clause Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. 
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. . THIS SOFTWARE IS PROVIDED BY IKIWIKI AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. License: GPL-2 The full text of the GPL is distributed as doc/GPL in ikiwiki's source, and is distributed in /usr/share/common-licenses/GPL-2 on Debian systems. License: GPL-2+ The full text of the GPL is distributed as doc/GPL in ikiwiki's source, and is distributed in /usr/share/common-licenses/GPL-2 on Debian systems. License: Expat Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: . The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. . 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ikiwiki-3.20130904.1ubuntu1/debian/changelog0000644000000000000000000074607312235472720015251 0ustar ikiwiki (3.20130904.1ubuntu1) trusty; urgency=low * Merge from Debian unstable. Remaining changes: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Bhavani Shankar Sun, 03 Nov 2013 21:40:59 +0530 ikiwiki (3.20130904.1) unstable; urgency=low * Fix cookiejar default setting. -- Joey Hess Wed, 04 Sep 2013 10:15:37 -0400 ikiwiki (3.20130904) unstable; urgency=low * calendar: Display the popup mouseover when there is only 1 page for a given day, for better UI consistency. * meta: Can now be used to add an enclosure to a page, which is a fancier way to do podcasting than just inlining the media files directly; this way you can write a post about the podcast episode with show notes, author information, etc. (schmonz) * aggregate: Show author in addition to feedname, if different. (schmonz) * Consistently configure LWP::UserAgent to allow use of http_proxy and no_proxy environment variables, as well as ~/.ikiwiki/cookies (schmonz) * Fix test suite to work with perl 5.18. Closes: #719969 -- Joey Hess Wed, 04 Sep 2013 08:54:31 -0400 ikiwiki (3.20130711) unstable; urgency=low * Deal with git behavior change in 1.7.2 and newer that broke support for commits with an empty commit message. * Pass --no-edit when used with git 1.7.8 and newer. 
-- Joey Hess Wed, 10 Jul 2013 21:49:23 -0400 ikiwiki (3.20130710) unstable; urgency=low * blogspam: Fix encoding issue in RPC::XML call. Thanks, Changaco * comments: The formats allowed to be used in comments can be configured using comments_allowformats. Thanks, Michal Sojka * calendar: When there are multiple pages for a given day, they're displayed in a popup on mouseover. Thanks, Louis * osm: Remove trailing slash from KML maps icon. * page.tmpl: omit searchform, trails, sidebar and most metadata in CGI (smcv) * openid: Automatically upgrade openid_realm to https when accessed via https. * The ip() pagespec can now contain glob characters to match eg, a subnet full of spammers. * Fix crash that could occur when a needsbuild hook returned a file that does not exist. * Fix python proxy to not crash when fed unicode data in getstate and setstate. Thanks, chrysn * Fix committing attachments when using svn. -- Joey Hess Wed, 10 Jul 2013 17:45:40 -0400 ikiwiki (3.20130518) unstable; urgency=low * Fix test suite to not fail when XML::Twig is not installed. Closes: #707436 * theme: Now can be used in all templates when a theme is enabled. * notifyemail: Fix bug that caused duplicate emails to be sent when site was rebuilt. * bzr: bzr rm no longer has a --force option, remove -- Joey Hess Sat, 18 May 2013 16:28:21 -0400 ikiwiki (3.20130504) unstable; urgency=low * Allow dots in directive parameter names. (tango) * Add missing plugin section, and deal with missing sections with a warning. * Detect plugins with a broken getsetup and warn. * map: Correct reversion introduced in version 3.20110225 that could generate invalid html. (smcv) * Makefile.PL: overwrite theme style.css instead of appending (Thanks, Mikko Rapeli) * meta: Fix anchors used to link to the page's license and copyright. Closes: #706437 -- Joey Hess Sat, 04 May 2013 23:47:21 -0400 ikiwiki (3.20130212ubuntu1) raring; urgency=low * Merge from Debian unstable. 
Remaining change: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Bhavani Shankar Tue, 19 Feb 2013 19:18:44 +0530 ikiwiki (3.20130212) unstable; urgency=low * htmlscrubber: Allow the bitcoin URI scheme. * htmlscrubber: Allow the URI schemes of major VCS's. * aggregate: When run with --aggregate, if an aggregation is already running, don't go on and --refresh. * trail: Avoid excess dependencies between pages in the trail and the page defining the trail. Thanks, smcv. * opendiscussion: Don't allow editing discussion pages if discussion pages are disabled. (smcv) * poll: Add expandable option to allow users to easily add new choices to a poll. * trail: Avoid massive slowdown caused by pagetemplate hook when displaying dynamic cgi pages, which cannot use trail anyway. * Deal with empty diffurl in configuration. * cvs: Various fixes. (schmonz) * highlight: Now adds a span with class highlight- around highlighted content, allowing for language-specific css styling. -- Joey Hess Tue, 12 Feb 2013 21:48:02 -0400 ikiwiki (3.20121212ubuntu1) raring; urgency=low * Merge from Debian unstable. Remaining changes: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Angel Abad Wed, 12 Dec 2012 10:09:25 +0100 ikiwiki (3.20121212) unstable; urgency=low * filecheck: Fix bug that prevented File::MimeInfo::Magic from ever being used. * openid: Display openid in Preferences page as a comment, so it can be selected in all browsers. -- Joey Hess Tue, 11 Dec 2012 12:12:12 -0400 ikiwiki (3.20121017ubuntu1) raring; urgency=low * Merge from Debian unstable. Remaining changes: - Drop wdg-html-validator from Build-Depends field. 
The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Angel Abad Tue, 30 Oct 2012 17:35:03 +0100 ikiwiki (3.20121017) unstable; urgency=low * recentchangesdiff: fix further breakage to the template from 3.20120725 -- Joey Hess Tue, 16 Oct 2012 20:49:27 -0400 ikiwiki (3.20121016) unstable; urgency=low * monochrome: New theme, contributed by Jon Dowland. * rst: Ported to python 3, while still also being valid python 2. Thanks, W. Trevor King * Try to avoid a situation in which so many ikiwiki cgi wrapper programs are running, all waiting on some long-running thing like a site rebuild, that it prevents the web server from doing anything else. The current approach only avoids this problem for GET requests; if multiple cgi's run GETs on a site at the same time, one will display a "please wait" page for a configurable number of seconds, which then redirects to retry. To enable this protection, set cgi_overload_delay to the number of seconds to wait. This is not enabled by default. * Add back a 1em margin between archivepage divs. * recentchangesdiff: Correct broken template that resulted in duplicate diff icons being displayed, and bloated the recentchanges page with inline diffs when the configuration should have not allowed them. -- Joey Hess Tue, 16 Oct 2012 15:14:19 -0400 ikiwiki (3.20120725ubuntu1) quantal; urgency=low * Merge from Debian unstable. Remaining changes: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Angel Abad Wed, 12 Sep 2012 13:02:38 +0200 ikiwiki (3.20120725) unstable; urgency=low * recentchangesdiff: When diffurl is not set, provide inline diffs in the recentchanges page, with visibility toggleable via javascript. Thanks, Antoine Beaupré * Split CFLAGS into words when building wrapper. 
Closes: #682237 * osm: Avoid calling urlto before generated files are registered. Thanks, Philippe Gauthier and Antoine Beaupré * osm: Add osm_openlayers_url configuration setting. Thanks, Genevieve * osm: osm_layers can be used to configured the layers displayed on the map. Thanks, Antoine Beaupré * comments: Remove ipv6 address specific code. -- Joey Hess Sat, 25 Aug 2012 10:58:42 -0400 ikiwiki (3.20120629ubuntu1) quantal; urgency=low * Merge from Debian unstable. Remaining changes: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Angel Abad Mon, 02 Jul 2012 11:09:12 +0200 ikiwiki (3.20120629) unstable; urgency=low * mirrorlist: Add mirrorlist_use_cgi setting that avoids usedirs or other config differences by linking to the mirror's CGI. (intrigeri) -- Joey Hess Fri, 29 Jun 2012 10:16:08 -0400 ikiwiki (3.20120516ubuntu1) quantal; urgency=low * Merge from Debian unstable. Remaining changes: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Angel Abad Thu, 17 May 2012 14:23:52 +0200 ikiwiki (3.20120516) unstable; urgency=high * meta: Security fix; add missing sanitization of author and authorurl. CVE-2012-0220 Thanks, Raúl Benencia -- Joey Hess Wed, 16 May 2012 19:51:27 -0400 ikiwiki (3.20120419ubuntu1) quantal; urgency=low * Merge from Debian unstable. Remaining changes: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Bhavani Shankar Mon, 07 May 2012 16:41:42 -0700 ikiwiki (3.20120419) unstable; urgency=low * Remove dead link from plugins/teximg. 
Closes: #664885 * inline: When the pagenames list includes pages that do not exist, skip them. * meta: Export author information in html tag. Closes: #664779 Thanks, Martin Michlmayr * notifyemail: New plugin, sends email notifications about new and changed pages, and allows subscribing to comments. * Added a "changes" hook. Renamed the "change" hook to "rendered", but the old hook name is called for now for back-compat. * meta: Support keywords header. Closes: #664780 Thanks, Martin Michlmayr * passwordauth: Fix url in password recovery email to be absolute. * httpauth: When it's the only auth method, avoid a pointless and confusing signin form, and go right to the httpauthurl. * rename: Allow rename to be started not from the edit page; return to the renamed page in this case. * remove: Support removing of pages in the transient underlay. (smcv) * inline, trail: The pagenames parameter is now a list of absolute pagenames, not relative wikilink type names. This is necessary to fix a bug, and makes pagenames more consistent with the pagespec used in the pages parameter. (smcv) * link: Fix renaming wikilinks that contain embedded urls. * graphviz: Handle self-links. * trail: Improve CSS, also display trail links at bottom of page, and a bug fix. (smcv) -- Joey Hess Thu, 19 Apr 2012 15:32:07 -0400 ikiwiki (3.20120319) unstable; urgency=low * osm: New plugin to embed an OpenStreetMap into a wiki page. Supports waypoints, tags, and can even draw paths matching wikilinks between pages containing waypoints. Thanks to Blars Blarson and Antoine Beaupré, as well as the worldwide OpenStreetMap community for this utter awesomeness. * trail: New plugin to add navigation trails through pages via Next and Previous links. Trails can easily be added to existing inlines by setting trail=yes in the inline. Thanks to Simon McVittie for his persistance developing this feature. * Fix a snail mail address. Closes: #659158 * openid-jquery.js: Update URL of Wordpress favicon. 
Closes: #660549 * Drop the version attribute on the generator tag in Atom feeds to make builds more reproducible. Closes: #661569 (Paul Wise) * shortcut: Support Wikipedia's form of url-encoding for unicode characters, which involves mojibake. Closes: #661198 * Add a few missing jquery UI icons to attachment upload widget underlay. * URI escape filename when generating the diffurl. * Add build-affected hook. Used by trail. -- Joey Hess Mon, 19 Mar 2012 14:24:43 -0400 ikiwiki (3.20120202ubuntu1) precise; urgency=low * Merge from Debian testing. Remaining changes: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Angel Abad Thu, 16 Feb 2012 09:12:04 +0100 ikiwiki (3.20120202) unstable; urgency=low * mdwn: Added nodiscount setting, which can be used to avoid using the markdown discount engine, when maximum compatability is needed. * Switch to YAML::XS to work around insanity in YAML::Mo. Closes: #657533 * cvs: Ensure text files are added in non-binary mode. (Amitai Schlair) * cvs: Various cleanups and testing. (Amitai Schlair) * calendar: Fix strftime encoding bug. * shortcuts: Fixed a broken shortcut to wikipedia (accidentially made into a shortcut to wikiMedia). * Various portability improvements. (Amitai Schlair) -- Joey Hess Thu, 02 Feb 2012 21:42:40 -0400 ikiwiki (3.20120115ubuntu1) precise; urgency=low * Merge from Debian testing. Remaining changes: - Drop wdg-html-validator from Build-Depends field. The xmlns set in the generated elements causes /usr/bin/validate to attempt to download a file from w3.org. This doesn't work without Internet access. -- Angel Abad Fri, 27 Jan 2012 08:37:47 +0100 ikiwiki (3.20120115) unstable; urgency=low * Make backlink(.) work. Thanks, Giuseppe Bilotta. 
* mdwn: Workaround discount's eliding of '); hook("onload", inittoggle); } function inittoggle() { var as = getElementsByClass('toggle'); for (var i = 0; i < as.length; i++) { var id = as[i].href.match(/#(\w.+)/)[1]; if (document.getElementById(id).className == "toggleable") document.getElementById(id).style.display="none"; as[i].onclick = function() { toggle(this); return false; } } } function toggle(s) { var id = s.href.match(/#(\w.+)/)[1]; style = document.getElementById(id).style; if (style.display == "none") style.display = "block"; else style.display = "none"; } ikiwiki-3.20130904.1ubuntu1/underlays/javascript/ikiwiki/relativedate.js0000644000000000000000000000344012211730002022740 0ustar // Causes html elements in the 'relativedate' class to be displayed // as relative dates. The date is parsed from the title attribute, or from // the element content. var dateElements; hook("onload", getDates); function getDates() { dateElements = getElementsByClass('relativedate'); for (var i = 0; i < dateElements.length; i++) { var elt = dateElements[i]; var title = elt.attributes.title; var d = new Date(title ? title.value : elt.innerHTML); if (! isNaN(d)) { dateElements[i].date=d; elt.title=elt.innerHTML; } } showDates(); } function showDates() { for (var i = 0; i < dateElements.length; i++) { var elt = dateElements[i]; var d = elt.date; if (! 
isNaN(d)) { elt.innerHTML=relativeDate(d); } } setTimeout(showDates,30000); // keep updating every 30s } var timeUnits = [ { unit: 'year', seconds: 60 * 60 * 24 * 364 }, { unit: 'month', seconds: 60 * 60 * 24 * 30 }, { unit: 'day', seconds: 60 * 60 * 24 }, { unit: 'hour', seconds: 60 * 60 }, { unit: 'minute', seconds: 60 }, ]; function relativeDate(date) { var now = new Date(); var offset = date.getTime() - now.getTime(); var seconds = Math.round(Math.abs(offset) / 1000); // hack to avoid reading just in the future if there is a minor // amount of clock slip if (offset >= 0 && seconds < 30 * 60 * 60) { return "just now"; } var ret = ""; var shown = 0; for (i = 0; i < timeUnits.length; i++) { if (seconds >= timeUnits[i].seconds) { var num = Math.floor(seconds / timeUnits[i].seconds); seconds -= num * timeUnits[i].seconds; if (ret) ret += "and "; ret += num + " " + timeUnits[i].unit + (num > 1 ? "s" : "") + " "; if (++shown == 2) break; } else if (shown) break; } if (! ret) ret = "less than a minute " return ret + (offset < 0 ? 
"ago" : "from now"); } ikiwiki-3.20130904.1ubuntu1/underlays/smiley/0000755000000000000000000000000012211730003015425 5ustar ikiwiki-3.20130904.1ubuntu1/underlays/smiley/smileys.mdwn0000777000000000000000000000000012211730003023545 2../../doc/smileys.mdwnustar ikiwiki-3.20130904.1ubuntu1/underlays/smiley/smileys0000777000000000000000000000000012211730003021635 2../../doc/smileysustar ikiwiki-3.20130904.1ubuntu1/underlays/osm/0000755000000000000000000000000012211730003014721 5ustar ikiwiki-3.20130904.1ubuntu1/underlays/osm/ikiwiki/0000755000000000000000000000000012211730003016361 5ustar ikiwiki-3.20130904.1ubuntu1/underlays/osm/ikiwiki/images/0000755000000000000000000000000012211730003017626 5ustar ikiwiki-3.20130904.1ubuntu1/underlays/osm/ikiwiki/images/osm.png0000644000000000000000000000564612211730001021143 0ustar PNG  IHDR;0bKGDC pHYsHHFk> vpAgO= IDATHǵY\wV]]]TZx60Q&0 Ei@Q'y`4B5a$%3™m{ߪk}ֽHGtwQ/3X~KBwC-{*8@ `YVݱ/.<>;]7x7P,,n?$P-5n /i쭙 oDz?~wV9g'=3tuu @XUV+\4di qнKz_}M*ڸݹS˅Zw,<5<68765t+~b&~`OTeJPbBH$ك=ݝRCԱqB o}W.^:m=KxԺlAc4|OO_0s&E+D 8m(fj- R՜wc[~28_uSG/8w$nfuz&H .,r.y"l_5_x~Ծ:U&qrvԱ;[_ e{;˝[щ>q2Q2p:au NubAʭ=^WHGI)ۅ_qcS vwwEH3믿L&cO4v7s7s{ÍF"yxGD&="aicO)Y0xAW EQxSn~m7nPꀫ<;X<^aFU xCR46NjLdz/.Q0 c#%f1 >T_{5CT'B9Y-A\Ӣ S )$N}*l[%zhKrtFanl9>iڔ\[[㟾S#hExK~vEQ{\2JJ;niےhJZ@ +ēdh(8H!CZE*P]QQVi6:=2PUs1&{<ؼzU4!TdêH:A5>p=A<6z=CNhĢxHTrZ 6Y;}}в,k_{3cI]J92)-n6f#c \8{= hsɗ4Y\ELGx6=LǑxY}P{w|Rm4.{]__Gneyk8#U_56`<)&˫KA/ Z0mO zi54c1y`ܑaQ.]đ\;8vPݧR^ ,[ Pk u3ɭoa)~^4׏HEx^ȁw޾RW \wezfhg^)x>"6-qM桫ˬ/ēқj$~R.zld@ЏAw_Ge\.-o# Dh方m}~ $U-TCO8uz-H!_>=Xc}VbK4'=qn{N6 ${'#>A?B4]^o+^]vc;,,,=uU49++וo.6׋esѪyHu3ɞѾӣhOaR\M/7stzi v6_~2W\9w\IGNu\oΤ{ŭ&@s T<:FW?d2ʁS1sMP !ԑݟT*iNǞSVLTɋE9_뇳ٮJP(7+rir6GSvwj]|[tE25f-o;? 
}6r%tEXtdate:create2010-11-06T21:30:27-07:00{K%tEXtdate:modify2010-10-30T13:39:49-07:00"OtEXtSoftwarewww.inkscape.org<IENDB`ikiwiki-3.20130904.1ubuntu1/underlays/osm/ikiwiki/osm.js0000644000000000000000000001260612211730001017520 0ustar // taken from http://stackoverflow.com/questions/901115/get-query-string-values-in-javascript var urlParams = {}; (function () { var e, a = /\\+/g, // Regex for replacing addition symbol with a space r = /([^&=]+)=?([^&]*)/g, d = function (s) { return decodeURIComponent(s.replace(a, " ")); }, q = window.location.search.substring(1); while (e = r.exec(q)) urlParams[d(e[1])] = d(e[2]); })(); function mapsetup(divname, options) { div = document.getElementById(divname); if (options.fullscreen) { permalink = 'permalink'; div.style.top = 0; div.style.left = 0; div.style.position = 'absolute'; div.style.width = '100%'; div.style.height = '100%'; } else { div.style.height = options.height; div.style.width = options.width; div.style.float = options.float; permalink = {base: options.href, title: "View larger map"}; } map = new OpenLayers.Map(divname, { controls: [ new OpenLayers.Control.Navigation(), new OpenLayers.Control.ScaleLine(), new OpenLayers.Control.Permalink(permalink) ], displayProjection: new OpenLayers.Projection("EPSG:4326"), maxExtent: new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34), projection: "EPSG:900913", units: "m", maxResolution: 156543.0339, numZoomLevels: 19 }); for (x in options.layers) { layer = options.layers[x]; console.log("setting up layer: " + layer); if (layer.indexOf("Google") >= 0) { if (options.google_apikey && options.google_apikey != 'null') { var gtype = G_NORMAL_MAP; if (layer.indexOf("Satellite") >= 0) { gtype = G_SATELLITE_MAP; } else if (layer.indexOf("Hybrid") >= 0) { gtype = G_HYBRID_MAP // the normal map overlaying the satellite photographs } else if (layer.indexOf("Physical") >= 0) { gtype = G_PHYSICAL_MAP // terrain information } // this nightmare is possible through 
http://docs.openlayers.org/library/spherical_mercator.html googleLayer = new OpenLayers.Layer.Google( layer, {type: gtype, 'sphericalMercator': true, 'maxExtent': new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34), projection: new OpenLayers.Projection("EPSG:3857")} ); map.addLayer(googleLayer); } else { console.log("no API key defined for Google layer, skipping"); } } else if (layer == 'OSM') { // OSM default layer map.addLayer(new OpenLayers.Layer.OSM("OSM (Mapnik)")); } else { // assumed to be a URL text = layer.match(/([^.\/]*\.[^.\/]*(\/[^\$]*)?)\/.*$/i) // take the first two parts of the FQDN and everything before the first $ map.addLayer(new OpenLayers.Layer.OSM("OSM (" + text[1] + ")", layer)); } } if (options.format == 'CSV') { pois = new OpenLayers.Layer.Text( "CSV", { location: options.csvurl, projection: new OpenLayers.Projection("EPSG:4326") }); } else if (options.format == 'GeoJSON') { pois = new OpenLayers.Layer.Vector("GeoJSON", { protocol: new OpenLayers.Protocol.HTTP({ url: options.jsonurl, format: new OpenLayers.Format.GeoJSON() }), strategies: [new OpenLayers.Strategy.Fixed()], projection: new OpenLayers.Projection("EPSG:4326") }); } else { pois = new OpenLayers.Layer.Vector("KML", { protocol: new OpenLayers.Protocol.HTTP({ url: options.kmlurl, format: new OpenLayers.Format.KML({ extractStyles: true, extractAttributes: true }) }), strategies: [new OpenLayers.Strategy.Fixed()], projection: new OpenLayers.Projection("EPSG:4326") }); } map.addLayer(pois); select = new OpenLayers.Control.SelectFeature(pois); map.addControl(select); select.activate(); pois.events.on({ "featureselected": function (event) { var feature = event.feature; var content = '

' +feature.attributes.name + "

" + feature.attributes.description; popup = new OpenLayers.Popup.FramedCloud("chicken", feature.geometry.getBounds().getCenterLonLat(), new OpenLayers.Size(100,100), content, null, true, function () {select.unselectAll()}); feature.popup = popup; map.addPopup(popup); }, "featureunselected": function (event) { var feature = event.feature; if (feature.popup) { map.removePopup(feature.popup); feature.popup.destroy(); delete feature.popup; } } }); if (options.editable) { vlayer = new OpenLayers.Layer.Vector( "Editable" ); map.addControl(new OpenLayers.Control.EditingToolbar(vlayer)); map.addLayer(vlayer); } if (options.fullscreen) { map.addControl(new OpenLayers.Control.PanZoomBar()); map.addControl(new OpenLayers.Control.LayerSwitcher()); map.addControl(new OpenLayers.Control.MousePosition()); map.addControl(new OpenLayers.Control.KeyboardDefaults()); } else { map.addControl(new OpenLayers.Control.ZoomPanel()); } //Set start centrepoint and zoom if (!options.lat || !options.lon) { options.lat = urlParams['lat']; options.lon = urlParams['lon']; } if (!options.zoom) { options.zoom = urlParams['zoom']; } if (options.lat && options.lon) { var lat = options.lat; var lon = options.lon; var zoom= options.zoom || 10; center = new OpenLayers.LonLat( lon, lat ).transform( new OpenLayers.Projection("EPSG:4326"), // transform from WGS 1984 map.getProjectionObject() // to Spherical Mercator Projection ); map.setCenter (center, zoom); } else { pois.events.register("loadend", this, function () { map.zoomToExtent(pois.getDataExtent()); }); } } ikiwiki-3.20130904.1ubuntu1/underlays/jquery/0000755000000000000000000000000012211730003015442 5ustar ikiwiki-3.20130904.1ubuntu1/underlays/jquery/ikiwiki/0000755000000000000000000000000012211730003017102 5ustar ikiwiki-3.20130904.1ubuntu1/underlays/jquery/ikiwiki/jquery.min.js0000644000000000000000000030622512211730002021550 0ustar /* * jQuery JavaScript Library v1.6.2 * http://jquery.com/ * * Copyright 2011, John Resig * Dual licensed under 
the MIT or GPL Version 2 licenses. * http://jquery.org/license * * Includes Sizzle.js * http://sizzlejs.com/ * Copyright 2011, The Dojo Foundation * Released under the MIT, BSD, and GPL Licenses. * * Date: Thu Jun 30 14:16:56 2011 -0400 */ (function(a9,K){var aq=a9.document,br=a9.navigator,bi=a9.location;var b=(function(){var bC=function(bX,bY){return new bC.fn.init(bX,bY,bA)},bS=a9.jQuery,bE=a9.$,bA,bW=/^(?:[^<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/,bK=/\S/,bG=/^\s+/,bB=/\s+$/,bF=/\d/,bx=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,bL=/^[\],:{}\s]*$/,bU=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,bN=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,bH=/(?:^|:|,)(?:\s*\[)+/g,bv=/(webkit)[ \/]([\w.]+)/,bP=/(opera)(?:.*version)?[ \/]([\w.]+)/,bO=/(msie) ([\w.]+)/,bQ=/(mozilla)(?:.*? rv:([\w.]+))?/,by=/-([a-z])/ig,bR=function(bX,bY){return bY.toUpperCase()},bV=br.userAgent,bT,bz,e,bJ=Object.prototype.toString,bD=Object.prototype.hasOwnProperty,bw=Array.prototype.push,bI=Array.prototype.slice,bM=String.prototype.trim,bs=Array.prototype.indexOf,bu={};bC.fn=bC.prototype={constructor:bC,init:function(bX,b1,b0){var bZ,b2,bY,b3;if(!bX){return this}if(bX.nodeType){this.context=this[0]=bX;this.length=1;return this}if(bX==="body"&&!b1&&aq.body){this.context=aq;this[0]=aq.body;this.selector=bX;this.length=1;return this}if(typeof bX==="string"){if(bX.charAt(0)==="<"&&bX.charAt(bX.length-1)===">"&&bX.length>=3){bZ=[null,bX,null]}else{bZ=bW.exec(bX)}if(bZ&&(bZ[1]||!b1)){if(bZ[1]){b1=b1 instanceof bC?b1[0]:b1;b3=(b1?b1.ownerDocument||b1:aq);bY=bx.exec(bX);if(bY){if(bC.isPlainObject(b1)){bX=[aq.createElement(bY[1])];bC.fn.attr.call(bX,b1,true)}else{bX=[b3.createElement(bY[1])]}}else{bY=bC.buildFragment([bZ[1]],[b3]);bX=(bY.cacheable?bC.clone(bY.fragment):bY.fragment).childNodes}return bC.merge(this,bX)}else{b2=aq.getElementById(bZ[2]);if(b2&&b2.parentNode){if(b2.id!==bZ[2]){return b0.find(bX)}this.length=1;this[0]=b2}this.context=aq;this.selector=bX;return 
this}}else{if(!b1||b1.jquery){return(b1||b0).find(bX)}else{return this.constructor(b1).find(bX)}}}else{if(bC.isFunction(bX)){return b0.ready(bX)}}if(bX.selector!==K){this.selector=bX.selector;this.context=bX.context}return bC.makeArray(bX,this)},selector:"",jquery:"1.6.2",length:0,size:function(){return this.length},toArray:function(){return bI.call(this,0)},get:function(bX){return bX==null?this.toArray():(bX<0?this[this.length+bX]:this[bX])},pushStack:function(bY,b0,bX){var bZ=this.constructor();if(bC.isArray(bY)){bw.apply(bZ,bY)}else{bC.merge(bZ,bY)}bZ.prevObject=this;bZ.context=this.context;if(b0==="find"){bZ.selector=this.selector+(this.selector?" ":"")+bX}else{if(b0){bZ.selector=this.selector+"."+b0+"("+bX+")"}}return bZ},each:function(bY,bX){return bC.each(this,bY,bX)},ready:function(bX){bC.bindReady();bz.done(bX);return this},eq:function(bX){return bX===-1?this.slice(bX):this.slice(bX,+bX+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(bI.apply(this,arguments),"slice",bI.call(arguments).join(","))},map:function(bX){return this.pushStack(bC.map(this,function(bZ,bY){return bX.call(bZ,bY,bZ)}))},end:function(){return this.prevObject||this.constructor(null)},push:bw,sort:[].sort,splice:[].splice};bC.fn.init.prototype=bC.fn;bC.extend=bC.fn.extend=function(){var b6,bZ,bX,bY,b3,b4,b2=arguments[0]||{},b1=1,b0=arguments.length,b5=false;if(typeof b2==="boolean"){b5=b2;b2=arguments[1]||{};b1=2}if(typeof b2!=="object"&&!bC.isFunction(b2)){b2={}}if(b0===b1){b2=this;--b1}for(;b10){return}bz.resolveWith(aq,[bC]);if(bC.fn.trigger){bC(aq).trigger("ready").unbind("ready")}}},bindReady:function(){if(bz){return}bz=bC._Deferred();if(aq.readyState==="complete"){return setTimeout(bC.ready,1)}if(aq.addEventListener){aq.addEventListener("DOMContentLoaded",e,false);a9.addEventListener("load",bC.ready,false)}else{if(aq.attachEvent){aq.attachEvent("onreadystatechange",e);a9.attachEvent("onload",bC.ready);var 
bX=false;try{bX=a9.frameElement==null}catch(bY){}if(aq.documentElement.doScroll&&bX){bt()}}}},isFunction:function(bX){return bC.type(bX)==="function"},isArray:Array.isArray||function(bX){return bC.type(bX)==="array"},isWindow:function(bX){return bX&&typeof bX==="object"&&"setInterval" in bX},isNaN:function(bX){return bX==null||!bF.test(bX)||isNaN(bX)},type:function(bX){return bX==null?String(bX):bu[bJ.call(bX)]||"object"},isPlainObject:function(bY){if(!bY||bC.type(bY)!=="object"||bY.nodeType||bC.isWindow(bY)){return false}if(bY.constructor&&!bD.call(bY,"constructor")&&!bD.call(bY.constructor.prototype,"isPrototypeOf")){return false}var bX;for(bX in bY){}return bX===K||bD.call(bY,bX)},isEmptyObject:function(bY){for(var bX in bY){return false}return true},error:function(bX){throw bX},parseJSON:function(bX){if(typeof bX!=="string"||!bX){return null}bX=bC.trim(bX);if(a9.JSON&&a9.JSON.parse){return a9.JSON.parse(bX)}if(bL.test(bX.replace(bU,"@").replace(bN,"]").replace(bH,""))){return(new Function("return "+bX))()}bC.error("Invalid JSON: "+bX)},parseXML:function(bZ,bX,bY){if(a9.DOMParser){bY=new DOMParser();bX=bY.parseFromString(bZ,"text/xml")}else{bX=new ActiveXObject("Microsoft.XMLDOM");bX.async="false";bX.loadXML(bZ)}bY=bX.documentElement;if(!bY||!bY.nodeName||bY.nodeName==="parsererror"){bC.error("Invalid XML: "+bZ)}return bX},noop:function(){},globalEval:function(bX){if(bX&&bK.test(bX)){(a9.execScript||function(bY){a9["eval"].call(a9,bY)})(bX)}},camelCase:function(bX){return bX.replace(by,bR)},nodeName:function(bY,bX){return bY.nodeName&&bY.nodeName.toUpperCase()===bX.toUpperCase()},each:function(b0,b3,bZ){var bY,b1=0,b2=b0.length,bX=b2===K||bC.isFunction(b0);if(bZ){if(bX){for(bY in b0){if(b3.apply(b0[bY],bZ)===false){break}}}else{for(;b10&&bX[0]&&bX[bY-1])||bY===0||bC.isArray(bX));if(b0){for(;bZ1?aF.call(arguments,0):bz;if(!(--bv)){e.resolveWith(e,aF.call(bs,0))}}}if(bw>1){for(;bt
a";bv=bC.getElementsByTagName("*");bK=bC.getElementsByTagName("a")[0];if(!bv||!bv.length||!bK){return{}}bD=aq.createElement("select");bt=bD.appendChild(aq.createElement("option"));bB=bC.getElementsByTagName("input")[0];bz={leadingWhitespace:(bC.firstChild.nodeType===3),tbody:!bC.getElementsByTagName("tbody").length,htmlSerialize:!!bC.getElementsByTagName("link").length,style:/top/.test(bK.getAttribute("style")),hrefNormalized:(bK.getAttribute("href")==="/a"),opacity:/^0.55$/.test(bK.style.opacity),cssFloat:!!bK.style.cssFloat,checkOn:(bB.value==="on"),optSelected:bt.selected,getSetAttribute:bC.className!=="t",submitBubbles:true,changeBubbles:true,focusinBubbles:false,deleteExpando:true,noCloneEvent:true,inlineBlockNeedsLayout:false,shrinkWrapBlocks:false,reliableMarginRight:true};bB.checked=true;bz.noCloneChecked=bB.cloneNode(true).checked;bD.disabled=true;bz.optDisabled=!bt.disabled;try{delete bC.test}catch(bH){bz.deleteExpando=false}if(!bC.addEventListener&&bC.attachEvent&&bC.fireEvent){bC.attachEvent("onclick",function(){bz.noCloneEvent=false});bC.cloneNode(true).fireEvent("onclick")}bB=aq.createElement("input");bB.value="t";bB.setAttribute("type","radio");bz.radioValue=bB.value==="t";bB.setAttribute("checked","checked");bC.appendChild(bB);bs=aq.createDocumentFragment();bs.appendChild(bC.firstChild);bz.checkClone=bs.cloneNode(true).cloneNode(true).lastChild.checked;bC.innerHTML="";bC.style.width=bC.style.paddingLeft="1px";bA=aq.getElementsByTagName("body")[0];by=aq.createElement(bA?"div":"body");bI={visibility:"hidden",width:0,height:0,border:0,margin:0};if(bA){b.extend(bI,{position:"absolute",left:-1000,top:-1000})}for(bF in bI){by.style[bF]=bI[bF]}by.appendChild(bC);bE=bA||bJ;bE.insertBefore(by,bE.firstChild);bz.appendChecked=bB.checked;bz.boxModel=bC.offsetWidth===2;if("zoom" in bC.style){bC.style.display="inline";bC.style.zoom=1;bz.inlineBlockNeedsLayout=(bC.offsetWidth===2);bC.style.display="";bC.innerHTML="
";bz.shrinkWrapBlocks=(bC.offsetWidth!==2)}bC.innerHTML="
t
";bG=bC.getElementsByTagName("td");bL=(bG[0].offsetHeight===0);bG[0].style.display="";bG[1].style.display="none";bz.reliableHiddenOffsets=bL&&(bG[0].offsetHeight===0);bC.innerHTML="";if(aq.defaultView&&aq.defaultView.getComputedStyle){bw=aq.createElement("div");bw.style.width="0";bw.style.marginRight="0";bC.appendChild(bw);bz.reliableMarginRight=(parseInt((aq.defaultView.getComputedStyle(bw,null)||{marginRight:0}).marginRight,10)||0)===0}by.innerHTML="";bE.removeChild(by);if(bC.attachEvent){for(bF in {submit:1,change:1,focusin:1}){bx="on"+bF;bL=(bx in bC);if(!bL){bC.setAttribute(bx,"return;");bL=(typeof bC[bx]==="function")}bz[bF+"Bubbles"]=bL}}by=bs=bD=bt=bA=bw=bC=bB=null;return bz})();b.boxModel=b.support.boxModel;var aM=/^(?:\{.*\}|\[.*\])$/,aw=/([a-z])([A-Z])/g;b.extend({cache:{},uuid:0,expando:"jQuery"+(b.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:true,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:true},hasData:function(e){e=e.nodeType?b.cache[e[b.expando]]:e[b.expando];return !!e&&!T(e)},data:function(bu,bs,bw,bv){if(!b.acceptData(bu)){return}var bz=b.expando,by=typeof bs==="string",bx,bA=bu.nodeType,e=bA?b.cache:bu,bt=bA?bu[b.expando]:bu[b.expando]&&b.expando;if((!bt||(bv&&bt&&!e[bt][bz]))&&by&&bw===K){return}if(!bt){if(bA){bu[b.expando]=bt=++b.uuid}else{bt=b.expando}}if(!e[bt]){e[bt]={};if(!bA){e[bt].toJSON=b.noop}}if(typeof bs==="object"||typeof bs==="function"){if(bv){e[bt][bz]=b.extend(e[bt][bz],bs)}else{e[bt]=b.extend(e[bt],bs)}}bx=e[bt];if(bv){if(!bx[bz]){bx[bz]={}}bx=bx[bz]}if(bw!==K){bx[b.camelCase(bs)]=bw}if(bs==="events"&&!bx[bs]){return bx[bz]&&bx[bz].events}return by?bx[b.camelCase(bs)]||bx[bs]:bx},removeData:function(bv,bt,bw){if(!b.acceptData(bv)){return}var by=b.expando,bz=bv.nodeType,bs=bz?b.cache:bv,bu=bz?bv[b.expando]:b.expando;if(!bs[bu]){return}if(bt){var bx=bw?bs[bu][by]:bs[bu];if(bx){delete bx[bt];if(!T(bx)){return}}}if(bw){delete bs[bu][by];if(!T(bs[bu])){return}}var 
e=bs[bu][by];if(b.support.deleteExpando||bs!=a9){delete bs[bu]}else{bs[bu]=null}if(e){bs[bu]={};if(!bz){bs[bu].toJSON=b.noop}bs[bu][by]=e}else{if(bz){if(b.support.deleteExpando){delete bv[b.expando]}else{if(bv.removeAttribute){bv.removeAttribute(b.expando)}else{bv[b.expando]=null}}}}},_data:function(bs,e,bt){return b.data(bs,e,bt,true)},acceptData:function(bs){if(bs.nodeName){var e=b.noData[bs.nodeName.toLowerCase()];if(e){return !(e===true||bs.getAttribute("classid")!==e)}}return true}});b.fn.extend({data:function(bv,bx){var bw=null;if(typeof bv==="undefined"){if(this.length){bw=b.data(this[0]);if(this[0].nodeType===1){var e=this[0].attributes,bt;for(var bu=0,bs=e.length;bu-1){return true}}return false},val:function(bu){var e,bs,bt=this[0];if(!arguments.length){if(bt){e=b.valHooks[bt.nodeName.toLowerCase()]||b.valHooks[bt.type];if(e&&"get" in e&&(bs=e.get(bt,"value"))!==K){return bs}bs=bt.value;return typeof bs==="string"?bs.replace(aO,""):bs==null?"":bs}return K}var bv=b.isFunction(bu);return this.each(function(bx){var bw=b(this),by;if(this.nodeType!==1){return}if(bv){by=bu.call(this,bx,bw.val())}else{by=bu}if(by==null){by=""}else{if(typeof by==="number"){by+=""}else{if(b.isArray(by)){by=b.map(by,function(bz){return bz==null?"":bz+""})}}}e=b.valHooks[this.nodeName.toLowerCase()]||b.valHooks[this.type];if(!e||!("set" in e)||e.set(this,by,"value")===K){this.value=by}})}});b.extend({valHooks:{option:{get:function(e){var bs=e.attributes.value;return !bs||bs.specified?e.value:e.text}},select:{get:function(e){var bx,bv=e.selectedIndex,by=[],bz=e.options,bu=e.type==="select-one";if(bv<0){return null}for(var bs=bu?bv:0,bw=bu?bv+1:bz.length;bs=0});if(!e.length){bs.selectedIndex=-1}return e}}},attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attrFix:{tabindex:"tabIndex"},attr:function(bx,bu,by,bw){var bs=bx.nodeType;if(!bx||bs===3||bs===8||bs===2){return K}if(bw&&bu in b.attrFn){return b(bx)[bu](by)}if(!("getAttribute" in 
bx)){return b.prop(bx,bu,by)}var bt,e,bv=bs!==1||!b.isXMLDoc(bx);if(bv){bu=b.attrFix[bu]||bu;e=b.attrHooks[bu];if(!e){if(ak.test(bu)){e=aU}else{if(a0&&bu!=="className"&&(b.nodeName(bx,"form")||a5.test(bu))){e=a0}}}}if(by!==K){if(by===null){b.removeAttr(bx,bu);return K}else{if(e&&"set" in e&&bv&&(bt=e.set(bx,by,bu))!==K){return bt}else{bx.setAttribute(bu,""+by);return by}}}else{if(e&&"get" in e&&bv&&(bt=e.get(bx,bu))!==null){return bt}else{bt=bx.getAttribute(bu);return bt===null?K:bt}}},removeAttr:function(bs,e){var bt;if(bs.nodeType===1){e=b.attrFix[e]||e;if(b.support.getSetAttribute){bs.removeAttribute(e)}else{b.attr(bs,e,"");bs.removeAttributeNode(bs.getAttributeNode(e))}if(ak.test(e)&&(bt=b.propFix[e]||e) in bs){bs[bt]=false}}},attrHooks:{type:{set:function(e,bs){if(g.test(e.nodeName)&&e.parentNode){b.error("type property can't be changed")}else{if(!b.support.radioValue&&bs==="radio"&&b.nodeName(e,"input")){var bt=e.value;e.setAttribute("type",bs);if(bt){e.value=bt}return bs}}}},tabIndex:{get:function(bs){var e=bs.getAttributeNode("tabIndex");return e&&e.specified?parseInt(e.value,10):D.test(bs.nodeName)||l.test(bs.nodeName)&&bs.href?0:K}},value:{get:function(bs,e){if(a0&&b.nodeName(bs,"button")){return a0.get(bs,e)}return e in bs?bs.value:null},set:function(bs,bt,e){if(a0&&b.nodeName(bs,"button")){return a0.set(bs,bt,e)}bs.value=bt}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(bw,bu,bx){var bs=bw.nodeType;if(!bw||bs===3||bs===8||bs===2){return K}var bt,e,bv=bs!==1||!b.isXMLDoc(bw);if(bv){bu=b.propFix[bu]||bu;e=b.propHooks[bu]}if(bx!==K){if(e&&"set" in e&&(bt=e.set(bw,bx,bu))!==K){return bt}else{return(bw[bu]=bx)}}else{if(e&&"get" in e&&(bt=e.get(bw,bu))!==K){return bt}else{return 
bw[bu]}}},propHooks:{}});aU={get:function(bs,e){return b.prop(bs,e)?e.toLowerCase():K},set:function(bs,bu,e){var bt;if(bu===false){b.removeAttr(bs,e)}else{bt=b.propFix[e]||e;if(bt in bs){bs[bt]=true}bs.setAttribute(e,e.toLowerCase())}return e}};if(!b.support.getSetAttribute){b.attrFix=b.propFix;a0=b.attrHooks.name=b.attrHooks.title=b.valHooks.button={get:function(bt,bs){var e;e=bt.getAttributeNode(bs);return e&&e.nodeValue!==""?e.nodeValue:K},set:function(bt,bu,bs){var e=bt.getAttributeNode(bs);if(e){e.nodeValue=bu;return bu}}};b.each(["width","height"],function(bs,e){b.attrHooks[e]=b.extend(b.attrHooks[e],{set:function(bt,bu){if(bu===""){bt.setAttribute(e,"auto");return bu}}})})}if(!b.support.hrefNormalized){b.each(["href","src","width","height"],function(bs,e){b.attrHooks[e]=b.extend(b.attrHooks[e],{get:function(bu){var bt=bu.getAttribute(e,2);return bt===null?K:bt}})})}if(!b.support.style){b.attrHooks.style={get:function(e){return e.style.cssText.toLowerCase()||K},set:function(e,bs){return(e.style.cssText=""+bs)}}}if(!b.support.optSelected){b.propHooks.selected=b.extend(b.propHooks.selected,{get:function(bs){var e=bs.parentNode;if(e){e.selectedIndex;if(e.parentNode){e.parentNode.selectedIndex}}}})}if(!b.support.checkOn){b.each(["radio","checkbox"],function(){b.valHooks[this]={get:function(e){return e.getAttribute("value")===null?"on":e.value}}})}b.each(["radio","checkbox"],function(){b.valHooks[this]=b.extend(b.valHooks[this],{set:function(e,bs){if(b.isArray(bs)){return(e.checked=b.inArray(b(e).val(),bs)>=0)}}})});var aW=/\.(.*)$/,bb=/^(?:textarea|input|select)$/i,N=/\./g,bf=/ /g,aC=/[^\w\s.|`]/g,G=function(e){return e.replace(aC,"\\$&")};b.event={add:function(bu,by,bD,bw){if(bu.nodeType===3||bu.nodeType===8){return}if(bD===false){bD=bh}else{if(!bD){return}}var bs,bC;if(bD.handler){bs=bD;bD=bs.handler}if(!bD.guid){bD.guid=b.guid++}var bz=b._data(bu);if(!bz){return}var bE=bz.events,bx=bz.handle;if(!bE){bz.events=bE={}}if(!bx){bz.handle=bx=function(bF){return 
typeof b!=="undefined"&&(!bF||b.event.triggered!==bF.type)?b.event.handle.apply(bx.elem,arguments):K}}bx.elem=bu;by=by.split(" ");var bB,bv=0,e;while((bB=by[bv++])){bC=bs?b.extend({},bs):{handler:bD,data:bw};if(bB.indexOf(".")>-1){e=bB.split(".");bB=e.shift();bC.namespace=e.slice(0).sort().join(".")}else{e=[];bC.namespace=""}bC.type=bB;if(!bC.guid){bC.guid=bD.guid}var bt=bE[bB],bA=b.event.special[bB]||{};if(!bt){bt=bE[bB]=[];if(!bA.setup||bA.setup.call(bu,bw,e,bx)===false){if(bu.addEventListener){bu.addEventListener(bB,bx,false)}else{if(bu.attachEvent){bu.attachEvent("on"+bB,bx)}}}}if(bA.add){bA.add.call(bu,bC);if(!bC.handler.guid){bC.handler.guid=bD.guid}}bt.push(bC);b.event.global[bB]=true}bu=null},global:{},remove:function(bG,bB,bt,bx){if(bG.nodeType===3||bG.nodeType===8){return}if(bt===false){bt=bh}var bJ,bw,by,bD,bE=0,bu,bz,bC,bv,bA,e,bI,bF=b.hasData(bG)&&b._data(bG),bs=bF&&bF.events;if(!bF||!bs){return}if(bB&&bB.type){bt=bB.handler;bB=bB.type}if(!bB||typeof bB==="string"&&bB.charAt(0)==="."){bB=bB||"";for(bw in bs){b.event.remove(bG,bw+bB)}return}bB=bB.split(" ");while((bw=bB[bE++])){bI=bw;e=null;bu=bw.indexOf(".")<0;bz=[];if(!bu){bz=bw.split(".");bw=bz.shift();bC=new RegExp("(^|\\.)"+b.map(bz.slice(0).sort(),G).join("\\.(?:.*\\.)?")+"(\\.|$)")}bA=bs[bw];if(!bA){continue}if(!bt){for(bD=0;bD=0){bB=bB.slice(0,-1);bs=true}if(bB.indexOf(".")>=0){bt=bB.split(".");bB=bt.shift();bt.sort()}if((!bw||b.event.customEvent[bB])&&!b.event.global[bB]){return}e=typeof e==="object"?e[b.expando]?e:new b.Event(bB,e):new b.Event(bB);e.type=bB;e.exclusive=bs;e.namespace=bt.join(".");e.namespace_re=new RegExp("(^|\\.)"+bt.join("\\.(?:.*\\.)?")+"(\\.|$)");if(bD||!bw){e.preventDefault();e.stopPropagation()}if(!bw){b.each(b.cache,function(){var bF=b.expando,bE=this[bF];if(bE&&bE.events&&bE.events[bB]){b.event.trigger(e,by,bE.handle.elem)}});return}if(bw.nodeType===3||bw.nodeType===8){return}e.result=K;e.target=bw;by=by!=null?b.makeArray(by):[];by.unshift(e);var 
bC=bw,bu=bB.indexOf(":")<0?"on"+bB:"";do{var bz=b._data(bC,"handle");e.currentTarget=bC;if(bz){bz.apply(bC,by)}if(bu&&b.acceptData(bC)&&bC[bu]&&bC[bu].apply(bC,by)===false){e.result=false;e.preventDefault()}bC=bC.parentNode||bC.ownerDocument||bC===e.target.ownerDocument&&a9}while(bC&&!e.isPropagationStopped());if(!e.isDefaultPrevented()){var bv,bA=b.event.special[bB]||{};if((!bA._default||bA._default.call(bw.ownerDocument,e)===false)&&!(bB==="click"&&b.nodeName(bw,"a"))&&b.acceptData(bw)){try{if(bu&&bw[bB]){bv=bw[bu];if(bv){bw[bu]=null}b.event.triggered=bB;bw[bB]()}}catch(bx){}if(bv){bw[bu]=bv}b.event.triggered=K}}return e.result},handle:function(by){by=b.event.fix(by||a9.event);var bs=((b._data(this,"events")||{})[by.type]||[]).slice(0),bx=!by.exclusive&&!by.namespace,bv=Array.prototype.slice.call(arguments,0);bv[0]=by;by.currentTarget=this;for(var bu=0,e=bs.length;bu-1?b.map(bs.options,function(bu){return bu.selected}).join("-"):""}else{if(b.nodeName(bs,"select")){bt=bs.selectedIndex}}}return bt},Z=function Z(bu){var bs=bu.target,bt,bv;if(!bb.test(bs.nodeName)||bs.readOnly){return}bt=b._data(bs,"_change_data");bv=k(bs);if(bu.type!=="focusout"||bs.type!=="radio"){b._data(bs,"_change_data",bv)}if(bt===K||bv===bt){return}if(bt!=null||bv){bu.type="change";bu.liveFired=K;b.event.trigger(bu,arguments[1],bs)}};b.event.special.change={filters:{focusout:Z,beforedeactivate:Z,click:function(bu){var bt=bu.target,bs=b.nodeName(bt,"input")?bt.type:"";if(bs==="radio"||bs==="checkbox"||b.nodeName(bt,"select")){Z.call(this,bu)}},keydown:function(bu){var bt=bu.target,bs=b.nodeName(bt,"input")?bt.type:"";if((bu.keyCode===13&&!b.nodeName(bt,"textarea"))||(bu.keyCode===32&&(bs==="checkbox"||bs==="radio"))||bs==="select-multiple"){Z.call(this,bu)}},beforeactivate:function(bt){var bs=bt.target;b._data(bs,"_change_data",k(bs))}},setup:function(bt,bs){if(this.type==="file"){return false}for(var e in bk){b.event.add(this,e+".specialChange",bk[e])}return 
bb.test(this.nodeName)},teardown:function(e){b.event.remove(this,".specialChange");return bb.test(this.nodeName)}};bk=b.event.special.change.filters;bk.focus=bk.beforeactivate}function aT(bs,bu,e){var bt=b.extend({},e[0]);bt.type=bs;bt.originalEvent={};bt.liveFired=K;b.event.handle.call(bu,bt);if(bt.isDefaultPrevented()){e[0].preventDefault()}}if(!b.support.focusinBubbles){b.each({focus:"focusin",blur:"focusout"},function(bu,e){var bs=0;b.event.special[e]={setup:function(){if(bs++===0){aq.addEventListener(bu,bt,true)}},teardown:function(){if(--bs===0){aq.removeEventListener(bu,bt,true)}}};function bt(bv){var bw=b.event.fix(bv);bw.type=e;bw.originalEvent={};b.event.trigger(bw,null,bw.target);if(bw.isDefaultPrevented()){bv.preventDefault()}}})}b.each(["bind","one"],function(bs,e){b.fn[e]=function(by,bz,bx){var bw;if(typeof by==="object"){for(var bv in by){this[e](bv,bz,by[bv],bx)}return this}if(arguments.length===2||bz===false){bx=bz;bz=K}if(e==="one"){bw=function(bA){b(this).unbind(bA,bw);return bx.apply(this,arguments)};bw.guid=bx.guid||b.guid++}else{bw=bx}if(by==="unload"&&e!=="one"){this.one(by,bz,bx)}else{for(var bu=0,bt=this.length;bubu){break}bC.currentTarget=bw.elem;bC.data=bw.handleObj.data;bC.handleObj=bw.handleObj;bG=bw.handleObj.origHandler.apply(bw.elem,arguments);if(bG===false||bC.isPropagationStopped()){bu=bw.level;if(bG===false){bz=false}if(bC.isImmediatePropagationStopped()){break}}}return bz}function p(bs,e){return(bs&&bs!=="*"?bs+".":"")+e.replace(N,"`").replace(bf,"&")}b.each(("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error").split(" "),function(bs,e){b.fn[e]=function(bu,bt){if(bt==null){bt=bu;bu=null}return arguments.length>0?this.bind(e,bu,bt):this.trigger(e)};if(b.attrFn){b.attrFn[e]=true}}); /* * Sizzle CSS Selector Engine * Copyright 2011, The Dojo Foundation * Released under the MIT, BSD, and GPL 
Licenses. * More information: http://sizzlejs.com/ */ (function(){var bC=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,bD=0,bG=Object.prototype.toString,bx=false,bw=true,bE=/\\/g,bK=/\W/;[0,0].sort(function(){bw=false;return 0});var bu=function(bP,e,bS,bT){bS=bS||[];e=e||aq;var bV=e;if(e.nodeType!==1&&e.nodeType!==9){return[]}if(!bP||typeof bP!=="string"){return bS}var bM,bX,b0,bL,bW,bZ,bY,bR,bO=true,bN=bu.isXML(e),bQ=[],bU=bP;do{bC.exec("");bM=bC.exec(bU);if(bM){bU=bM[3];bQ.push(bM[1]);if(bM[2]){bL=bM[3];break}}}while(bM);if(bQ.length>1&&by.exec(bP)){if(bQ.length===2&&bz.relative[bQ[0]]){bX=bH(bQ[0]+bQ[1],e)}else{bX=bz.relative[bQ[0]]?[e]:bu(bQ.shift(),e);while(bQ.length){bP=bQ.shift();if(bz.relative[bP]){bP+=bQ.shift()}bX=bH(bP,bX)}}}else{if(!bT&&bQ.length>1&&e.nodeType===9&&!bN&&bz.match.ID.test(bQ[0])&&!bz.match.ID.test(bQ[bQ.length-1])){bW=bu.find(bQ.shift(),e,bN);e=bW.expr?bu.filter(bW.expr,bW.set)[0]:bW.set[0]}if(e){bW=bT?{expr:bQ.pop(),set:bA(bT)}:bu.find(bQ.pop(),bQ.length===1&&(bQ[0]==="~"||bQ[0]==="+")&&e.parentNode?e.parentNode:e,bN);bX=bW.expr?bu.filter(bW.expr,bW.set):bW.set;if(bQ.length>0){b0=bA(bX)}else{bO=false}while(bQ.length){bZ=bQ.pop();bY=bZ;if(!bz.relative[bZ]){bZ=""}else{bY=bQ.pop()}if(bY==null){bY=e}bz.relative[bZ](b0,bY,bN)}}else{b0=bQ=[]}}if(!b0){b0=bX}if(!b0){bu.error(bZ||bP)}if(bG.call(b0)==="[object Array]"){if(!bO){bS.push.apply(bS,b0)}else{if(e&&e.nodeType===1){for(bR=0;b0[bR]!=null;bR++){if(b0[bR]&&(b0[bR]===true||b0[bR].nodeType===1&&bu.contains(e,b0[bR]))){bS.push(bX[bR])}}}else{for(bR=0;b0[bR]!=null;bR++){if(b0[bR]&&b0[bR].nodeType===1){bS.push(bX[bR])}}}}}else{bA(b0,bS)}if(bL){bu(bL,bV,bS,bT);bu.uniqueSort(bS)}return bS};bu.uniqueSort=function(bL){if(bF){bx=bw;bL.sort(bF);if(bx){for(var e=1;e0};bu.find=function(bR,e,bS){var bQ;if(!bR){return[]}for(var bN=0,bM=bz.order.length;bN":function(bQ,bL){var bP,bO=typeof 
bL==="string",bM=0,e=bQ.length;if(bO&&!bK.test(bL)){bL=bL.toLowerCase();for(;bM=0)){if(!bM){e.push(bP)}}else{if(bM){bL[bO]=false}}}}return false},ID:function(e){return e[1].replace(bE,"")},TAG:function(bL,e){return bL[1].replace(bE,"").toLowerCase()},CHILD:function(e){if(e[1]==="nth"){if(!e[2]){bu.error(e[0])}e[2]=e[2].replace(/^\+|\s*/g,"");var bL=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(e[2]==="even"&&"2n"||e[2]==="odd"&&"2n+1"||!/\D/.test(e[2])&&"0n+"+e[2]||e[2]);e[2]=(bL[1]+(bL[2]||1))-0;e[3]=bL[3]-0}else{if(e[2]){bu.error(e[0])}}e[0]=bD++;return e},ATTR:function(bO,bL,bM,e,bP,bQ){var bN=bO[1]=bO[1].replace(bE,"");if(!bQ&&bz.attrMap[bN]){bO[1]=bz.attrMap[bN]}bO[4]=(bO[4]||bO[5]||"").replace(bE,"");if(bO[2]==="~="){bO[4]=" "+bO[4]+" "}return bO},PSEUDO:function(bO,bL,bM,e,bP){if(bO[1]==="not"){if((bC.exec(bO[3])||"").length>1||/^\w/.test(bO[3])){bO[3]=bu(bO[3],null,null,bL)}else{var bN=bu.filter(bO[3],bL,bM,true^bP);if(!bM){e.push.apply(e,bN)}return false}}else{if(bz.match.POS.test(bO[0])||bz.match.CHILD.test(bO[0])){return true}}return bO},POS:function(e){e.unshift(true);return e}},filters:{enabled:function(e){return e.disabled===false&&e.type!=="hidden"},disabled:function(e){return e.disabled===true},checked:function(e){return e.checked===true},selected:function(e){if(e.parentNode){e.parentNode.selectedIndex}return e.selected===true},parent:function(e){return !!e.firstChild},empty:function(e){return !e.firstChild},has:function(bM,bL,e){return !!bu(e[3],bM).length},header:function(e){return(/h\d/i).test(e.nodeName)},text:function(bM){var e=bM.getAttribute("type"),bL=bM.type;return bM.nodeName.toLowerCase()==="input"&&"text"===bL&&(e===bL||e===null)},radio:function(e){return e.nodeName.toLowerCase()==="input"&&"radio"===e.type},checkbox:function(e){return e.nodeName.toLowerCase()==="input"&&"checkbox"===e.type},file:function(e){return e.nodeName.toLowerCase()==="input"&&"file"===e.type},password:function(e){return 
e.nodeName.toLowerCase()==="input"&&"password"===e.type},submit:function(bL){var e=bL.nodeName.toLowerCase();return(e==="input"||e==="button")&&"submit"===bL.type},image:function(e){return e.nodeName.toLowerCase()==="input"&&"image"===e.type},reset:function(bL){var e=bL.nodeName.toLowerCase();return(e==="input"||e==="button")&&"reset"===bL.type},button:function(bL){var e=bL.nodeName.toLowerCase();return e==="input"&&"button"===bL.type||e==="button"},input:function(e){return(/input|select|textarea|button/i).test(e.nodeName)},focus:function(e){return e===e.ownerDocument.activeElement}},setFilters:{first:function(bL,e){return e===0},last:function(bM,bL,e,bN){return bL===bN.length-1},even:function(bL,e){return e%2===0},odd:function(bL,e){return e%2===1},lt:function(bM,bL,e){return bLe[3]-0},nth:function(bM,bL,e){return e[3]-0===bL},eq:function(bM,bL,e){return e[3]-0===bL}},filter:{PSEUDO:function(bM,bR,bQ,bS){var e=bR[1],bL=bz.filters[e];if(bL){return bL(bM,bQ,bR,bS)}else{if(e==="contains"){return(bM.textContent||bM.innerText||bu.getText([bM])||"").indexOf(bR[3])>=0}else{if(e==="not"){var bN=bR[3];for(var bP=0,bO=bN.length;bP=0)}}},ID:function(bL,e){return bL.nodeType===1&&bL.getAttribute("id")===e},TAG:function(bL,e){return(e==="*"&&bL.nodeType===1)||bL.nodeName.toLowerCase()===e},CLASS:function(bL,e){return(" "+(bL.className||bL.getAttribute("class"))+" ").indexOf(e)>-1},ATTR:function(bP,bN){var bM=bN[1],e=bz.attrHandle[bM]?bz.attrHandle[bM](bP):bP[bM]!=null?bP[bM]:bP.getAttribute(bM),bQ=e+"",bO=bN[2],bL=bN[4];return e==null?bO==="!=":bO==="="?bQ===bL:bO==="*="?bQ.indexOf(bL)>=0:bO==="~="?(" "+bQ+" ").indexOf(bL)>=0:!bL?bQ&&e!==false:bO==="!="?bQ!==bL:bO==="^="?bQ.indexOf(bL)===0:bO==="$="?bQ.substr(bQ.length-bL.length)===bL:bO==="|="?bQ===bL||bQ.substr(0,bL.length+1)===bL+"-":false},POS:function(bO,bL,bM,bP){var e=bL[2],bN=bz.setFilters[e];if(bN){return bN(bO,bM,bL,bP)}}}};var by=bz.match.POS,bt=function(bL,e){return"\\"+(e-0+1)};for(var bv in 
bz.match){bz.match[bv]=new RegExp(bz.match[bv].source+(/(?![^\[]*\])(?![^\(]*\))/.source));bz.leftMatch[bv]=new RegExp(/(^(?:.|\r|\n)*?)/.source+bz.match[bv].source.replace(/\\(\d+)/g,bt))}var bA=function(bL,e){bL=Array.prototype.slice.call(bL,0);if(e){e.push.apply(e,bL);return e}return bL};try{Array.prototype.slice.call(aq.documentElement.childNodes,0)[0].nodeType}catch(bJ){bA=function(bO,bN){var bM=0,bL=bN||[];if(bG.call(bO)==="[object Array]"){Array.prototype.push.apply(bL,bO)}else{if(typeof bO.length==="number"){for(var e=bO.length;bM";e.insertBefore(bL,e.firstChild);if(aq.getElementById(bM)){bz.find.ID=function(bO,bP,bQ){if(typeof bP.getElementById!=="undefined"&&!bQ){var bN=bP.getElementById(bO[1]);return bN?bN.id===bO[1]||typeof bN.getAttributeNode!=="undefined"&&bN.getAttributeNode("id").nodeValue===bO[1]?[bN]:K:[]}};bz.filter.ID=function(bP,bN){var bO=typeof bP.getAttributeNode!=="undefined"&&bP.getAttributeNode("id");return bP.nodeType===1&&bO&&bO.nodeValue===bN}}e.removeChild(bL);e=bL=null})();(function(){var e=aq.createElement("div");e.appendChild(aq.createComment(""));if(e.getElementsByTagName("*").length>0){bz.find.TAG=function(bL,bP){var bO=bP.getElementsByTagName(bL[1]);if(bL[1]==="*"){var bN=[];for(var bM=0;bO[bM];bM++){if(bO[bM].nodeType===1){bN.push(bO[bM])}}bO=bN}return bO}}e.innerHTML="";if(e.firstChild&&typeof e.firstChild.getAttribute!=="undefined"&&e.firstChild.getAttribute("href")!=="#"){bz.attrHandle.href=function(bL){return bL.getAttribute("href",2)}}e=null})();if(aq.querySelectorAll){(function(){var e=bu,bN=aq.createElement("div"),bM="__sizzle__";bN.innerHTML="

";if(bN.querySelectorAll&&bN.querySelectorAll(".TEST").length===0){return}bu=function(bY,bP,bT,bX){bP=bP||aq;if(!bX&&!bu.isXML(bP)){var bW=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(bY);if(bW&&(bP.nodeType===1||bP.nodeType===9)){if(bW[1]){return bA(bP.getElementsByTagName(bY),bT)}else{if(bW[2]&&bz.find.CLASS&&bP.getElementsByClassName){return bA(bP.getElementsByClassName(bW[2]),bT)}}}if(bP.nodeType===9){if(bY==="body"&&bP.body){return bA([bP.body],bT)}else{if(bW&&bW[3]){var bS=bP.getElementById(bW[3]);if(bS&&bS.parentNode){if(bS.id===bW[3]){return bA([bS],bT)}}else{return bA([],bT)}}}try{return bA(bP.querySelectorAll(bY),bT)}catch(bU){}}else{if(bP.nodeType===1&&bP.nodeName.toLowerCase()!=="object"){var bQ=bP,bR=bP.getAttribute("id"),bO=bR||bM,b0=bP.parentNode,bZ=/^\s*[+~]/.test(bY);if(!bR){bP.setAttribute("id",bO)}else{bO=bO.replace(/'/g,"\\$&")}if(bZ&&b0){bP=bP.parentNode}try{if(!bZ||b0){return bA(bP.querySelectorAll("[id='"+bO+"'] "+bY),bT)}}catch(bV){}finally{if(!bR){bQ.removeAttribute("id")}}}}}return e(bY,bP,bT,bX)};for(var bL in e){bu[bL]=e[bL]}bN=null})()}(function(){var e=aq.documentElement,bM=e.matchesSelector||e.mozMatchesSelector||e.webkitMatchesSelector||e.msMatchesSelector;if(bM){var bO=!bM.call(aq.createElement("div"),"div"),bL=false;try{bM.call(aq.documentElement,"[test!='']:sizzle")}catch(bN){bL=true}bu.matchesSelector=function(bQ,bS){bS=bS.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!bu.isXML(bQ)){try{if(bL||!bz.match.PSEUDO.test(bS)&&!/!=/.test(bS)){var bP=bM.call(bQ,bS);if(bP||!bO||bQ.document&&bQ.document.nodeType!==11){return bP}}}catch(bR){}}return bu(bS,null,null,[bQ]).length>0}}})();(function(){var e=aq.createElement("div");e.innerHTML="
";if(!e.getElementsByClassName||e.getElementsByClassName("e").length===0){return}e.lastChild.className="e";if(e.getElementsByClassName("e").length===1){return}bz.order.splice(1,0,"CLASS");bz.find.CLASS=function(bL,bM,bN){if(typeof bM.getElementsByClassName!=="undefined"&&!bN){return bM.getElementsByClassName(bL[1])}};e=null})();function bs(bL,bQ,bP,bT,bR,bS){for(var bN=0,bM=bT.length;bN0){bO=e;break}}}e=e[bL]}bT[bN]=bO}}}if(aq.documentElement.contains){bu.contains=function(bL,e){return bL!==e&&(bL.contains?bL.contains(e):true)}}else{if(aq.documentElement.compareDocumentPosition){bu.contains=function(bL,e){return !!(bL.compareDocumentPosition(e)&16)}}else{bu.contains=function(){return false}}}bu.isXML=function(e){var bL=(e?e.ownerDocument||e:0).documentElement;return bL?bL.nodeName!=="HTML":false};var bH=function(e,bR){var bP,bN=[],bO="",bM=bR.nodeType?[bR]:bR;while((bP=bz.match.PSEUDO.exec(e))){bO+=bP[0];e=e.replace(bz.match.PSEUDO,"")}e=bz.relative[e]?e+"*":e;for(var bQ=0,bL=bM.length;bQ0){for(by=bx;by0:this.filter(e).length>0)},closest:function(bB,bs){var by=[],bv,bt,bA=this[0];if(b.isArray(bB)){var bx,bu,bw={},e=1;if(bA&&bB.length){for(bv=0,bt=bB.length;bv-1:b(bA).is(bx)){by.push({selector:bu,elem:bA,level:e})}}bA=bA.parentNode;e++}}return by}var bz=H.test(bB)||typeof bB!=="string"?b(bB,bs||this.context):0;for(bv=0,bt=this.length;bv-1:b.find.matchesSelector(bA,bB)){by.push(bA);break}else{bA=bA.parentNode;if(!bA||!bA.ownerDocument||bA===bs||bA.nodeType===11){break}}}}by=by.length>1?b.unique(by):by;return this.pushStack(by,"closest",bB)},index:function(e){if(!e||typeof e==="string"){return b.inArray(this[0],e?b(e):this.parent().children())}return b.inArray(e.jquery?e[0]:e,this)},add:function(e,bs){var bu=typeof e==="string"?b(e,bs):b.makeArray(e&&e.nodeType?[e]:e),bt=b.merge(this.get(),bu);return this.pushStack(C(bu[0])||C(bt[0])?bt:b.unique(bt))},andSelf:function(){return this.add(this.prevObject)}});function C(e){return 
!e||!e.parentNode||e.parentNode.nodeType===11}b.each({parent:function(bs){var e=bs.parentNode;return e&&e.nodeType!==11?e:null},parents:function(e){return b.dir(e,"parentNode")},parentsUntil:function(bs,e,bt){return b.dir(bs,"parentNode",bt)},next:function(e){return b.nth(e,2,"nextSibling")},prev:function(e){return b.nth(e,2,"previousSibling")},nextAll:function(e){return b.dir(e,"nextSibling")},prevAll:function(e){return b.dir(e,"previousSibling")},nextUntil:function(bs,e,bt){return b.dir(bs,"nextSibling",bt)},prevUntil:function(bs,e,bt){return b.dir(bs,"previousSibling",bt)},siblings:function(e){return b.sibling(e.parentNode.firstChild,e)},children:function(e){return b.sibling(e.firstChild)},contents:function(e){return b.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:b.makeArray(e.childNodes)}},function(e,bs){b.fn[e]=function(bw,bt){var bv=b.map(this,bs,bw),bu=P.call(arguments);if(!Y.test(e)){bt=bw}if(bt&&typeof bt==="string"){bv=b.filter(bt,bv)}bv=this.length>1&&!au[e]?b.unique(bv):bv;if((this.length>1||a7.test(bt))&&am.test(e)){bv=bv.reverse()}return this.pushStack(bv,e,bu.join(","))}});b.extend({filter:function(bt,e,bs){if(bs){bt=":not("+bt+")"}return e.length===1?b.find.matchesSelector(e[0],bt)?[e[0]]:[]:b.find.matches(bt,e)},dir:function(bt,bs,bv){var e=[],bu=bt[bs];while(bu&&bu.nodeType!==9&&(bv===K||bu.nodeType!==1||!b(bu).is(bv))){if(bu.nodeType===1){e.push(bu)}bu=bu[bs]}return e},nth:function(bv,e,bt,bu){e=e||1;var bs=0;for(;bv;bv=bv[bt]){if(bv.nodeType===1&&++bs===e){break}}return bv},sibling:function(bt,bs){var e=[];for(;bt;bt=bt.nextSibling){if(bt.nodeType===1&&bt!==bs){e.push(bt)}}return e}});function aB(bu,bt,e){bt=bt||0;if(b.isFunction(bt)){return b.grep(bu,function(bw,bv){var bx=!!bt.call(bw,bv,bw);return bx===e})}else{if(bt.nodeType){return b.grep(bu,function(bw,bv){return(bw===bt)===e})}else{if(typeof bt==="string"){var bs=b.grep(bu,function(bv){return bv.nodeType===1});if(bn.test(bt)){return 
b.filter(bt,bs,!e)}else{bt=b.filter(bt,bs)}}}}return b.grep(bu,function(bw,bv){return(b.inArray(bw,bt)>=0)===e})}var ad=/ jQuery\d+="(?:\d+|null)"/g,an=/^\s+/,S=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,d=/<([\w:]+)/,w=/",""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]};at.optgroup=at.option;at.tbody=at.tfoot=at.colgroup=at.caption=at.thead;at.th=at.td;if(!b.support.htmlSerialize){at._default=[1,"div
","
"]}b.fn.extend({text:function(e){if(b.isFunction(e)){return this.each(function(bt){var bs=b(this);bs.text(e.call(this,bt,bs.text()))})}if(typeof e!=="object"&&e!==K){return this.empty().append((this[0]&&this[0].ownerDocument||aq).createTextNode(e))}return b.text(this)},wrapAll:function(e){if(b.isFunction(e)){return this.each(function(bt){b(this).wrapAll(e.call(this,bt))})}if(this[0]){var bs=b(e,this[0].ownerDocument).eq(0).clone(true);if(this[0].parentNode){bs.insertBefore(this[0])}bs.map(function(){var bt=this;while(bt.firstChild&&bt.firstChild.nodeType===1){bt=bt.firstChild}return bt}).append(this)}return this},wrapInner:function(e){if(b.isFunction(e)){return this.each(function(bs){b(this).wrapInner(e.call(this,bs))})}return this.each(function(){var bs=b(this),bt=bs.contents();if(bt.length){bt.wrapAll(e)}else{bs.append(e)}})},wrap:function(e){return this.each(function(){b(this).wrapAll(e)})},unwrap:function(){return this.parent().each(function(){if(!b.nodeName(this,"body")){b(this).replaceWith(this.childNodes)}}).end()},append:function(){return this.domManip(arguments,true,function(e){if(this.nodeType===1){this.appendChild(e)}})},prepend:function(){return this.domManip(arguments,true,function(e){if(this.nodeType===1){this.insertBefore(e,this.firstChild)}})},before:function(){if(this[0]&&this[0].parentNode){return this.domManip(arguments,false,function(bs){this.parentNode.insertBefore(bs,this)})}else{if(arguments.length){var e=b(arguments[0]);e.push.apply(e,this.toArray());return this.pushStack(e,"before",arguments)}}},after:function(){if(this[0]&&this[0].parentNode){return this.domManip(arguments,false,function(bs){this.parentNode.insertBefore(bs,this.nextSibling)})}else{if(arguments.length){var e=this.pushStack(this,"after",arguments);e.push.apply(e,b(arguments[0]).toArray());return e}}},remove:function(e,bu){for(var 
bs=0,bt;(bt=this[bs])!=null;bs++){if(!e||b.filter(e,[bt]).length){if(!bu&&bt.nodeType===1){b.cleanData(bt.getElementsByTagName("*"));b.cleanData([bt])}if(bt.parentNode){bt.parentNode.removeChild(bt)}}}return this},empty:function(){for(var e=0,bs;(bs=this[e])!=null;e++){if(bs.nodeType===1){b.cleanData(bs.getElementsByTagName("*"))}while(bs.firstChild){bs.removeChild(bs.firstChild)}}return this},clone:function(bs,e){bs=bs==null?false:bs;e=e==null?bs:e;return this.map(function(){return b.clone(this,bs,e)})},html:function(bu){if(bu===K){return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(ad,""):null}else{if(typeof bu==="string"&&!O.test(bu)&&(b.support.leadingWhitespace||!an.test(bu))&&!at[(d.exec(bu)||["",""])[1].toLowerCase()]){bu=bu.replace(S,"<$1>");try{for(var bt=0,bs=this.length;bt1&&bt0?this.clone(true):this).get();b(bz[bx])[bs](bv);bw=bw.concat(bv)}return this.pushStack(bw,e,bz.selector)}}});function bc(e){if("getElementsByTagName" in e){return e.getElementsByTagName("*")}else{if("querySelectorAll" in e){return e.querySelectorAll("*")}else{return[]}}}function av(e){if(e.type==="checkbox"||e.type==="radio"){e.defaultChecked=e.checked}}function E(e){if(b.nodeName(e,"input")){av(e)}else{if("getElementsByTagName" in e){b.grep(e.getElementsByTagName("input"),av)}}}b.extend({clone:function(bv,bx,bt){var bw=bv.cloneNode(true),e,bs,bu;if((!b.support.noCloneEvent||!b.support.noCloneChecked)&&(bv.nodeType===1||bv.nodeType===11)&&!b.isXMLDoc(bv)){ae(bv,bw);e=bc(bv);bs=bc(bw);for(bu=0;e[bu];++bu){ae(e[bu],bs[bu])}}if(bx){t(bv,bw);if(bt){e=bc(bv);bs=bc(bw);for(bu=0;e[bu];++bu){t(e[bu],bs[bu])}}}e=bs=null;return bw},clean:function(bt,bv,bE,bx){var bC;bv=bv||aq;if(typeof bv.createElement==="undefined"){bv=bv.ownerDocument||bv[0]&&bv[0].ownerDocument||aq}var bF=[],by;for(var bB=0,bw;(bw=bt[bB])!=null;bB++){if(typeof bw==="number"){bw+=""}if(!bw){continue}if(typeof bw==="string"){if(!V.test(bw)){bw=bv.createTextNode(bw)}else{bw=bw.replace(S,"<$1>");var 
bH=(d.exec(bw)||["",""])[1].toLowerCase(),bu=at[bH]||at._default,bA=bu[0],bs=bv.createElement("div");bs.innerHTML=bu[1]+bw+bu[2];while(bA--){bs=bs.lastChild}if(!b.support.tbody){var e=w.test(bw),bz=bH==="table"&&!e?bs.firstChild&&bs.firstChild.childNodes:bu[1]===""&&!e?bs.childNodes:[];for(by=bz.length-1;by>=0;--by){if(b.nodeName(bz[by],"tbody")&&!bz[by].childNodes.length){bz[by].parentNode.removeChild(bz[by])}}}if(!b.support.leadingWhitespace&&an.test(bw)){bs.insertBefore(bv.createTextNode(an.exec(bw)[0]),bs.firstChild)}bw=bs.childNodes}}var bD;if(!b.support.appendChecked){if(bw[0]&&typeof(bD=bw.length)==="number"){for(by=0;by=0){return bu+"px"}}else{return bu}}}});if(!b.support.opacity){b.cssHooks.opacity={get:function(bs,e){return ap.test((e&&bs.currentStyle?bs.currentStyle.filter:bs.style.filter)||"")?(parseFloat(RegExp.$1)/100)+"":e?"1":""},set:function(bv,bw){var bu=bv.style,bs=bv.currentStyle;bu.zoom=1;var e=b.isNaN(bw)?"":"alpha(opacity="+bw*100+")",bt=bs&&bs.filter||bu.filter||"";bu.filter=ah.test(bt)?bt.replace(ah,e):bt+" "+e}}}b(function(){if(!b.support.reliableMarginRight){b.cssHooks.marginRight={get:function(bt,bs){var e;b.swap(bt,{display:"inline-block"},function(){if(bs){e=W(bt,"margin-right","marginRight")}else{e=bt.style.marginRight}});return e}}}});if(aq.defaultView&&aq.defaultView.getComputedStyle){aE=function(bv,bt){var bs,bu,e;bt=bt.replace(z,"-$1").toLowerCase();if(!(bu=bv.ownerDocument.defaultView)){return K}if((e=bu.getComputedStyle(bv,null))){bs=e.getPropertyValue(bt);if(bs===""&&!b.contains(bv.ownerDocument.documentElement,bv)){bs=b.style(bv,bt)}}return bs}}if(aq.documentElement.currentStyle){aS=function(bv,bt){var bw,bs=bv.currentStyle&&bv.currentStyle[bt],e=bv.runtimeStyle&&bv.runtimeStyle[bt],bu=bv.style;if(!ba.test(bs)&&bl.test(bs)){bw=bu.left;if(e){bv.runtimeStyle.left=bv.currentStyle.left}bu.left=bt==="fontSize"?"1em":(bs||0);bs=bu.pixelLeft+"px";bu.left=bw;if(e){bv.runtimeStyle.left=e}}return bs===""?"auto":bs}}W=aE||aS;function 
o(bt,bs,e){var bv=bs==="width"?bt.offsetWidth:bt.offsetHeight,bu=bs==="width"?aj:aY;if(bv>0){if(e!=="border"){b.each(bu,function(){if(!e){bv-=parseFloat(b.css(bt,"padding"+this))||0}if(e==="margin"){bv+=parseFloat(b.css(bt,e+this))||0}else{bv-=parseFloat(b.css(bt,"border"+this+"Width"))||0}})}return bv+"px"}bv=W(bt,bs,bs);if(bv<0||bv==null){bv=bt.style[bs]||0}bv=parseFloat(bv)||0;if(e){b.each(bu,function(){bv+=parseFloat(b.css(bt,"padding"+this))||0;if(e!=="padding"){bv+=parseFloat(b.css(bt,"border"+this+"Width"))||0}if(e==="margin"){bv+=parseFloat(b.css(bt,e+this))||0}})}return bv+"px"}if(b.expr&&b.expr.filters){b.expr.filters.hidden=function(bt){var bs=bt.offsetWidth,e=bt.offsetHeight;return(bs===0&&e===0)||(!b.support.reliableHiddenOffsets&&(bt.style.display||b.css(bt,"display"))==="none")};b.expr.filters.visible=function(e){return !b.expr.filters.hidden(e)}}var j=/%20/g,al=/\[\]$/,bq=/\r?\n/g,bo=/#.*$/,az=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,aV=/^(?:color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,aI=/^(?:about|app|app\-storage|.+\-extension|file|widget):$/,aL=/^(?:GET|HEAD)$/,c=/^\/\//,L=/\?/,a3=/)<[^<]*)*<\/script>/gi,q=/^(?:select|textarea)/i,h=/\s+/,bp=/([?&])_=[^&]*/,J=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,A=b.fn.load,X={},r={},aA,s;try{aA=bi.href}catch(ar){aA=aq.createElement("a");aA.href="";aA=aA.href}s=J.exec(aA.toLowerCase())||[];function f(e){return function(bv,bx){if(typeof bv!=="string"){bx=bv;bv="*"}if(b.isFunction(bx)){var bu=bv.toLowerCase().split(h),bt=0,bw=bu.length,bs,by,bz;for(;bt=0){var e=bt.slice(bv,bt.length);bt=bt.slice(0,bv)}var bu="GET";if(bw){if(b.isFunction(bw)){bx=bw;bw=K}else{if(typeof bw==="object"){bw=b.param(bw,b.ajaxSettings.traditional);bu="POST"}}}var bs=this;b.ajax({url:bt,type:bu,dataType:"html",data:bw,complete:function(bz,by,bA){bA=bz.responseText;if(bz.isResolved()){bz.done(function(bB){bA=bB});bs.html(e?b("
").append(bA.replace(a3,"")).find(e):bA)}if(bx){bs.each(bx,[bA,by,bz])}}});return this},serialize:function(){return b.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?b.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||q.test(this.nodeName)||aV.test(this.type))}).map(function(e,bs){var bt=b(this).val();return bt==null?null:b.isArray(bt)?b.map(bt,function(bv,bu){return{name:bs.name,value:bv.replace(bq,"\r\n")}}):{name:bs.name,value:bt.replace(bq,"\r\n")}}).get()}});b.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,bs){b.fn[bs]=function(bt){return this.bind(bs,bt)}});b.each(["get","post"],function(e,bs){b[bs]=function(bt,bv,bw,bu){if(b.isFunction(bv)){bu=bu||bw;bw=bv;bv=K}return b.ajax({type:bs,url:bt,data:bv,success:bw,dataType:bu})}});b.extend({getScript:function(e,bs){return b.get(e,K,bs,"script")},getJSON:function(e,bs,bt){return b.get(e,bs,bt,"json")},ajaxSetup:function(bt,e){if(!e){e=bt;bt=b.extend(true,b.ajaxSettings,e)}else{b.extend(true,bt,b.ajaxSettings,e)}for(var bs in {context:1,url:1}){if(bs in e){bt[bs]=e[bs]}else{if(bs in b.ajaxSettings){bt[bs]=b.ajaxSettings[bs]}}}return bt},ajaxSettings:{url:aA,isLocal:aI.test(s[1]),global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":"*/*"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a9.String,"text html":true,"text json":b.parseJSON,"text xml":b.parseXML}},ajaxPrefilter:f(X),ajaxTransport:f(r),ajax:function(bw,bu){if(typeof bw==="object"){bu=bw;bw=K}bu=bu||{};var bA=b.ajaxSetup({},bu),bP=bA.context||bA,bD=bP!==bA&&(bP.nodeType||bP instanceof 
b)?b(bP):b.event,bO=b.Deferred(),bK=b._Deferred(),by=bA.statusCode||{},bz,bE={},bL={},bN,bv,bI,bB,bF,bx=0,bt,bH,bG={readyState:0,setRequestHeader:function(bQ,bR){if(!bx){var e=bQ.toLowerCase();bQ=bL[e]=bL[e]||bQ;bE[bQ]=bR}return this},getAllResponseHeaders:function(){return bx===2?bN:null},getResponseHeader:function(bQ){var e;if(bx===2){if(!bv){bv={};while((e=az.exec(bN))){bv[e[1].toLowerCase()]=e[2]}}e=bv[bQ.toLowerCase()]}return e===K?null:e},overrideMimeType:function(e){if(!bx){bA.mimeType=e}return this},abort:function(e){e=e||"abort";if(bI){bI.abort(e)}bC(0,e);return this}};function bC(bV,bT,bW,bS){if(bx===2){return}bx=2;if(bB){clearTimeout(bB)}bI=K;bN=bS||"";bG.readyState=bV?4:0;var bQ,b0,bZ,bU=bW?bg(bA,bG,bW):K,bR,bY;if(bV>=200&&bV<300||bV===304){if(bA.ifModified){if((bR=bG.getResponseHeader("Last-Modified"))){b.lastModified[bz]=bR}if((bY=bG.getResponseHeader("Etag"))){b.etag[bz]=bY}}if(bV===304){bT="notmodified";bQ=true}else{try{b0=F(bA,bU);bT="success";bQ=true}catch(bX){bT="parsererror";bZ=bX}}}else{bZ=bT;if(!bT||bV){bT="error";if(bV<0){bV=0}}}bG.status=bV;bG.statusText=bT;if(bQ){bO.resolveWith(bP,[b0,bT,bG])}else{bO.rejectWith(bP,[bG,bT,bZ])}bG.statusCode(by);by=K;if(bt){bD.trigger("ajax"+(bQ?"Success":"Error"),[bG,bA,bQ?b0:bZ])}bK.resolveWith(bP,[bG,bT]);if(bt){bD.trigger("ajaxComplete",[bG,bA]);if(!(--b.active)){b.event.trigger("ajaxStop")}}}bO.promise(bG);bG.success=bG.done;bG.error=bG.fail;bG.complete=bK.done;bG.statusCode=function(bQ){if(bQ){var e;if(bx<2){for(e in bQ){by[e]=[by[e],bQ[e]]}}else{e=bQ[bG.status];bG.then(e,e)}}return this};bA.url=((bw||bA.url)+"").replace(bo,"").replace(c,s[1]+"//");bA.dataTypes=b.trim(bA.dataType||"*").toLowerCase().split(h);if(bA.crossDomain==null){bF=J.exec(bA.url.toLowerCase());bA.crossDomain=!!(bF&&(bF[1]!=s[1]||bF[2]!=s[2]||(bF[3]||(bF[1]==="http:"?80:443))!=(s[3]||(s[1]==="http:"?80:443))))}if(bA.data&&bA.processData&&typeof 
bA.data!=="string"){bA.data=b.param(bA.data,bA.traditional)}aQ(X,bA,bu,bG);if(bx===2){return false}bt=bA.global;bA.type=bA.type.toUpperCase();bA.hasContent=!aL.test(bA.type);if(bt&&b.active++===0){b.event.trigger("ajaxStart")}if(!bA.hasContent){if(bA.data){bA.url+=(L.test(bA.url)?"&":"?")+bA.data}bz=bA.url;if(bA.cache===false){var bs=b.now(),bM=bA.url.replace(bp,"$1_="+bs);bA.url=bM+((bM===bA.url)?(L.test(bA.url)?"&":"?")+"_="+bs:"")}}if(bA.data&&bA.hasContent&&bA.contentType!==false||bu.contentType){bG.setRequestHeader("Content-Type",bA.contentType)}if(bA.ifModified){bz=bz||bA.url;if(b.lastModified[bz]){bG.setRequestHeader("If-Modified-Since",b.lastModified[bz])}if(b.etag[bz]){bG.setRequestHeader("If-None-Match",b.etag[bz])}}bG.setRequestHeader("Accept",bA.dataTypes[0]&&bA.accepts[bA.dataTypes[0]]?bA.accepts[bA.dataTypes[0]]+(bA.dataTypes[0]!=="*"?", */*; q=0.01":""):bA.accepts["*"]);for(bH in bA.headers){bG.setRequestHeader(bH,bA.headers[bH])}if(bA.beforeSend&&(bA.beforeSend.call(bP,bG,bA)===false||bx===2)){bG.abort();return false}for(bH in {success:1,error:1,complete:1}){bG[bH](bA[bH])}bI=aQ(r,bA,bu,bG);if(!bI){bC(-1,"No Transport")}else{bG.readyState=1;if(bt){bD.trigger("ajaxSend",[bG,bA])}if(bA.async&&bA.timeout>0){bB=setTimeout(function(){bG.abort("timeout")},bA.timeout)}try{bx=1;bI.send(bE,bC)}catch(bJ){if(status<2){bC(-1,bJ)}else{b.error(bJ)}}}return bG},param:function(e,bt){var bs=[],bv=function(bw,bx){bx=b.isFunction(bx)?bx():bx;bs[bs.length]=encodeURIComponent(bw)+"="+encodeURIComponent(bx)};if(bt===K){bt=b.ajaxSettings.traditional}if(b.isArray(e)||(e.jquery&&!b.isPlainObject(e))){b.each(e,function(){bv(this.name,this.value)})}else{for(var bu in e){v(bu,e[bu],bt,bv)}}return bs.join("&").replace(j,"+")}});function v(bt,bv,bs,bu){if(b.isArray(bv)){b.each(bv,function(bx,bw){if(bs||al.test(bt)){bu(bt,bw)}else{v(bt+"["+(typeof bw==="object"||b.isArray(bw)?bx:"")+"]",bw,bs,bu)}})}else{if(!bs&&bv!=null&&typeof bv==="object"){for(var e in 
bv){v(bt+"["+e+"]",bv[e],bs,bu)}}else{bu(bt,bv)}}}b.extend({active:0,lastModified:{},etag:{}});function bg(bA,bz,bw){var bs=bA.contents,by=bA.dataTypes,bt=bA.responseFields,bv,bx,bu,e;for(bx in bt){if(bx in bw){bz[bt[bx]]=bw[bx]}}while(by[0]==="*"){by.shift();if(bv===K){bv=bA.mimeType||bz.getResponseHeader("content-type")}}if(bv){for(bx in bs){if(bs[bx]&&bs[bx].test(bv)){by.unshift(bx);break}}}if(by[0] in bw){bu=by[0]}else{for(bx in bw){if(!by[0]||bA.converters[bx+" "+by[0]]){bu=bx;break}if(!e){e=bx}}bu=bu||e}if(bu){if(bu!==by[0]){by.unshift(bu)}return bw[bu]}}function F(bE,bw){if(bE.dataFilter){bw=bE.dataFilter(bw,bE.dataType)}var bA=bE.dataTypes,bD={},bx,bB,bt=bA.length,by,bz=bA[0],bu,bv,bC,bs,e;for(bx=1;bx=bs.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();bs.animatedProperties[this.prop]=true;for(bt in bs.animatedProperties){if(bs.animatedProperties[bt]!==true){e=false}}if(e){if(bs.overflow!=null&&!b.support.shrinkWrapBlocks){b.each(["","X","Y"],function(bz,bA){bw.style["overflow"+bA]=bs.overflow[bz]})}if(bs.hide){b(bw).hide()}if(bs.hide||bs.show){for(var bx in bs.animatedProperties){b.style(bw,bx,bs.orig[bx])}}bs.complete.call(bw)}return false}else{if(bs.duration==Infinity){this.now=bu}else{by=bu-this.startTime;this.state=by/bs.duration;this.pos=b.easing[bs.animatedProperties[this.prop]](this.state,by,0,1,bs.duration);this.now=this.start+((this.end-this.start)*this.pos)}this.update()}return true}};b.extend(b.fx,{tick:function(){for(var bs=b.timers,e=0;e").appendTo(e),bt=bs.css("display");bs.remove();if(bt==="none"||bt===""){if(!a6){a6=aq.createElement("iframe");a6.frameBorder=a6.width=a6.height=0}e.appendChild(a6);if(!m||!a6.createElement){m=(a6.contentWindow||a6.contentDocument).document;m.write((aq.compatMode==="CSS1Compat"?"":"")+"");m.close()}bs=m.createElement(bu);m.body.appendChild(bs);bt=b.css(bs,"display");e.removeChild(a6)}Q[bu]=bt}return Q[bu]}var U=/^t(?:able|d|h)$/i,aa=/^(?:body|html)$/i;if("getBoundingClientRect" in 
aq.documentElement){b.fn.offset=function(bF){var bv=this[0],by;if(bF){return this.each(function(e){b.offset.setOffset(this,bF,e)})}if(!bv||!bv.ownerDocument){return null}if(bv===bv.ownerDocument.body){return b.offset.bodyOffset(bv)}try{by=bv.getBoundingClientRect()}catch(bC){}var bE=bv.ownerDocument,bt=bE.documentElement;if(!by||!b.contains(bt,bv)){return by?{top:by.top,left:by.left}:{top:0,left:0}}var bz=bE.body,bA=aG(bE),bx=bt.clientTop||bz.clientTop||0,bB=bt.clientLeft||bz.clientLeft||0,bs=bA.pageYOffset||b.support.boxModel&&bt.scrollTop||bz.scrollTop,bw=bA.pageXOffset||b.support.boxModel&&bt.scrollLeft||bz.scrollLeft,bD=by.top+bs-bx,bu=by.left+bw-bB;return{top:bD,left:bu}}}else{b.fn.offset=function(bC){var bw=this[0];if(bC){return this.each(function(bD){b.offset.setOffset(this,bC,bD)})}if(!bw||!bw.ownerDocument){return null}if(bw===bw.ownerDocument.body){return b.offset.bodyOffset(bw)}b.offset.initialize();var bz,bt=bw.offsetParent,bs=bw,bB=bw.ownerDocument,bu=bB.documentElement,bx=bB.body,by=bB.defaultView,e=by?by.getComputedStyle(bw,null):bw.currentStyle,bA=bw.offsetTop,bv=bw.offsetLeft;while((bw=bw.parentNode)&&bw!==bx&&bw!==bu){if(b.offset.supportsFixedPosition&&e.position==="fixed"){break}bz=by?by.getComputedStyle(bw,null):bw.currentStyle;bA-=bw.scrollTop;bv-=bw.scrollLeft;if(bw===bt){bA+=bw.offsetTop;bv+=bw.offsetLeft;if(b.offset.doesNotAddBorder&&!(b.offset.doesAddBorderForTableAndCells&&U.test(bw.nodeName))){bA+=parseFloat(bz.borderTopWidth)||0;bv+=parseFloat(bz.borderLeftWidth)||0}bs=bt;bt=bw.offsetParent}if(b.offset.subtractsBorderForOverflowNotVisible&&bz.overflow!=="visible"){bA+=parseFloat(bz.borderTopWidth)||0;bv+=parseFloat(bz.borderLeftWidth)||0}e=bz}if(e.position==="relative"||e.position==="static"){bA+=bx.offsetTop;bv+=bx.offsetLeft}if(b.offset.supportsFixedPosition&&e.position==="fixed"){bA+=Math.max(bu.scrollTop,bx.scrollTop);bv+=Math.max(bu.scrollLeft,bx.scrollLeft)}return{top:bA,left:bv}}}b.offset={initialize:function(){var 
e=aq.body,bs=aq.createElement("div"),bv,bx,bw,by,bt=parseFloat(b.css(e,"marginTop"))||0,bu="
";b.extend(bs.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});bs.innerHTML=bu;e.insertBefore(bs,e.firstChild);bv=bs.firstChild;bx=bv.firstChild;by=bv.nextSibling.firstChild.firstChild;this.doesNotAddBorder=(bx.offsetTop!==5);this.doesAddBorderForTableAndCells=(by.offsetTop===5);bx.style.position="fixed";bx.style.top="20px";this.supportsFixedPosition=(bx.offsetTop===20||bx.offsetTop===15);bx.style.position=bx.style.top="";bv.style.overflow="hidden";bv.style.position="relative";this.subtractsBorderForOverflowNotVisible=(bx.offsetTop===-5);this.doesNotIncludeMarginInBodyOffset=(e.offsetTop!==bt);e.removeChild(bs);b.offset.initialize=b.noop},bodyOffset:function(e){var bt=e.offsetTop,bs=e.offsetLeft;b.offset.initialize();if(b.offset.doesNotIncludeMarginInBodyOffset){bt+=parseFloat(b.css(e,"marginTop"))||0;bs+=parseFloat(b.css(e,"marginLeft"))||0}return{top:bt,left:bs}},setOffset:function(bu,bD,bx){var by=b.css(bu,"position");if(by==="static"){bu.style.position="relative"}var bw=b(bu),bs=bw.offset(),e=b.css(bu,"top"),bB=b.css(bu,"left"),bC=(by==="absolute"||by==="fixed")&&b.inArray("auto",[e,bB])>-1,bA={},bz={},bt,bv;if(bC){bz=bw.position();bt=bz.top;bv=bz.left}else{bt=parseFloat(e)||0;bv=parseFloat(bB)||0}if(b.isFunction(bD)){bD=bD.call(bu,bx,bs)}if(bD.top!=null){bA.top=(bD.top-bs.top)+bt}if(bD.left!=null){bA.left=(bD.left-bs.left)+bv}if("using" in bD){bD.using.call(bu,bA)}else{bw.css(bA)}}};b.fn.extend({position:function(){if(!this[0]){return null}var bt=this[0],bs=this.offsetParent(),bu=this.offset(),e=aa.test(bs[0].nodeName)?{top:0,left:0}:bs.offset();bu.top-=parseFloat(b.css(bt,"marginTop"))||0;bu.left-=parseFloat(b.css(bt,"marginLeft"))||0;e.top+=parseFloat(b.css(bs[0],"borderTopWidth"))||0;e.left+=parseFloat(b.css(bs[0],"borderLeftWidth"))||0;return{top:bu.top-e.top,left:bu.left-e.left}},offsetParent:function(){return this.map(function(){var 
e=this.offsetParent||aq.body;while(e&&(!aa.test(e.nodeName)&&b.css(e,"position")==="static")){e=e.offsetParent}return e})}});b.each(["Left","Top"],function(bs,e){var bt="scroll"+e;b.fn[bt]=function(bw){var bu,bv;if(bw===K){bu=this[0];if(!bu){return null}bv=aG(bu);return bv?("pageXOffset" in bv)?bv[bs?"pageYOffset":"pageXOffset"]:b.support.boxModel&&bv.document.documentElement[bt]||bv.document.body[bt]:bu[bt]}return this.each(function(){bv=aG(this);if(bv){bv.scrollTo(!bs?bw:b(bv).scrollLeft(),bs?bw:b(bv).scrollTop())}else{this[bt]=bw}})}});function aG(e){return b.isWindow(e)?e:e.nodeType===9?e.defaultView||e.parentWindow:false}b.each(["Height","Width"],function(bs,e){var bt=e.toLowerCase();b.fn["inner"+e]=function(){var bu=this[0];return bu&&bu.style?parseFloat(b.css(bu,bt,"padding")):null};b.fn["outer"+e]=function(bv){var bu=this[0];return bu&&bu.style?parseFloat(b.css(bu,bt,bv?"margin":"border")):null};b.fn[bt]=function(bv){var bw=this[0];if(!bw){return bv==null?null:this}if(b.isFunction(bv)){return this.each(function(bA){var bz=b(this);bz[bt](bv.call(this,bA,bz[bt]()))})}if(b.isWindow(bw)){var bx=bw.document.documentElement["client"+e];return bw.document.compatMode==="CSS1Compat"&&bx||bw.document.body["client"+e]||bx}else{if(bw.nodeType===9){return Math.max(bw.documentElement["client"+e],bw.body["scroll"+e],bw.documentElement["scroll"+e],bw.body["offset"+e],bw.documentElement["offset"+e])}else{if(bv===K){var by=b.css(bw,bt),bu=parseFloat(by);return b.isNaN(bu)?by:bu}else{return this.css(bt,typeof bv==="string"?bv:bv+"px")}}}}});a9.jQuery=a9.$=b})(window);ikiwiki-3.20130904.1ubuntu1/underlays/jquery/ikiwiki/jquery.full.js0000644000000000000000000071525212211730002021733 0ustar /*! * jQuery JavaScript Library v1.6.2 * http://jquery.com/ * * Copyright 2011, John Resig * Dual licensed under the MIT or GPL Version 2 licenses. 
* http://jquery.org/license * * Includes Sizzle.js * http://sizzlejs.com/ * Copyright 2011, The Dojo Foundation * Released under the MIT, BSD, and GPL Licenses. * * Date: Thu Jun 30 14:16:56 2011 -0400 */ (function( window, undefined ) { // Use the correct document accordingly with window argument (sandbox) var document = window.document, navigator = window.navigator, location = window.location; var jQuery = (function() { // Define a local copy of jQuery var jQuery = function( selector, context ) { // The jQuery object is actually just the init constructor 'enhanced' return new jQuery.fn.init( selector, context, rootjQuery ); }, // Map over jQuery in case of overwrite _jQuery = window.jQuery, // Map over the $ in case of overwrite _$ = window.$, // A central reference to the root jQuery(document) rootjQuery, // A simple way to check for HTML strings or ID strings // (both of which we optimize for) quickExpr = /^(?:[^<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/, // Check if a string has a non-whitespace character in it rnotwhite = /\S/, // Used for trimming whitespace trimLeft = /^\s+/, trimRight = /\s+$/, // Check for digits rdigit = /\d/, // Match a standalone tag rsingleTag = /^<(\w+)\s*\/?>(?:<\/\1>)?$/, // JSON RegExp rvalidchars = /^[\],:{}\s]*$/, rvalidescape = /\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, rvalidtokens = /"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, rvalidbraces = /(?:^|:|,)(?:\s*\[)+/g, // Useragent RegExp rwebkit = /(webkit)[ \/]([\w.]+)/, ropera = /(opera)(?:.*version)?[ \/]([\w.]+)/, rmsie = /(msie) ([\w.]+)/, rmozilla = /(mozilla)(?:.*? 
rv:([\w.]+))?/, // Matches dashed string for camelizing rdashAlpha = /-([a-z])/ig, // Used by jQuery.camelCase as callback to replace() fcamelCase = function( all, letter ) { return letter.toUpperCase(); }, // Keep a UserAgent string for use with jQuery.browser userAgent = navigator.userAgent, // For matching the engine and version of the browser browserMatch, // The deferred used on DOM ready readyList, // The ready event handler DOMContentLoaded, // Save a reference to some core methods toString = Object.prototype.toString, hasOwn = Object.prototype.hasOwnProperty, push = Array.prototype.push, slice = Array.prototype.slice, trim = String.prototype.trim, indexOf = Array.prototype.indexOf, // [[Class]] -> type pairs class2type = {}; jQuery.fn = jQuery.prototype = { constructor: jQuery, init: function( selector, context, rootjQuery ) { var match, elem, ret, doc; // Handle $(""), $(null), or $(undefined) if ( !selector ) { return this; } // Handle $(DOMElement) if ( selector.nodeType ) { this.context = this[0] = selector; this.length = 1; return this; } // The body element only exists once, optimize finding it if ( selector === "body" && !context && document.body ) { this.context = document; this[0] = document.body; this.selector = selector; this.length = 1; return this; } // Handle HTML strings if ( typeof selector === "string" ) { // Are we dealing with HTML string or an ID? if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) { // Assume that strings that start and end with <> are HTML and skip the regex check match = [ null, selector, null ]; } else { match = quickExpr.exec( selector ); } // Verify a match, and that no context was specified for #id if ( match && (match[1] || !context) ) { // HANDLE: $(html) -> $(array) if ( match[1] ) { context = context instanceof jQuery ? context[0] : context; doc = (context ? 
context.ownerDocument || context : document); // If a single string is passed in and it's a single tag // just do a createElement and skip the rest ret = rsingleTag.exec( selector ); if ( ret ) { if ( jQuery.isPlainObject( context ) ) { selector = [ document.createElement( ret[1] ) ]; jQuery.fn.attr.call( selector, context, true ); } else { selector = [ doc.createElement( ret[1] ) ]; } } else { ret = jQuery.buildFragment( [ match[1] ], [ doc ] ); selector = (ret.cacheable ? jQuery.clone(ret.fragment) : ret.fragment).childNodes; } return jQuery.merge( this, selector ); // HANDLE: $("#id") } else { elem = document.getElementById( match[2] ); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 if ( elem && elem.parentNode ) { // Handle the case where IE and Opera return items // by name instead of ID if ( elem.id !== match[2] ) { return rootjQuery.find( selector ); } // Otherwise, we inject the element directly into the jQuery object this.length = 1; this[0] = elem; } this.context = document; this.selector = selector; return this; } // HANDLE: $(expr, $(...)) } else if ( !context || context.jquery ) { return (context || rootjQuery).find( selector ); // HANDLE: $(expr, context) // (which is just equivalent to: $(context).find(expr) } else { return this.constructor( context ).find( selector ); } // HANDLE: $(function) // Shortcut for document ready } else if ( jQuery.isFunction( selector ) ) { return rootjQuery.ready( selector ); } if (selector.selector !== undefined) { this.selector = selector.selector; this.context = selector.context; } return jQuery.makeArray( selector, this ); }, // Start with an empty selector selector: "", // The current version of jQuery being used jquery: "1.6.2", // The default length of a jQuery object is 0 length: 0, // The number of elements contained in the matched element set size: function() { return this.length; }, toArray: function() { return slice.call( this, 0 ); }, // Get the Nth 
element in the matched element set OR // Get the whole matched element set as a clean array get: function( num ) { return num == null ? // Return a 'clean' array this.toArray() : // Return just the object ( num < 0 ? this[ this.length + num ] : this[ num ] ); }, // Take an array of elements and push it onto the stack // (returning the new matched element set) pushStack: function( elems, name, selector ) { // Build a new jQuery matched element set var ret = this.constructor(); if ( jQuery.isArray( elems ) ) { push.apply( ret, elems ); } else { jQuery.merge( ret, elems ); } // Add the old object onto the stack (as a reference) ret.prevObject = this; ret.context = this.context; if ( name === "find" ) { ret.selector = this.selector + (this.selector ? " " : "") + selector; } else if ( name ) { ret.selector = this.selector + "." + name + "(" + selector + ")"; } // Return the newly-formed element set return ret; }, // Execute a callback for every element in the matched set. // (You can seed the arguments with an array of args, but this is // only used internally.) each: function( callback, args ) { return jQuery.each( this, callback, args ); }, ready: function( fn ) { // Attach the listeners jQuery.bindReady(); // Add the callback readyList.done( fn ); return this; }, eq: function( i ) { return i === -1 ? this.slice( i ) : this.slice( i, +i + 1 ); }, first: function() { return this.eq( 0 ); }, last: function() { return this.eq( -1 ); }, slice: function() { return this.pushStack( slice.apply( this, arguments ), "slice", slice.call(arguments).join(",") ); }, map: function( callback ) { return this.pushStack( jQuery.map(this, function( elem, i ) { return callback.call( elem, i, elem ); })); }, end: function() { return this.prevObject || this.constructor(null); }, // For internal use only. // Behaves like an Array's method, not like a jQuery method. 
push: push, sort: [].sort, splice: [].splice }; // Give the init function the jQuery prototype for later instantiation jQuery.fn.init.prototype = jQuery.fn; jQuery.extend = jQuery.fn.extend = function() { var options, name, src, copy, copyIsArray, clone, target = arguments[0] || {}, i = 1, length = arguments.length, deep = false; // Handle a deep copy situation if ( typeof target === "boolean" ) { deep = target; target = arguments[1] || {}; // skip the boolean and the target i = 2; } // Handle case when target is a string or something (possible in deep copy) if ( typeof target !== "object" && !jQuery.isFunction(target) ) { target = {}; } // extend jQuery itself if only one argument is passed if ( length === i ) { target = this; --i; } for ( ; i < length; i++ ) { // Only deal with non-null/undefined values if ( (options = arguments[ i ]) != null ) { // Extend the base object for ( name in options ) { src = target[ name ]; copy = options[ name ]; // Prevent never-ending loop if ( target === copy ) { continue; } // Recurse if we're merging plain objects or arrays if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) { if ( copyIsArray ) { copyIsArray = false; clone = src && jQuery.isArray(src) ? src : []; } else { clone = src && jQuery.isPlainObject(src) ? src : {}; } // Never move original objects, clone them target[ name ] = jQuery.extend( deep, clone, copy ); // Don't bring in undefined values } else if ( copy !== undefined ) { target[ name ] = copy; } } } } // Return the modified object return target; }; jQuery.extend({ noConflict: function( deep ) { if ( window.$ === jQuery ) { window.$ = _$; } if ( deep && window.jQuery === jQuery ) { window.jQuery = _jQuery; } return jQuery; }, // Is the DOM ready to be used? Set to true once it occurs. isReady: false, // A counter to track how many items to wait for before // the ready event fires. 
See #6781 readyWait: 1, // Hold (or release) the ready event holdReady: function( hold ) { if ( hold ) { jQuery.readyWait++; } else { jQuery.ready( true ); } }, // Handle when the DOM is ready ready: function( wait ) { // Either a released hold or an DOMready/load event and not yet ready if ( (wait === true && !--jQuery.readyWait) || (wait !== true && !jQuery.isReady) ) { // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443). if ( !document.body ) { return setTimeout( jQuery.ready, 1 ); } // Remember that the DOM is ready jQuery.isReady = true; // If a normal DOM Ready event fired, decrement, and wait if need be if ( wait !== true && --jQuery.readyWait > 0 ) { return; } // If there are functions bound, to execute readyList.resolveWith( document, [ jQuery ] ); // Trigger any bound ready events if ( jQuery.fn.trigger ) { jQuery( document ).trigger( "ready" ).unbind( "ready" ); } } }, bindReady: function() { if ( readyList ) { return; } readyList = jQuery._Deferred(); // Catch cases where $(document).ready() is called after the // browser event has already occurred. 
if ( document.readyState === "complete" ) { // Handle it asynchronously to allow scripts the opportunity to delay ready return setTimeout( jQuery.ready, 1 ); } // Mozilla, Opera and webkit nightlies currently support this event if ( document.addEventListener ) { // Use the handy event callback document.addEventListener( "DOMContentLoaded", DOMContentLoaded, false ); // A fallback to window.onload, that will always work window.addEventListener( "load", jQuery.ready, false ); // If IE event model is used } else if ( document.attachEvent ) { // ensure firing before onload, // maybe late but safe also for iframes document.attachEvent( "onreadystatechange", DOMContentLoaded ); // A fallback to window.onload, that will always work window.attachEvent( "onload", jQuery.ready ); // If IE and not a frame // continually check to see if the document is ready var toplevel = false; try { toplevel = window.frameElement == null; } catch(e) {} if ( document.documentElement.doScroll && toplevel ) { doScrollCheck(); } } }, // See test/unit/core.js for details concerning isFunction. // Since version 1.3, DOM methods and functions like alert // aren't supported. They return false on IE (#2968). isFunction: function( obj ) { return jQuery.type(obj) === "function"; }, isArray: Array.isArray || function( obj ) { return jQuery.type(obj) === "array"; }, // A crude way of determining if an object is a window isWindow: function( obj ) { return obj && typeof obj === "object" && "setInterval" in obj; }, isNaN: function( obj ) { return obj == null || !rdigit.test( obj ) || isNaN( obj ); }, type: function( obj ) { return obj == null ? String( obj ) : class2type[ toString.call(obj) ] || "object"; }, isPlainObject: function( obj ) { // Must be an Object. // Because of IE, we also have to check the presence of the constructor property. 
// Make sure that DOM nodes and window objects don't pass through, as well if ( !obj || jQuery.type(obj) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) { return false; } // Not own constructor property must be Object if ( obj.constructor && !hasOwn.call(obj, "constructor") && !hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) { return false; } // Own properties are enumerated firstly, so to speed up, // if last one is own, then all properties are own. var key; for ( key in obj ) {} return key === undefined || hasOwn.call( obj, key ); }, isEmptyObject: function( obj ) { for ( var name in obj ) { return false; } return true; }, error: function( msg ) { throw msg; }, parseJSON: function( data ) { if ( typeof data !== "string" || !data ) { return null; } // Make sure leading/trailing whitespace is removed (IE can't handle it) data = jQuery.trim( data ); // Attempt to parse using the native JSON parser first if ( window.JSON && window.JSON.parse ) { return window.JSON.parse( data ); } // Make sure the incoming data is actual JSON // Logic borrowed from http://json.org/json2.js if ( rvalidchars.test( data.replace( rvalidescape, "@" ) .replace( rvalidtokens, "]" ) .replace( rvalidbraces, "")) ) { return (new Function( "return " + data ))(); } jQuery.error( "Invalid JSON: " + data ); }, // Cross-browser xml parsing // (xml & tmp used internally) parseXML: function( data , xml , tmp ) { if ( window.DOMParser ) { // Standard tmp = new DOMParser(); xml = tmp.parseFromString( data , "text/xml" ); } else { // IE xml = new ActiveXObject( "Microsoft.XMLDOM" ); xml.async = "false"; xml.loadXML( data ); } tmp = xml.documentElement; if ( ! tmp || ! 
tmp.nodeName || tmp.nodeName === "parsererror" ) { jQuery.error( "Invalid XML: " + data ); } return xml; }, noop: function() {}, // Evaluates a script in a global context // Workarounds based on findings by Jim Driscoll // http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context globalEval: function( data ) { if ( data && rnotwhite.test( data ) ) { // We use execScript on Internet Explorer // We use an anonymous function so that context is window // rather than jQuery in Firefox ( window.execScript || function( data ) { window[ "eval" ].call( window, data ); } )( data ); } }, // Converts a dashed string to camelCased string; // Used by both the css and data modules camelCase: function( string ) { return string.replace( rdashAlpha, fcamelCase ); }, nodeName: function( elem, name ) { return elem.nodeName && elem.nodeName.toUpperCase() === name.toUpperCase(); }, // args is for internal usage only each: function( object, callback, args ) { var name, i = 0, length = object.length, isObj = length === undefined || jQuery.isFunction( object ); if ( args ) { if ( isObj ) { for ( name in object ) { if ( callback.apply( object[ name ], args ) === false ) { break; } } } else { for ( ; i < length; ) { if ( callback.apply( object[ i++ ], args ) === false ) { break; } } } // A special, fast, case for the most common use of each } else { if ( isObj ) { for ( name in object ) { if ( callback.call( object[ name ], name, object[ name ] ) === false ) { break; } } } else { for ( ; i < length; ) { if ( callback.call( object[ i ], i, object[ i++ ] ) === false ) { break; } } } } return object; }, // Use native String.trim function wherever possible trim: trim ? function( text ) { return text == null ? "" : trim.call( text ); } : // Otherwise use our own trimming functionality function( text ) { return text == null ? 
"" : text.toString().replace( trimLeft, "" ).replace( trimRight, "" ); }, // results is for internal usage only makeArray: function( array, results ) { var ret = results || []; if ( array != null ) { // The window, strings (and functions) also have 'length' // The extra typeof function check is to prevent crashes // in Safari 2 (See: #3039) // Tweaked logic slightly to handle Blackberry 4.7 RegExp issues #6930 var type = jQuery.type( array ); if ( array.length == null || type === "string" || type === "function" || type === "regexp" || jQuery.isWindow( array ) ) { push.call( ret, array ); } else { jQuery.merge( ret, array ); } } return ret; }, inArray: function( elem, array ) { if ( indexOf ) { return indexOf.call( array, elem ); } for ( var i = 0, length = array.length; i < length; i++ ) { if ( array[ i ] === elem ) { return i; } } return -1; }, merge: function( first, second ) { var i = first.length, j = 0; if ( typeof second.length === "number" ) { for ( var l = second.length; j < l; j++ ) { first[ i++ ] = second[ j ]; } } else { while ( second[j] !== undefined ) { first[ i++ ] = second[ j++ ]; } } first.length = i; return first; }, grep: function( elems, callback, inv ) { var ret = [], retVal; inv = !!inv; // Go through the array, only saving the items // that pass the validator function for ( var i = 0, length = elems.length; i < length; i++ ) { retVal = !!callback( elems[ i ], i ); if ( inv !== retVal ) { ret.push( elems[ i ] ); } } return ret; }, // arg is for internal usage only map: function( elems, callback, arg ) { var value, key, ret = [], i = 0, length = elems.length, // jquery objects are treated as arrays isArray = elems instanceof jQuery || length !== undefined && typeof length === "number" && ( ( length > 0 && elems[ 0 ] && elems[ length -1 ] ) || length === 0 || jQuery.isArray( elems ) ) ; // Go through the array, translating each of the items to their if ( isArray ) { for ( ; i < length; i++ ) { value = callback( elems[ i ], i, arg ); if ( value 
!= null ) { ret[ ret.length ] = value; } } // Go through every key on the object, } else { for ( key in elems ) { value = callback( elems[ key ], key, arg ); if ( value != null ) { ret[ ret.length ] = value; } } } // Flatten any nested arrays return ret.concat.apply( [], ret ); }, // A global GUID counter for objects guid: 1, // Bind a function to a context, optionally partially applying any // arguments. proxy: function( fn, context ) { if ( typeof context === "string" ) { var tmp = fn[ context ]; context = fn; fn = tmp; } // Quick check to determine if target is callable, in the spec // this throws a TypeError, but we will just return undefined. if ( !jQuery.isFunction( fn ) ) { return undefined; } // Simulated bind var args = slice.call( arguments, 2 ), proxy = function() { return fn.apply( context, args.concat( slice.call( arguments ) ) ); }; // Set the guid of unique handler to the same of original handler, so it can be removed proxy.guid = fn.guid = fn.guid || proxy.guid || jQuery.guid++; return proxy; }, // Mutifunctional method to get and set values to a collection // The value/s can optionally be executed if it's a function access: function( elems, key, value, exec, fn, pass ) { var length = elems.length; // Setting many attributes if ( typeof key === "object" ) { for ( var k in key ) { jQuery.access( elems, k, key[k], exec, fn, value ); } return elems; } // Setting one attribute if ( value !== undefined ) { // Optionally, function values get executed if exec is true exec = !pass && exec && jQuery.isFunction(value); for ( var i = 0; i < length; i++ ) { fn( elems[i], key, exec ? value.call( elems[i], i, fn( elems[i], key ) ) : value, pass ); } return elems; } // Getting an attribute return length ? fn( elems[0], key ) : undefined; }, now: function() { return (new Date()).getTime(); }, // Use of jQuery.browser is frowned upon. 
// More details: http://docs.jquery.com/Utilities/jQuery.browser uaMatch: function( ua ) { ua = ua.toLowerCase(); var match = rwebkit.exec( ua ) || ropera.exec( ua ) || rmsie.exec( ua ) || ua.indexOf("compatible") < 0 && rmozilla.exec( ua ) || []; return { browser: match[1] || "", version: match[2] || "0" }; }, sub: function() { function jQuerySub( selector, context ) { return new jQuerySub.fn.init( selector, context ); } jQuery.extend( true, jQuerySub, this ); jQuerySub.superclass = this; jQuerySub.fn = jQuerySub.prototype = this(); jQuerySub.fn.constructor = jQuerySub; jQuerySub.sub = this.sub; jQuerySub.fn.init = function init( selector, context ) { if ( context && context instanceof jQuery && !(context instanceof jQuerySub) ) { context = jQuerySub( context ); } return jQuery.fn.init.call( this, selector, context, rootjQuerySub ); }; jQuerySub.fn.init.prototype = jQuerySub.fn; var rootjQuerySub = jQuerySub(document); return jQuerySub; }, browser: {} }); // Populate the class2type map jQuery.each("Boolean Number String Function Array Date RegExp Object".split(" "), function(i, name) { class2type[ "[object " + name + "]" ] = name.toLowerCase(); }); browserMatch = jQuery.uaMatch( userAgent ); if ( browserMatch.browser ) { jQuery.browser[ browserMatch.browser ] = true; jQuery.browser.version = browserMatch.version; } // Deprecated, use jQuery.browser.webkit instead if ( jQuery.browser.webkit ) { jQuery.browser.safari = true; } // IE doesn't match non-breaking spaces with \s if ( rnotwhite.test( "\xA0" ) ) { trimLeft = /^[\s\xA0]+/; trimRight = /[\s\xA0]+$/; } // All jQuery objects should point back to these rootjQuery = jQuery(document); // Cleanup functions for the document ready method if ( document.addEventListener ) { DOMContentLoaded = function() { document.removeEventListener( "DOMContentLoaded", DOMContentLoaded, false ); jQuery.ready(); }; } else if ( document.attachEvent ) { DOMContentLoaded = function() { // Make sure body exists, at least, in case IE gets 
a little overzealous (ticket #5443). if ( document.readyState === "complete" ) { document.detachEvent( "onreadystatechange", DOMContentLoaded ); jQuery.ready(); } }; } // The DOM ready check for Internet Explorer function doScrollCheck() { if ( jQuery.isReady ) { return; } try { // If IE is used, use the trick by Diego Perini // http://javascript.nwbox.com/IEContentLoaded/ document.documentElement.doScroll("left"); } catch(e) { setTimeout( doScrollCheck, 1 ); return; } // and execute any waiting functions jQuery.ready(); } return jQuery; })(); var // Promise methods promiseMethods = "done fail isResolved isRejected promise then always pipe".split( " " ), // Static reference to slice sliceDeferred = [].slice; jQuery.extend({ // Create a simple deferred (one callbacks list) _Deferred: function() { var // callbacks list callbacks = [], // stored [ context , args ] fired, // to avoid firing when already doing so firing, // flag to know if the deferred has been cancelled cancelled, // the deferred itself deferred = { // done( f1, f2, ...) 
done: function() { if ( !cancelled ) { var args = arguments, i, length, elem, type, _fired; if ( fired ) { _fired = fired; fired = 0; } for ( i = 0, length = args.length; i < length; i++ ) { elem = args[ i ]; type = jQuery.type( elem ); if ( type === "array" ) { deferred.done.apply( deferred, elem ); } else if ( type === "function" ) { callbacks.push( elem ); } } if ( _fired ) { deferred.resolveWith( _fired[ 0 ], _fired[ 1 ] ); } } return this; }, // resolve with given context and args resolveWith: function( context, args ) { if ( !cancelled && !fired && !firing ) { // make sure args are available (#8421) args = args || []; firing = 1; try { while( callbacks[ 0 ] ) { callbacks.shift().apply( context, args ); } } finally { fired = [ context, args ]; firing = 0; } } return this; }, // resolve with this as context and given arguments resolve: function() { deferred.resolveWith( this, arguments ); return this; }, // Has this deferred been resolved? isResolved: function() { return !!( firing || fired ); }, // Cancel cancel: function() { cancelled = 1; callbacks = []; return this; } }; return deferred; }, // Full fledged deferred (two callbacks list) Deferred: function( func ) { var deferred = jQuery._Deferred(), failDeferred = jQuery._Deferred(), promise; // Add errorDeferred methods, then and promise jQuery.extend( deferred, { then: function( doneCallbacks, failCallbacks ) { deferred.done( doneCallbacks ).fail( failCallbacks ); return this; }, always: function() { return deferred.done.apply( deferred, arguments ).fail.apply( this, arguments ); }, fail: failDeferred.done, rejectWith: failDeferred.resolveWith, reject: failDeferred.resolve, isRejected: failDeferred.isResolved, pipe: function( fnDone, fnFail ) { return jQuery.Deferred(function( newDefer ) { jQuery.each( { done: [ fnDone, "resolve" ], fail: [ fnFail, "reject" ] }, function( handler, data ) { var fn = data[ 0 ], action = data[ 1 ], returned; if ( jQuery.isFunction( fn ) ) { deferred[ handler ](function() { 
returned = fn.apply( this, arguments ); if ( returned && jQuery.isFunction( returned.promise ) ) { returned.promise().then( newDefer.resolve, newDefer.reject ); } else { newDefer[ action ]( returned ); } }); } else { deferred[ handler ]( newDefer[ action ] ); } }); }).promise(); }, // Get a promise for this deferred // If obj is provided, the promise aspect is added to the object promise: function( obj ) { if ( obj == null ) { if ( promise ) { return promise; } promise = obj = {}; } var i = promiseMethods.length; while( i-- ) { obj[ promiseMethods[i] ] = deferred[ promiseMethods[i] ]; } return obj; } }); // Make sure only one callback list will be used deferred.done( failDeferred.cancel ).fail( deferred.cancel ); // Unexpose cancel delete deferred.cancel; // Call given func if any if ( func ) { func.call( deferred, deferred ); } return deferred; }, // Deferred helper when: function( firstParam ) { var args = arguments, i = 0, length = args.length, count = length, deferred = length <= 1 && firstParam && jQuery.isFunction( firstParam.promise ) ? firstParam : jQuery.Deferred(); function resolveFunc( i ) { return function( value ) { args[ i ] = arguments.length > 1 ? sliceDeferred.call( arguments, 0 ) : value; if ( !( --count ) ) { // Strange bug in FF4: // Values changed onto the arguments object sometimes end up as undefined values // outside the $.when method. Cloning the object into a fresh array solves the issue deferred.resolveWith( deferred, sliceDeferred.call( args, 0 ) ); } }; } if ( length > 1 ) { for( ; i < length; i++ ) { if ( args[ i ] && jQuery.isFunction( args[ i ].promise ) ) { args[ i ].promise().then( resolveFunc(i), deferred.reject ); } else { --count; } } if ( !count ) { deferred.resolveWith( deferred, args ); } } else if ( deferred !== firstParam ) { deferred.resolveWith( deferred, length ? 
[ firstParam ] : [] ); } return deferred.promise(); } }); jQuery.support = (function() { var div = document.createElement( "div" ), documentElement = document.documentElement, all, a, select, opt, input, marginDiv, support, fragment, body, testElementParent, testElement, testElementStyle, tds, events, eventName, i, isSupported; // Preliminary tests div.setAttribute("className", "t"); div.innerHTML = "
a"; all = div.getElementsByTagName( "*" ); a = div.getElementsByTagName( "a" )[ 0 ]; // Can't get basic test support if ( !all || !all.length || !a ) { return {}; } // First batch of supports tests select = document.createElement( "select" ); opt = select.appendChild( document.createElement("option") ); input = div.getElementsByTagName( "input" )[ 0 ]; support = { // IE strips leading whitespace when .innerHTML is used leadingWhitespace: ( div.firstChild.nodeType === 3 ), // Make sure that tbody elements aren't automatically inserted // IE will insert them into empty tables tbody: !div.getElementsByTagName( "tbody" ).length, // Make sure that link elements get serialized correctly by innerHTML // This requires a wrapper element in IE htmlSerialize: !!div.getElementsByTagName( "link" ).length, // Get the style information from getAttribute // (IE uses .cssText instead) style: /top/.test( a.getAttribute("style") ), // Make sure that URLs aren't manipulated // (IE normalizes it by default) hrefNormalized: ( a.getAttribute( "href" ) === "/a" ), // Make sure that element opacity exists // (IE uses filter instead) // Use a regex to work around a WebKit issue. See #5145 opacity: /^0.55$/.test( a.style.opacity ), // Verify style float existence // (IE uses styleFloat instead of cssFloat) cssFloat: !!a.style.cssFloat, // Make sure that if no value is specified for a checkbox // that it defaults to "on". // (WebKit defaults to "" instead) checkOn: ( input.value === "on" ), // Make sure that a selected-by-default option has a working selected property. // (WebKit defaults to false instead of true, IE too, if it's in an optgroup) optSelected: opt.selected, // Test setAttribute on camelCase class. 
If it works, we need attrFixes when doing get/setAttribute (ie6/7) getSetAttribute: div.className !== "t", // Will be defined later submitBubbles: true, changeBubbles: true, focusinBubbles: false, deleteExpando: true, noCloneEvent: true, inlineBlockNeedsLayout: false, shrinkWrapBlocks: false, reliableMarginRight: true }; // Make sure checked status is properly cloned input.checked = true; support.noCloneChecked = input.cloneNode( true ).checked; // Make sure that the options inside disabled selects aren't marked as disabled // (WebKit marks them as disabled) select.disabled = true; support.optDisabled = !opt.disabled; // Test to see if it's possible to delete an expando from an element // Fails in Internet Explorer try { delete div.test; } catch( e ) { support.deleteExpando = false; } if ( !div.addEventListener && div.attachEvent && div.fireEvent ) { div.attachEvent( "onclick", function() { // Cloning a node shouldn't copy over any // bound event handlers (IE does this) support.noCloneEvent = false; }); div.cloneNode( true ).fireEvent( "onclick" ); } // Check if a radio maintains it's value // after being appended to the DOM input = document.createElement("input"); input.value = "t"; input.setAttribute("type", "radio"); support.radioValue = input.value === "t"; input.setAttribute("checked", "checked"); div.appendChild( input ); fragment = document.createDocumentFragment(); fragment.appendChild( div.firstChild ); // WebKit doesn't clone checked state correctly in fragments support.checkClone = fragment.cloneNode( true ).cloneNode( true ).lastChild.checked; div.innerHTML = ""; // Figure out if the W3C box model works as expected div.style.width = div.style.paddingLeft = "1px"; body = document.getElementsByTagName( "body" )[ 0 ]; // We use our own, invisible, body unless the body is already present // in which case we use a div (#9239) testElement = document.createElement( body ? 
"div" : "body" ); testElementStyle = { visibility: "hidden", width: 0, height: 0, border: 0, margin: 0 }; if ( body ) { jQuery.extend( testElementStyle, { position: "absolute", left: -1000, top: -1000 }); } for ( i in testElementStyle ) { testElement.style[ i ] = testElementStyle[ i ]; } testElement.appendChild( div ); testElementParent = body || documentElement; testElementParent.insertBefore( testElement, testElementParent.firstChild ); // Check if a disconnected checkbox will retain its checked // value of true after appended to the DOM (IE6/7) support.appendChecked = input.checked; support.boxModel = div.offsetWidth === 2; if ( "zoom" in div.style ) { // Check if natively block-level elements act like inline-block // elements when setting their display to 'inline' and giving // them layout // (IE < 8 does this) div.style.display = "inline"; div.style.zoom = 1; support.inlineBlockNeedsLayout = ( div.offsetWidth === 2 ); // Check if elements with layout shrink-wrap their children // (IE 6 does this) div.style.display = ""; div.innerHTML = "
"; support.shrinkWrapBlocks = ( div.offsetWidth !== 2 ); } div.innerHTML = "
t
"; tds = div.getElementsByTagName( "td" ); // Check if table cells still have offsetWidth/Height when they are set // to display:none and there are still other visible table cells in a // table row; if so, offsetWidth/Height are not reliable for use when // determining if an element has been hidden directly using // display:none (it is still safe to use offsets if a parent element is // hidden; don safety goggles and see bug #4512 for more information). // (only IE 8 fails this test) isSupported = ( tds[ 0 ].offsetHeight === 0 ); tds[ 0 ].style.display = ""; tds[ 1 ].style.display = "none"; // Check if empty table cells still have offsetWidth/Height // (IE < 8 fail this test) support.reliableHiddenOffsets = isSupported && ( tds[ 0 ].offsetHeight === 0 ); div.innerHTML = ""; // Check if div with explicit width and no margin-right incorrectly // gets computed margin-right based on width of container. For more // info see bug #3333 // Fails in WebKit before Feb 2011 nightlies // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right if ( document.defaultView && document.defaultView.getComputedStyle ) { marginDiv = document.createElement( "div" ); marginDiv.style.width = "0"; marginDiv.style.marginRight = "0"; div.appendChild( marginDiv ); support.reliableMarginRight = ( parseInt( ( document.defaultView.getComputedStyle( marginDiv, null ) || { marginRight: 0 } ).marginRight, 10 ) || 0 ) === 0; } // Remove the body element we added testElement.innerHTML = ""; testElementParent.removeChild( testElement ); // Technique from Juriy Zaytsev // http://thinkweb2.com/projects/prototype/detecting-event-support-without-browser-sniffing/ // We only care about the case where non-standard event systems // are used, namely in IE. Short-circuiting here helps us to // avoid an eval call (in setAttribute) which can cause CSP // to go haywire. 
See: https://developer.mozilla.org/en/Security/CSP if ( div.attachEvent ) { for( i in { submit: 1, change: 1, focusin: 1 } ) { eventName = "on" + i; isSupported = ( eventName in div ); if ( !isSupported ) { div.setAttribute( eventName, "return;" ); isSupported = ( typeof div[ eventName ] === "function" ); } support[ i + "Bubbles" ] = isSupported; } } // Null connected elements to avoid leaks in IE testElement = fragment = select = opt = body = marginDiv = div = input = null; return support; })(); // Keep track of boxModel jQuery.boxModel = jQuery.support.boxModel; var rbrace = /^(?:\{.*\}|\[.*\])$/, rmultiDash = /([a-z])([A-Z])/g; jQuery.extend({ cache: {}, // Please use with caution uuid: 0, // Unique for each copy of jQuery on the page // Non-digits removed to match rinlinejQuery expando: "jQuery" + ( jQuery.fn.jquery + Math.random() ).replace( /\D/g, "" ), // The following elements throw uncatchable exceptions if you // attempt to add expando properties to them. noData: { "embed": true, // Ban all objects except for Flash (which handle expandos) "object": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000", "applet": true }, hasData: function( elem ) { elem = elem.nodeType ? jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ]; return !!elem && !isEmptyDataObject( elem ); }, data: function( elem, name, data, pvt /* Internal Use Only */ ) { if ( !jQuery.acceptData( elem ) ) { return; } var internalKey = jQuery.expando, getByName = typeof name === "string", thisCache, // We have to handle DOM nodes and JS objects differently because IE6-7 // can't GC object references properly across the DOM-JS boundary isNode = elem.nodeType, // Only DOM nodes need the global jQuery cache; JS object data is // attached directly to the object so GC can occur automatically cache = isNode ? jQuery.cache : elem, // Only defining an ID for JS objects if its cache already exists allows // the code to shortcut on the same path as a DOM node with no cache id = isNode ? 
elem[ jQuery.expando ] : elem[ jQuery.expando ] && jQuery.expando; // Avoid doing any more work than we need to when trying to get data on an // object that has no data at all if ( (!id || (pvt && id && !cache[ id ][ internalKey ])) && getByName && data === undefined ) { return; } if ( !id ) { // Only DOM nodes need a new unique ID for each element since their data // ends up in the global cache if ( isNode ) { elem[ jQuery.expando ] = id = ++jQuery.uuid; } else { id = jQuery.expando; } } if ( !cache[ id ] ) { cache[ id ] = {}; // TODO: This is a hack for 1.5 ONLY. Avoids exposing jQuery // metadata on plain JS objects when the object is serialized using // JSON.stringify if ( !isNode ) { cache[ id ].toJSON = jQuery.noop; } } // An object can be passed to jQuery.data instead of a key/value pair; this gets // shallow copied over onto the existing cache if ( typeof name === "object" || typeof name === "function" ) { if ( pvt ) { cache[ id ][ internalKey ] = jQuery.extend(cache[ id ][ internalKey ], name); } else { cache[ id ] = jQuery.extend(cache[ id ], name); } } thisCache = cache[ id ]; // Internal jQuery data is stored in a separate object inside the object's data // cache in order to avoid key collisions between internal data and user-defined // data if ( pvt ) { if ( !thisCache[ internalKey ] ) { thisCache[ internalKey ] = {}; } thisCache = thisCache[ internalKey ]; } if ( data !== undefined ) { thisCache[ jQuery.camelCase( name ) ] = data; } // TODO: This is a hack for 1.5 ONLY. It will be removed in 1.6. Users should // not attempt to inspect the internal events object using jQuery.data, as this // internal data object is undocumented and subject to change. if ( name === "events" && !thisCache[name] ) { return thisCache[ internalKey ] && thisCache[ internalKey ].events; } return getByName ? 
// Check for both converted-to-camel and non-converted data property names thisCache[ jQuery.camelCase( name ) ] || thisCache[ name ] : thisCache; }, removeData: function( elem, name, pvt /* Internal Use Only */ ) { if ( !jQuery.acceptData( elem ) ) { return; } var internalKey = jQuery.expando, isNode = elem.nodeType, // See jQuery.data for more information cache = isNode ? jQuery.cache : elem, // See jQuery.data for more information id = isNode ? elem[ jQuery.expando ] : jQuery.expando; // If there is already no cache entry for this object, there is no // purpose in continuing if ( !cache[ id ] ) { return; } if ( name ) { var thisCache = pvt ? cache[ id ][ internalKey ] : cache[ id ]; if ( thisCache ) { delete thisCache[ name ]; // If there is no data left in the cache, we want to continue // and let the cache object itself get destroyed if ( !isEmptyDataObject(thisCache) ) { return; } } } // See jQuery.data for more information if ( pvt ) { delete cache[ id ][ internalKey ]; // Don't destroy the parent cache unless the internal data object // had been the only thing left in it if ( !isEmptyDataObject(cache[ id ]) ) { return; } } var internalCache = cache[ id ][ internalKey ]; // Browsers that fail expando deletion also refuse to delete expandos on // the window, but it will allow it on all other JS objects; other browsers // don't care if ( jQuery.support.deleteExpando || cache != window ) { delete cache[ id ]; } else { cache[ id ] = null; } // We destroyed the entire user cache at once because it's faster than // iterating through each key, but we need to continue to persist internal // data if it existed if ( internalCache ) { cache[ id ] = {}; // TODO: This is a hack for 1.5 ONLY. 
Avoids exposing jQuery // metadata on plain JS objects when the object is serialized using // JSON.stringify if ( !isNode ) { cache[ id ].toJSON = jQuery.noop; } cache[ id ][ internalKey ] = internalCache; // Otherwise, we need to eliminate the expando on the node to avoid // false lookups in the cache for entries that no longer exist } else if ( isNode ) { // IE does not allow us to delete expando properties from nodes, // nor does it have a removeAttribute function on Document nodes; // we must handle all of these cases if ( jQuery.support.deleteExpando ) { delete elem[ jQuery.expando ]; } else if ( elem.removeAttribute ) { elem.removeAttribute( jQuery.expando ); } else { elem[ jQuery.expando ] = null; } } }, // For internal use only. _data: function( elem, name, data ) { return jQuery.data( elem, name, data, true ); }, // A method for determining if a DOM node can handle the data expando acceptData: function( elem ) { if ( elem.nodeName ) { var match = jQuery.noData[ elem.nodeName.toLowerCase() ]; if ( match ) { return !(match === true || elem.getAttribute("classid") !== match); } } return true; } }); jQuery.fn.extend({ data: function( key, value ) { var data = null; if ( typeof key === "undefined" ) { if ( this.length ) { data = jQuery.data( this[0] ); if ( this[0].nodeType === 1 ) { var attr = this[0].attributes, name; for ( var i = 0, l = attr.length; i < l; i++ ) { name = attr[i].name; if ( name.indexOf( "data-" ) === 0 ) { name = jQuery.camelCase( name.substring(5) ); dataAttr( this[0], name, data[ name ] ); } } } } return data; } else if ( typeof key === "object" ) { return this.each(function() { jQuery.data( this, key ); }); } var parts = key.split("."); parts[1] = parts[1] ? "." 
+ parts[1] : ""; if ( value === undefined ) { data = this.triggerHandler("getData" + parts[1] + "!", [parts[0]]); // Try to fetch any internally stored data first if ( data === undefined && this.length ) { data = jQuery.data( this[0], key ); data = dataAttr( this[0], key, data ); } return data === undefined && parts[1] ? this.data( parts[0] ) : data; } else { return this.each(function() { var $this = jQuery( this ), args = [ parts[0], value ]; $this.triggerHandler( "setData" + parts[1] + "!", args ); jQuery.data( this, key, value ); $this.triggerHandler( "changeData" + parts[1] + "!", args ); }); } }, removeData: function( key ) { return this.each(function() { jQuery.removeData( this, key ); }); } }); function dataAttr( elem, key, data ) { // If nothing was found internally, try to fetch any // data from the HTML5 data-* attribute if ( data === undefined && elem.nodeType === 1 ) { var name = "data-" + key.replace( rmultiDash, "$1-$2" ).toLowerCase(); data = elem.getAttribute( name ); if ( typeof data === "string" ) { try { data = data === "true" ? true : data === "false" ? false : data === "null" ? null : !jQuery.isNaN( data ) ? parseFloat( data ) : rbrace.test( data ) ? 
jQuery.parseJSON( data ) : data; } catch( e ) {} // Make sure we set the data so it isn't changed later jQuery.data( elem, key, data ); } else { data = undefined; } } return data; } // TODO: This is a hack for 1.5 ONLY to allow objects with a single toJSON // property to be considered empty objects; this property always exists in // order to make sure JSON.stringify does not expose internal metadata function isEmptyDataObject( obj ) { for ( var name in obj ) { if ( name !== "toJSON" ) { return false; } } return true; } function handleQueueMarkDefer( elem, type, src ) { var deferDataKey = type + "defer", queueDataKey = type + "queue", markDataKey = type + "mark", defer = jQuery.data( elem, deferDataKey, undefined, true ); if ( defer && ( src === "queue" || !jQuery.data( elem, queueDataKey, undefined, true ) ) && ( src === "mark" || !jQuery.data( elem, markDataKey, undefined, true ) ) ) { // Give room for hard-coded callbacks to fire first // and eventually mark/queue something else on the element setTimeout( function() { if ( !jQuery.data( elem, queueDataKey, undefined, true ) && !jQuery.data( elem, markDataKey, undefined, true ) ) { jQuery.removeData( elem, deferDataKey, true ); defer.resolve(); } }, 0 ); } } jQuery.extend({ _mark: function( elem, type ) { if ( elem ) { type = (type || "fx") + "mark"; jQuery.data( elem, type, (jQuery.data(elem,type,undefined,true) || 0) + 1, true ); } }, _unmark: function( force, elem, type ) { if ( force !== true ) { type = elem; elem = force; force = false; } if ( elem ) { type = type || "fx"; var key = type + "mark", count = force ? 
0 : ( (jQuery.data( elem, key, undefined, true) || 1 ) - 1 ); if ( count ) { jQuery.data( elem, key, count, true ); } else { jQuery.removeData( elem, key, true ); handleQueueMarkDefer( elem, type, "mark" ); } } }, queue: function( elem, type, data ) { if ( elem ) { type = (type || "fx") + "queue"; var q = jQuery.data( elem, type, undefined, true ); // Speed up dequeue by getting out quickly if this is just a lookup if ( data ) { if ( !q || jQuery.isArray(data) ) { q = jQuery.data( elem, type, jQuery.makeArray(data), true ); } else { q.push( data ); } } return q || []; } }, dequeue: function( elem, type ) { type = type || "fx"; var queue = jQuery.queue( elem, type ), fn = queue.shift(), defer; // If the fx queue is dequeued, always remove the progress sentinel if ( fn === "inprogress" ) { fn = queue.shift(); } if ( fn ) { // Add a progress sentinel to prevent the fx queue from being // automatically dequeued if ( type === "fx" ) { queue.unshift("inprogress"); } fn.call(elem, function() { jQuery.dequeue(elem, type); }); } if ( !queue.length ) { jQuery.removeData( elem, type + "queue", true ); handleQueueMarkDefer( elem, type, "queue" ); } } }); jQuery.fn.extend({ queue: function( type, data ) { if ( typeof type !== "string" ) { data = type; type = "fx"; } if ( data === undefined ) { return jQuery.queue( this[0], type ); } return this.each(function() { var queue = jQuery.queue( this, type, data ); if ( type === "fx" && queue[0] !== "inprogress" ) { jQuery.dequeue( this, type ); } }); }, dequeue: function( type ) { return this.each(function() { jQuery.dequeue( this, type ); }); }, // Based off of the plugin by Clint Helfers, with permission. // http://blindsignals.com/index.php/2009/07/jquery-delay/ delay: function( time, type ) { time = jQuery.fx ? 
jQuery.fx.speeds[time] || time : time; type = type || "fx"; return this.queue( type, function() { var elem = this; setTimeout(function() { jQuery.dequeue( elem, type ); }, time ); }); }, clearQueue: function( type ) { return this.queue( type || "fx", [] ); }, // Get a promise resolved when queues of a certain type // are emptied (fx is the type by default) promise: function( type, object ) { if ( typeof type !== "string" ) { object = type; type = undefined; } type = type || "fx"; var defer = jQuery.Deferred(), elements = this, i = elements.length, count = 1, deferDataKey = type + "defer", queueDataKey = type + "queue", markDataKey = type + "mark", tmp; function resolve() { if ( !( --count ) ) { defer.resolveWith( elements, [ elements ] ); } } while( i-- ) { if (( tmp = jQuery.data( elements[ i ], deferDataKey, undefined, true ) || ( jQuery.data( elements[ i ], queueDataKey, undefined, true ) || jQuery.data( elements[ i ], markDataKey, undefined, true ) ) && jQuery.data( elements[ i ], deferDataKey, jQuery._Deferred(), true ) )) { count++; tmp.done( resolve ); } } resolve(); return defer.promise(); } }); var rclass = /[\n\t\r]/g, rspace = /\s+/, rreturn = /\r/g, rtype = /^(?:button|input)$/i, rfocusable = /^(?:button|input|object|select|textarea)$/i, rclickable = /^a(?:rea)?$/i, rboolean = /^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i, rinvalidChar = /\:|^on/, formHook, boolHook; jQuery.fn.extend({ attr: function( name, value ) { return jQuery.access( this, name, value, true, jQuery.attr ); }, removeAttr: function( name ) { return this.each(function() { jQuery.removeAttr( this, name ); }); }, prop: function( name, value ) { return jQuery.access( this, name, value, true, jQuery.prop ); }, removeProp: function( name ) { name = jQuery.propFix[ name ] || name; return this.each(function() { // try/catch handles cases where IE balks (such as removing a property on window) try { this[ name ] = 
undefined; delete this[ name ]; } catch( e ) {} }); }, addClass: function( value ) { var classNames, i, l, elem, setClass, c, cl; if ( jQuery.isFunction( value ) ) { return this.each(function( j ) { jQuery( this ).addClass( value.call(this, j, this.className) ); }); } if ( value && typeof value === "string" ) { classNames = value.split( rspace ); for ( i = 0, l = this.length; i < l; i++ ) { elem = this[ i ]; if ( elem.nodeType === 1 ) { if ( !elem.className && classNames.length === 1 ) { elem.className = value; } else { setClass = " " + elem.className + " "; for ( c = 0, cl = classNames.length; c < cl; c++ ) { if ( !~setClass.indexOf( " " + classNames[ c ] + " " ) ) { setClass += classNames[ c ] + " "; } } elem.className = jQuery.trim( setClass ); } } } } return this; }, removeClass: function( value ) { var classNames, i, l, elem, className, c, cl; if ( jQuery.isFunction( value ) ) { return this.each(function( j ) { jQuery( this ).removeClass( value.call(this, j, this.className) ); }); } if ( (value && typeof value === "string") || value === undefined ) { classNames = (value || "").split( rspace ); for ( i = 0, l = this.length; i < l; i++ ) { elem = this[ i ]; if ( elem.nodeType === 1 && elem.className ) { if ( value ) { className = (" " + elem.className + " ").replace( rclass, " " ); for ( c = 0, cl = classNames.length; c < cl; c++ ) { className = className.replace(" " + classNames[ c ] + " ", " "); } elem.className = jQuery.trim( className ); } else { elem.className = ""; } } } } return this; }, toggleClass: function( value, stateVal ) { var type = typeof value, isBool = typeof stateVal === "boolean"; if ( jQuery.isFunction( value ) ) { return this.each(function( i ) { jQuery( this ).toggleClass( value.call(this, i, this.className, stateVal), stateVal ); }); } return this.each(function() { if ( type === "string" ) { // toggle individual class names var className, i = 0, self = jQuery( this ), state = stateVal, classNames = value.split( rspace ); while ( 
(className = classNames[ i++ ]) ) { // check each className given, space seperated list state = isBool ? state : !self.hasClass( className ); self[ state ? "addClass" : "removeClass" ]( className ); } } else if ( type === "undefined" || type === "boolean" ) { if ( this.className ) { // store className if set jQuery._data( this, "__className__", this.className ); } // toggle whole className this.className = this.className || value === false ? "" : jQuery._data( this, "__className__" ) || ""; } }); }, hasClass: function( selector ) { var className = " " + selector + " "; for ( var i = 0, l = this.length; i < l; i++ ) { if ( (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) > -1 ) { return true; } } return false; }, val: function( value ) { var hooks, ret, elem = this[0]; if ( !arguments.length ) { if ( elem ) { hooks = jQuery.valHooks[ elem.nodeName.toLowerCase() ] || jQuery.valHooks[ elem.type ]; if ( hooks && "get" in hooks && (ret = hooks.get( elem, "value" )) !== undefined ) { return ret; } ret = elem.value; return typeof ret === "string" ? // handle most common string cases ret.replace(rreturn, "") : // handle cases where value is null/undef or number ret == null ? "" : ret; } return undefined; } var isFunction = jQuery.isFunction( value ); return this.each(function( i ) { var self = jQuery(this), val; if ( this.nodeType !== 1 ) { return; } if ( isFunction ) { val = value.call( this, i, self.val() ); } else { val = value; } // Treat null/undefined as ""; convert numbers to string if ( val == null ) { val = ""; } else if ( typeof val === "number" ) { val += ""; } else if ( jQuery.isArray( val ) ) { val = jQuery.map(val, function ( value ) { return value == null ? 
"" : value + ""; }); } hooks = jQuery.valHooks[ this.nodeName.toLowerCase() ] || jQuery.valHooks[ this.type ]; // If set returns undefined, fall back to normal setting if ( !hooks || !("set" in hooks) || hooks.set( this, val, "value" ) === undefined ) { this.value = val; } }); } }); jQuery.extend({ valHooks: { option: { get: function( elem ) { // attributes.value is undefined in Blackberry 4.7 but // uses .value. See #6932 var val = elem.attributes.value; return !val || val.specified ? elem.value : elem.text; } }, select: { get: function( elem ) { var value, index = elem.selectedIndex, values = [], options = elem.options, one = elem.type === "select-one"; // Nothing was selected if ( index < 0 ) { return null; } // Loop through all the selected options for ( var i = one ? index : 0, max = one ? index + 1 : options.length; i < max; i++ ) { var option = options[ i ]; // Don't return options that are disabled or in a disabled optgroup if ( option.selected && (jQuery.support.optDisabled ? !option.disabled : option.getAttribute("disabled") === null) && (!option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" )) ) { // Get the specific value for the option value = jQuery( option ).val(); // We don't need an array for one selects if ( one ) { return value; } // Multi-Selects return an array values.push( value ); } } // Fixes Bug #2551 -- select.val() broken in IE after form.reset() if ( one && !values.length && options.length ) { return jQuery( options[ index ] ).val(); } return values; }, set: function( elem, value ) { var values = jQuery.makeArray( value ); jQuery(elem).find("option").each(function() { this.selected = jQuery.inArray( jQuery(this).val(), values ) >= 0; }); if ( !values.length ) { elem.selectedIndex = -1; } return values; } } }, attrFn: { val: true, css: true, html: true, text: true, data: true, width: true, height: true, offset: true }, attrFix: { // Always normalize to ensure hook usage tabindex: "tabIndex" }, attr: function( 
elem, name, value, pass ) { var nType = elem.nodeType; // don't get/set attributes on text, comment and attribute nodes if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { return undefined; } if ( pass && name in jQuery.attrFn ) { return jQuery( elem )[ name ]( value ); } // Fallback to prop when attributes are not supported if ( !("getAttribute" in elem) ) { return jQuery.prop( elem, name, value ); } var ret, hooks, notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); // Normalize the name if needed if ( notxml ) { name = jQuery.attrFix[ name ] || name; hooks = jQuery.attrHooks[ name ]; if ( !hooks ) { // Use boolHook for boolean attributes if ( rboolean.test( name ) ) { hooks = boolHook; // Use formHook for forms and if the name contains certain characters } else if ( formHook && name !== "className" && (jQuery.nodeName( elem, "form" ) || rinvalidChar.test( name )) ) { hooks = formHook; } } } if ( value !== undefined ) { if ( value === null ) { jQuery.removeAttr( elem, name ); return undefined; } else if ( hooks && "set" in hooks && notxml && (ret = hooks.set( elem, value, name )) !== undefined ) { return ret; } else { elem.setAttribute( name, "" + value ); return value; } } else if ( hooks && "get" in hooks && notxml && (ret = hooks.get( elem, name )) !== null ) { return ret; } else { ret = elem.getAttribute( name ); // Non-existent attributes return null, we normalize to undefined return ret === null ? 
undefined : ret; } }, removeAttr: function( elem, name ) { var propName; if ( elem.nodeType === 1 ) { name = jQuery.attrFix[ name ] || name; if ( jQuery.support.getSetAttribute ) { // Use removeAttribute in browsers that support it elem.removeAttribute( name ); } else { jQuery.attr( elem, name, "" ); elem.removeAttributeNode( elem.getAttributeNode( name ) ); } // Set corresponding property to false for boolean attributes if ( rboolean.test( name ) && (propName = jQuery.propFix[ name ] || name) in elem ) { elem[ propName ] = false; } } }, attrHooks: { type: { set: function( elem, value ) { // We can't allow the type property to be changed (since it causes problems in IE) if ( rtype.test( elem.nodeName ) && elem.parentNode ) { jQuery.error( "type property can't be changed" ); } else if ( !jQuery.support.radioValue && value === "radio" && jQuery.nodeName(elem, "input") ) { // Setting the type on a radio button after the value resets the value in IE6-9 // Reset value to it's default in case type is set after value // This is for element creation var val = elem.value; elem.setAttribute( "type", value ); if ( val ) { elem.value = val; } return value; } } }, tabIndex: { get: function( elem ) { // elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set // http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ var attributeNode = elem.getAttributeNode("tabIndex"); return attributeNode && attributeNode.specified ? parseInt( attributeNode.value, 10 ) : rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ? 0 : undefined; } }, // Use the value property for back compat // Use the formHook for button elements in IE6/7 (#1954) value: { get: function( elem, name ) { if ( formHook && jQuery.nodeName( elem, "button" ) ) { return formHook.get( elem, name ); } return name in elem ? 
elem.value : null; }, set: function( elem, value, name ) { if ( formHook && jQuery.nodeName( elem, "button" ) ) { return formHook.set( elem, value, name ); } // Does not return so that setAttribute is also used elem.value = value; } } }, propFix: { tabindex: "tabIndex", readonly: "readOnly", "for": "htmlFor", "class": "className", maxlength: "maxLength", cellspacing: "cellSpacing", cellpadding: "cellPadding", rowspan: "rowSpan", colspan: "colSpan", usemap: "useMap", frameborder: "frameBorder", contenteditable: "contentEditable" }, prop: function( elem, name, value ) { var nType = elem.nodeType; // don't get/set properties on text, comment and attribute nodes if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { return undefined; } var ret, hooks, notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); if ( notxml ) { // Fix name and attach hooks name = jQuery.propFix[ name ] || name; hooks = jQuery.propHooks[ name ]; } if ( value !== undefined ) { if ( hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) { return ret; } else { return (elem[ name ] = value); } } else { if ( hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== undefined ) { return ret; } else { return elem[ name ]; } } }, propHooks: {} }); // Hook for boolean attributes boolHook = { get: function( elem, name ) { // Align boolean attributes with corresponding properties return jQuery.prop( elem, name ) ? 
name.toLowerCase() : undefined; }, set: function( elem, value, name ) { var propName; if ( value === false ) { // Remove boolean attributes when set to false jQuery.removeAttr( elem, name ); } else { // value is true since we know at this point it's type boolean and not false // Set boolean attributes to the same name and set the DOM property propName = jQuery.propFix[ name ] || name; if ( propName in elem ) { // Only set the IDL specifically if it already exists on the element elem[ propName ] = true; } elem.setAttribute( name, name.toLowerCase() ); } return name; } }; // IE6/7 do not support getting/setting some attributes with get/setAttribute if ( !jQuery.support.getSetAttribute ) { // propFix is more comprehensive and contains all fixes jQuery.attrFix = jQuery.propFix; // Use this for any attribute on a form in IE6/7 formHook = jQuery.attrHooks.name = jQuery.attrHooks.title = jQuery.valHooks.button = { get: function( elem, name ) { var ret; ret = elem.getAttributeNode( name ); // Return undefined if nodeValue is empty string return ret && ret.nodeValue !== "" ? 
ret.nodeValue : undefined; }, set: function( elem, value, name ) { // Check form objects in IE (multiple bugs related) // Only use nodeValue if the attribute node exists on the form var ret = elem.getAttributeNode( name ); if ( ret ) { ret.nodeValue = value; return value; } } }; // Set width and height to auto instead of 0 on empty string( Bug #8150 ) // This is for removals jQuery.each([ "width", "height" ], function( i, name ) { jQuery.attrHooks[ name ] = jQuery.extend( jQuery.attrHooks[ name ], { set: function( elem, value ) { if ( value === "" ) { elem.setAttribute( name, "auto" ); return value; } } }); }); } // Some attributes require a special call on IE if ( !jQuery.support.hrefNormalized ) { jQuery.each([ "href", "src", "width", "height" ], function( i, name ) { jQuery.attrHooks[ name ] = jQuery.extend( jQuery.attrHooks[ name ], { get: function( elem ) { var ret = elem.getAttribute( name, 2 ); return ret === null ? undefined : ret; } }); }); } if ( !jQuery.support.style ) { jQuery.attrHooks.style = { get: function( elem ) { // Return undefined in the case of empty string // Normalize to lowercase since IE uppercases css property names return elem.style.cssText.toLowerCase() || undefined; }, set: function( elem, value ) { return (elem.style.cssText = "" + value); } }; } // Safari mis-reports the default selected property of an option // Accessing the parent's selectedIndex property fixes it if ( !jQuery.support.optSelected ) { jQuery.propHooks.selected = jQuery.extend( jQuery.propHooks.selected, { get: function( elem ) { var parent = elem.parentNode; if ( parent ) { parent.selectedIndex; // Make sure that it also works with optgroups, see #5701 if ( parent.parentNode ) { parent.parentNode.selectedIndex; } } } }); } // Radios and checkboxes getter/setter if ( !jQuery.support.checkOn ) { jQuery.each([ "radio", "checkbox" ], function() { jQuery.valHooks[ this ] = { get: function( elem ) { // Handle the case where in Webkit "" is returned instead of "on" if a 
value isn't specified return elem.getAttribute("value") === null ? "on" : elem.value; } }; }); } jQuery.each([ "radio", "checkbox" ], function() { jQuery.valHooks[ this ] = jQuery.extend( jQuery.valHooks[ this ], { set: function( elem, value ) { if ( jQuery.isArray( value ) ) { return (elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0); } } }); }); var rnamespaces = /\.(.*)$/, rformElems = /^(?:textarea|input|select)$/i, rperiod = /\./g, rspaces = / /g, rescape = /[^\w\s.|`]/g, fcleanup = function( nm ) { return nm.replace(rescape, "\\$&"); }; /* * A number of helper functions used for managing events. * Many of the ideas behind this code originated from * Dean Edwards' addEvent library. */ jQuery.event = { // Bind an event to an element // Original by Dean Edwards add: function( elem, types, handler, data ) { if ( elem.nodeType === 3 || elem.nodeType === 8 ) { return; } if ( handler === false ) { handler = returnFalse; } else if ( !handler ) { // Fixes bug #7229. Fix recommended by jdalton return; } var handleObjIn, handleObj; if ( handler.handler ) { handleObjIn = handler; handler = handleObjIn.handler; } // Make sure that the function being executed has a unique ID if ( !handler.guid ) { handler.guid = jQuery.guid++; } // Init the element's event structure var elemData = jQuery._data( elem ); // If no elemData is found then we must be trying to bind to one of the // banned noData elements if ( !elemData ) { return; } var events = elemData.events, eventHandle = elemData.handle; if ( !events ) { elemData.events = events = {}; } if ( !eventHandle ) { elemData.handle = eventHandle = function( e ) { // Discard the second event of a jQuery.event.trigger() and // when an event is called after a page has unloaded return typeof jQuery !== "undefined" && (!e || jQuery.event.triggered !== e.type) ? 
jQuery.event.handle.apply( eventHandle.elem, arguments ) : undefined; }; } // Add elem as a property of the handle function // This is to prevent a memory leak with non-native events in IE. eventHandle.elem = elem; // Handle multiple events separated by a space // jQuery(...).bind("mouseover mouseout", fn); types = types.split(" "); var type, i = 0, namespaces; while ( (type = types[ i++ ]) ) { handleObj = handleObjIn ? jQuery.extend({}, handleObjIn) : { handler: handler, data: data }; // Namespaced event handlers if ( type.indexOf(".") > -1 ) { namespaces = type.split("."); type = namespaces.shift(); handleObj.namespace = namespaces.slice(0).sort().join("."); } else { namespaces = []; handleObj.namespace = ""; } handleObj.type = type; if ( !handleObj.guid ) { handleObj.guid = handler.guid; } // Get the current list of functions bound to this event var handlers = events[ type ], special = jQuery.event.special[ type ] || {}; // Init the event handler queue if ( !handlers ) { handlers = events[ type ] = []; // Check for a special event handler // Only use addEventListener/attachEvent if the special // events handler returns false if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { // Bind the global event handler to the element if ( elem.addEventListener ) { elem.addEventListener( type, eventHandle, false ); } else if ( elem.attachEvent ) { elem.attachEvent( "on" + type, eventHandle ); } } } if ( special.add ) { special.add.call( elem, handleObj ); if ( !handleObj.handler.guid ) { handleObj.handler.guid = handler.guid; } } // Add the function to the element's handler list handlers.push( handleObj ); // Keep track of which events have been used, for event optimization jQuery.event.global[ type ] = true; } // Nullify elem to prevent memory leaks in IE elem = null; }, global: {}, // Detach an event or set of events from an element remove: function( elem, types, handler, pos ) { // don't do events on text and comment nodes if ( 
elem.nodeType === 3 || elem.nodeType === 8 ) { return; } if ( handler === false ) { handler = returnFalse; } var ret, type, fn, j, i = 0, all, namespaces, namespace, special, eventType, handleObj, origType, elemData = jQuery.hasData( elem ) && jQuery._data( elem ), events = elemData && elemData.events; if ( !elemData || !events ) { return; } // types is actually an event object here if ( types && types.type ) { handler = types.handler; types = types.type; } // Unbind all events for the element if ( !types || typeof types === "string" && types.charAt(0) === "." ) { types = types || ""; for ( type in events ) { jQuery.event.remove( elem, type + types ); } return; } // Handle multiple events separated by a space // jQuery(...).unbind("mouseover mouseout", fn); types = types.split(" "); while ( (type = types[ i++ ]) ) { origType = type; handleObj = null; all = type.indexOf(".") < 0; namespaces = []; if ( !all ) { // Namespaced event handlers namespaces = type.split("."); type = namespaces.shift(); namespace = new RegExp("(^|\\.)" + jQuery.map( namespaces.slice(0).sort(), fcleanup ).join("\\.(?:.*\\.)?") + "(\\.|$)"); } eventType = events[ type ]; if ( !eventType ) { continue; } if ( !handler ) { for ( j = 0; j < eventType.length; j++ ) { handleObj = eventType[ j ]; if ( all || namespace.test( handleObj.namespace ) ) { jQuery.event.remove( elem, origType, handleObj.handler, j ); eventType.splice( j--, 1 ); } } continue; } special = jQuery.event.special[ type ] || {}; for ( j = pos || 0; j < eventType.length; j++ ) { handleObj = eventType[ j ]; if ( handler.guid === handleObj.guid ) { // remove the given handler for the given type if ( all || namespace.test( handleObj.namespace ) ) { if ( pos == null ) { eventType.splice( j--, 1 ); } if ( special.remove ) { special.remove.call( elem, handleObj ); } } if ( pos != null ) { break; } } } // remove generic event handler if no more handlers exist if ( eventType.length === 0 || pos != null && eventType.length === 1 ) { if ( 
!special.teardown || special.teardown.call( elem, namespaces ) === false ) { jQuery.removeEvent( elem, type, elemData.handle ); } ret = null; delete events[ type ]; } } // Remove the expando if it's no longer used if ( jQuery.isEmptyObject( events ) ) { var handle = elemData.handle; if ( handle ) { handle.elem = null; } delete elemData.events; delete elemData.handle; if ( jQuery.isEmptyObject( elemData ) ) { jQuery.removeData( elem, undefined, true ); } } }, // Events that are safe to short-circuit if no handlers are attached. // Native DOM events should not be added, they may have inline handlers. customEvent: { "getData": true, "setData": true, "changeData": true }, trigger: function( event, data, elem, onlyHandlers ) { // Event object or event type var type = event.type || event, namespaces = [], exclusive; if ( type.indexOf("!") >= 0 ) { // Exclusive events trigger only for the exact event (no namespaces) type = type.slice(0, -1); exclusive = true; } if ( type.indexOf(".") >= 0 ) { // Namespaced trigger; create a regexp to match event type in handle() namespaces = type.split("."); type = namespaces.shift(); namespaces.sort(); } if ( (!elem || jQuery.event.customEvent[ type ]) && !jQuery.event.global[ type ] ) { // No jQuery handlers for this event type, and it can't have inline handlers return; } // Caller can pass in an Event, Object, or just an event type string event = typeof event === "object" ? // jQuery.Event object event[ jQuery.expando ] ? 
event : // Object literal new jQuery.Event( type, event ) : // Just the event type (string) new jQuery.Event( type ); event.type = type; event.exclusive = exclusive; event.namespace = namespaces.join("."); event.namespace_re = new RegExp("(^|\\.)" + namespaces.join("\\.(?:.*\\.)?") + "(\\.|$)"); // triggerHandler() and global events don't bubble or run the default action if ( onlyHandlers || !elem ) { event.preventDefault(); event.stopPropagation(); } // Handle a global trigger if ( !elem ) { // TODO: Stop taunting the data cache; remove global events and always attach to document jQuery.each( jQuery.cache, function() { // internalKey variable is just used to make it easier to find // and potentially change this stuff later; currently it just // points to jQuery.expando var internalKey = jQuery.expando, internalCache = this[ internalKey ]; if ( internalCache && internalCache.events && internalCache.events[ type ] ) { jQuery.event.trigger( event, data, internalCache.handle.elem ); } }); return; } // Don't do events on text and comment nodes if ( elem.nodeType === 3 || elem.nodeType === 8 ) { return; } // Clean up the event in case it is being reused event.result = undefined; event.target = elem; // Clone any incoming data and prepend the event, creating the handler arg list data = data != null ? jQuery.makeArray( data ) : []; data.unshift( event ); var cur = elem, // IE doesn't like method names with a colon (#3533, #8272) ontype = type.indexOf(":") < 0 ? 
"on" + type : ""; // Fire event on the current element, then bubble up the DOM tree do { var handle = jQuery._data( cur, "handle" ); event.currentTarget = cur; if ( handle ) { handle.apply( cur, data ); } // Trigger an inline bound script if ( ontype && jQuery.acceptData( cur ) && cur[ ontype ] && cur[ ontype ].apply( cur, data ) === false ) { event.result = false; event.preventDefault(); } // Bubble up to document, then to window cur = cur.parentNode || cur.ownerDocument || cur === event.target.ownerDocument && window; } while ( cur && !event.isPropagationStopped() ); // If nobody prevented the default action, do it now if ( !event.isDefaultPrevented() ) { var old, special = jQuery.event.special[ type ] || {}; if ( (!special._default || special._default.call( elem.ownerDocument, event ) === false) && !(type === "click" && jQuery.nodeName( elem, "a" )) && jQuery.acceptData( elem ) ) { // Call a native DOM method on the target with the same name name as the event. // Can't use an .isFunction)() check here because IE6/7 fails that test. // IE<9 dies on focus to hidden element (#1486), may want to revisit a try/catch. try { if ( ontype && elem[ type ] ) { // Don't re-trigger an onFOO event when we call its FOO() method old = elem[ ontype ]; if ( old ) { elem[ ontype ] = null; } jQuery.event.triggered = type; elem[ type ](); } } catch ( ieError ) {} if ( old ) { elem[ ontype ] = old; } jQuery.event.triggered = undefined; } } return event.result; }, handle: function( event ) { event = jQuery.event.fix( event || window.event ); // Snapshot the handlers list since a called handler may add/remove events. 
	var handlers = ((jQuery._data( this, "events" ) || {})[ event.type ] || []).slice(0),
		run_all = !event.exclusive && !event.namespace,
		args = Array.prototype.slice.call( arguments, 0 );

	// Use the fix-ed Event rather than the (read-only) native event
	args[0] = event;
	event.currentTarget = this;

	for ( var j = 0, l = handlers.length; j < l; j++ ) {
		var handleObj = handlers[ j ];

		// Triggered event must 1) be non-exclusive and have no namespace, or
		// 2) have namespace(s) a subset or equal to those in the bound event.
		if ( run_all || event.namespace_re.test( handleObj.namespace ) ) {
			// Pass in a reference to the handler function itself
			// So that we can later remove it
			event.handler = handleObj.handler;
			event.data = handleObj.data;
			event.handleObj = handleObj;

			var ret = handleObj.handler.apply( this, args );

			// A non-undefined return value becomes event.result; returning
			// false additionally cancels the default action and bubbling.
			if ( ret !== undefined ) {
				event.result = ret;
				if ( ret === false ) {
					event.preventDefault();
					event.stopPropagation();
				}
			}

			if ( event.isImmediatePropagationStopped() ) {
				break;
			}
		}
	}
	return event.result;
},

// Standard event properties that fix() copies from the native event onto
// the jQuery.Event clone (native events are read-only in some browsers).
props: "altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),

// fix: wraps a native event in a writable jQuery.Event and normalizes
// cross-browser property differences (target, pageX/Y, which, metaKey...).
fix: function( event ) {
	// Already fixed (marked by the jQuery.Event constructor) -- return as-is.
	if ( event[ jQuery.expando ] ) {
		return event;
	}

	// store a copy of the original event object
	// and "clone" to set read-only properties
	var originalEvent = event;
	event = jQuery.Event( originalEvent );

	for ( var i = this.props.length, prop; i; ) {
		prop = this.props[ --i ];
		event[ prop ] = originalEvent[ prop ];
	}

	// Fix target property, if necessary
	if ( !event.target ) {
		// Fixes #1925 where srcElement might not be defined either
		event.target = event.srcElement || document;
	}

	// check if target is a textnode (safari)
	if ( event.target.nodeType === 3 ) {
		event.target = event.target.parentNode;
	}

	// Add relatedTarget, if necessary
	if ( !event.relatedTarget && event.fromElement ) {
		event.relatedTarget = event.fromElement === event.target ? event.toElement : event.fromElement;
	}

	// Calculate pageX/Y if missing and clientX/Y available
	if ( event.pageX == null && event.clientX != null ) {
		var eventDocument = event.target.ownerDocument || document,
			doc = eventDocument.documentElement,
			body = eventDocument.body;

		event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc && doc.clientLeft || body && body.clientLeft || 0);
		event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc && doc.clientTop || body && body.clientTop || 0);
	}

	// Add which for key events
	if ( event.which == null && (event.charCode != null || event.keyCode != null) ) {
		event.which = event.charCode != null ? event.charCode : event.keyCode;
	}

	// Add metaKey to non-Mac browsers (use ctrl for PC's and Meta for Macs)
	if ( !event.metaKey && event.ctrlKey ) {
		event.metaKey = event.ctrlKey;
	}

	// Add which for click: 1 === left; 2 === middle; 3 === right
	// Note: button is not normalized, so don't use it
	if ( !event.which && event.button !== undefined ) {
		event.which = (event.button & 1 ? 1 : ( event.button & 2 ? 3 : ( event.button & 4 ?
2 : 0 ) ));
	}

	return event;
},

// Deprecated, use jQuery.guid instead
guid: 1E8,

// Deprecated, use jQuery.proxy instead
proxy: jQuery.proxy,

// Per-event-type hooks: setup/teardown (and add/remove for "live") are
// consulted by the bind/unbind machinery when handlers come and go.
special: {
	ready: {
		// Make sure the ready event is setup
		setup: jQuery.bindReady,
		teardown: jQuery.noop
	},

	live: {
		add: function( handleObj ) {
			jQuery.event.add( this,
				liveConvert( handleObj.origType, handleObj.selector ),
				jQuery.extend({}, handleObj, {handler: liveHandler, guid: handleObj.handler.guid}) );
		},

		remove: function( handleObj ) {
			jQuery.event.remove( this, liveConvert( handleObj.origType, handleObj.selector ), handleObj );
		}
	},

	beforeunload: {
		setup: function( data, namespaces, eventHandle ) {
			// We only want to do this special case on windows
			if ( jQuery.isWindow( this ) ) {
				this.onbeforeunload = eventHandle;
			}
		},

		teardown: function( namespaces, eventHandle ) {
			if ( this.onbeforeunload === eventHandle ) {
				this.onbeforeunload = null;
			}
		}
	}
}
};

// Detach a native listener, choosing the W3C or the legacy IE API once,
// at definition time, based on which one the document supports.
jQuery.removeEvent = document.removeEventListener ?
	function( elem, type, handle ) {
		if ( elem.removeEventListener ) {
			elem.removeEventListener( type, handle, false );
		}
	} :
	function( elem, type, handle ) {
		if ( elem.detachEvent ) {
			elem.detachEvent( "on" + type, handle );
		}
	};

// Constructor for jQuery's writable event wrapper; `src` is either a native
// event (cloned) or a type string.
jQuery.Event = function( src, props ) {
	// Allow instantiation without the 'new' keyword
	if ( !this.preventDefault ) {
		return new jQuery.Event( src, props );
	}

	// Event object
	if ( src && src.type ) {
		this.originalEvent = src;
		this.type = src.type;

		// Events bubbling up the document may have been marked as prevented
		// by a handler lower down the tree; reflect the correct value.
		this.isDefaultPrevented = (src.defaultPrevented || src.returnValue === false ||
			src.getPreventDefault && src.getPreventDefault()) ?
returnTrue : returnFalse; // Event type } else { this.type = src; } // Put explicitly provided properties onto the event object if ( props ) { jQuery.extend( this, props ); } // timeStamp is buggy for some events on Firefox(#3843) // So we won't rely on the native value this.timeStamp = jQuery.now(); // Mark it as fixed this[ jQuery.expando ] = true; }; function returnFalse() { return false; } function returnTrue() { return true; } // jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding // http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html jQuery.Event.prototype = { preventDefault: function() { this.isDefaultPrevented = returnTrue; var e = this.originalEvent; if ( !e ) { return; } // if preventDefault exists run it on the original event if ( e.preventDefault ) { e.preventDefault(); // otherwise set the returnValue property of the original event to false (IE) } else { e.returnValue = false; } }, stopPropagation: function() { this.isPropagationStopped = returnTrue; var e = this.originalEvent; if ( !e ) { return; } // if stopPropagation exists run it on the original event if ( e.stopPropagation ) { e.stopPropagation(); } // otherwise set the cancelBubble property of the original event to true (IE) e.cancelBubble = true; }, stopImmediatePropagation: function() { this.isImmediatePropagationStopped = returnTrue; this.stopPropagation(); }, isDefaultPrevented: returnFalse, isPropagationStopped: returnFalse, isImmediatePropagationStopped: returnFalse }; // Checks if an event happened on an element within another element // Used in jQuery.event.special.mouseenter and mouseleave handlers var withinElement = function( event ) { // Check if mouse(over|out) are still within the same parent element var related = event.relatedTarget, inside = false, eventType = event.type; event.type = event.data; if ( related !== this ) { if ( related ) { inside = jQuery.contains( this, related ); } if ( !inside ) { 
			jQuery.event.handle.apply( this, arguments );

			// Restore the real type so later consumers see mouseover/mouseout.
			event.type = eventType;
		}
	}
},

// In case of event delegation, we only need to rename the event.type,
// liveHandler will take care of the rest.
delegate = function( event ) {
	event.type = event.data;
	jQuery.event.handle.apply( this, arguments );
};

// Create mouseenter and mouseleave events
// (withinElement suppresses mouseover/mouseout that merely move between
// descendants of the bound element, via the jQuery.contains check above).
jQuery.each({
	mouseenter: "mouseover",
	mouseleave: "mouseout"
}, function( orig, fix ) {
	jQuery.event.special[ orig ] = {
		setup: function( data ) {
			jQuery.event.add( this, fix, data && data.selector ? delegate : withinElement, orig );
		},
		teardown: function( data ) {
			jQuery.event.remove( this, fix, data && data.selector ? delegate : withinElement );
		}
	};
});

// submit delegation
if ( !jQuery.support.submitBubbles ) {

	jQuery.event.special.submit = {
		setup: function( data, namespaces ) {
			if ( !jQuery.nodeName( this, "form" ) ) {
				// Simulate submit by watching clicks on submit controls and
				// Enter presses in text fields inside a form.
				jQuery.event.add(this, "click.specialSubmit", function( e ) {
					var elem = e.target,
						type = elem.type;

					if ( (type === "submit" || type === "image") && jQuery( elem ).closest("form").length ) {
						trigger( "submit", this, arguments );
					}
				});

				jQuery.event.add(this, "keypress.specialSubmit", function( e ) {
					var elem = e.target,
						type = elem.type;

					if ( (type === "text" || type === "password") && jQuery( elem ).closest("form").length && e.keyCode === 13 ) {
						trigger( "submit", this, arguments );
					}
				});

			} else {
				return false;
			}
		},

		teardown: function( namespaces ) {
			jQuery.event.remove( this, ".specialSubmit" );
		}
	};

}

// change delegation, happens here so we have bind.
if ( !jQuery.support.changeBubbles ) {

	var changeFilters,

	// Read the value that "change" should be compared against; checked state
	// for radios/checkboxes, a joined selection map for multi-selects.
	getVal = function( elem ) {
		var type = elem.type, val = elem.value;

		if ( type === "radio" || type === "checkbox" ) {
			val = elem.checked;

		} else if ( type === "select-multiple" ) {
			val = elem.selectedIndex > -1 ?
			jQuery.map( elem.options, function( elem ) {
				return elem.selected;
			}).join("-") :
			"";

		} else if ( jQuery.nodeName( elem, "select" ) ) {
			val = elem.selectedIndex;
		}

		return val;
	},

	// Compare the element's current value against the one stashed in
	// "_change_data"; fire a simulated "change" only when it differs.
	testChange = function testChange( e ) {
		var elem = e.target, data, val;

		if ( !rformElems.test( elem.nodeName ) || elem.readOnly ) {
			return;
		}

		data = jQuery._data( elem, "_change_data" );
		val = getVal(elem);

		// the current data will be also retrieved by beforeactivate
		if ( e.type !== "focusout" || elem.type !== "radio" ) {
			jQuery._data( elem, "_change_data", val );
		}

		if ( data === undefined || val === data ) {
			return;
		}

		if ( data != null || val ) {
			e.type = "change";
			e.liveFired = undefined;
			jQuery.event.trigger( e, arguments[1], elem );
		}
	};

	jQuery.event.special.change = {
		// Each filter is bound as "<type>.specialChange" in setup() below and
		// decides whether the donor event should be treated as a change.
		filters: {
			focusout: testChange,

			beforedeactivate: testChange,

			click: function( e ) {
				var elem = e.target, type = jQuery.nodeName( elem, "input" ) ? elem.type : "";

				if ( type === "radio" || type === "checkbox" || jQuery.nodeName( elem, "select" ) ) {
					testChange.call( this, e );
				}
			},

			// Change has to be called before submit
			// Keydown will be called before keypress, which is used in submit-event delegation
			keydown: function( e ) {
				var elem = e.target, type = jQuery.nodeName( elem, "input" ) ?
elem.type : ""; if ( (e.keyCode === 13 && !jQuery.nodeName( elem, "textarea" ) ) || (e.keyCode === 32 && (type === "checkbox" || type === "radio")) || type === "select-multiple" ) { testChange.call( this, e ); } }, // Beforeactivate happens also before the previous element is blurred // with this event you can't trigger a change event, but you can store // information beforeactivate: function( e ) { var elem = e.target; jQuery._data( elem, "_change_data", getVal(elem) ); } }, setup: function( data, namespaces ) { if ( this.type === "file" ) { return false; } for ( var type in changeFilters ) { jQuery.event.add( this, type + ".specialChange", changeFilters[type] ); } return rformElems.test( this.nodeName ); }, teardown: function( namespaces ) { jQuery.event.remove( this, ".specialChange" ); return rformElems.test( this.nodeName ); } }; changeFilters = jQuery.event.special.change.filters; // Handle when the input is .focus()'d changeFilters.focus = changeFilters.beforeactivate; } function trigger( type, elem, args ) { // Piggyback on a donor event to simulate a different one. // Fake originalEvent to avoid donor's stopPropagation, but if the // simulated event prevents default then we do the same on the donor. // Don't pass args or remember liveFired; they apply to the donor event. 
var event = jQuery.extend( {}, args[ 0 ] ); event.type = type; event.originalEvent = {}; event.liveFired = undefined; jQuery.event.handle.call( elem, event ); if ( event.isDefaultPrevented() ) { args[ 0 ].preventDefault(); } } // Create "bubbling" focus and blur events if ( !jQuery.support.focusinBubbles ) { jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) { // Attach a single capturing handler while someone wants focusin/focusout var attaches = 0; jQuery.event.special[ fix ] = { setup: function() { if ( attaches++ === 0 ) { document.addEventListener( orig, handler, true ); } }, teardown: function() { if ( --attaches === 0 ) { document.removeEventListener( orig, handler, true ); } } }; function handler( donor ) { // Donor event is always a native one; fix it and switch its type. // Let focusin/out handler cancel the donor focus/blur event. var e = jQuery.event.fix( donor ); e.type = fix; e.originalEvent = {}; jQuery.event.trigger( e, null, e.target ); if ( e.isDefaultPrevented() ) { donor.preventDefault(); } } }); } jQuery.each(["bind", "one"], function( i, name ) { jQuery.fn[ name ] = function( type, data, fn ) { var handler; // Handle object literals if ( typeof type === "object" ) { for ( var key in type ) { this[ name ](key, data, type[key], fn); } return this; } if ( arguments.length === 2 || data === false ) { fn = data; data = undefined; } if ( name === "one" ) { handler = function( event ) { jQuery( this ).unbind( event, handler ); return fn.apply( this, arguments ); }; handler.guid = fn.guid || jQuery.guid++; } else { handler = fn; } if ( type === "unload" && name !== "one" ) { this.one( type, data, fn ); } else { for ( var i = 0, l = this.length; i < l; i++ ) { jQuery.event.add( this[i], type, handler, data ); } } return this; }; }); jQuery.fn.extend({ unbind: function( type, fn ) { // Handle object literals if ( typeof type === "object" && !type.preventDefault ) { for ( var key in type ) { this.unbind(key, type[key]); } } else { 
for ( var i = 0, l = this.length; i < l; i++ ) { jQuery.event.remove( this[i], type, fn ); } } return this; }, delegate: function( selector, types, data, fn ) { return this.live( types, data, fn, selector ); }, undelegate: function( selector, types, fn ) { if ( arguments.length === 0 ) { return this.unbind( "live" ); } else { return this.die( types, null, fn, selector ); } }, trigger: function( type, data ) { return this.each(function() { jQuery.event.trigger( type, data, this ); }); }, triggerHandler: function( type, data ) { if ( this[0] ) { return jQuery.event.trigger( type, data, this[0], true ); } }, toggle: function( fn ) { // Save reference to arguments for access in closure var args = arguments, guid = fn.guid || jQuery.guid++, i = 0, toggler = function( event ) { // Figure out which function to execute var lastToggle = ( jQuery.data( this, "lastToggle" + fn.guid ) || 0 ) % i; jQuery.data( this, "lastToggle" + fn.guid, lastToggle + 1 ); // Make sure that clicks stop event.preventDefault(); // and execute the function return args[ lastToggle ].apply( this, arguments ) || false; }; // link all the functions, so any of them can unbind this click handler toggler.guid = guid; while ( i < args.length ) { args[ i++ ].guid = guid; } return this.click( toggler ); }, hover: function( fnOver, fnOut ) { return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver ); } }); var liveMap = { focus: "focusin", blur: "focusout", mouseenter: "mouseover", mouseleave: "mouseout" }; jQuery.each(["live", "die"], function( i, name ) { jQuery.fn[ name ] = function( types, data, fn, origSelector /* Internal Use Only */ ) { var type, i = 0, match, namespaces, preType, selector = origSelector || this.selector, context = origSelector ? 
this : jQuery( this.context ); if ( typeof types === "object" && !types.preventDefault ) { for ( var key in types ) { context[ name ]( key, data, types[key], selector ); } return this; } if ( name === "die" && !types && origSelector && origSelector.charAt(0) === "." ) { context.unbind( origSelector ); return this; } if ( data === false || jQuery.isFunction( data ) ) { fn = data || returnFalse; data = undefined; } types = (types || "").split(" "); while ( (type = types[ i++ ]) != null ) { match = rnamespaces.exec( type ); namespaces = ""; if ( match ) { namespaces = match[0]; type = type.replace( rnamespaces, "" ); } if ( type === "hover" ) { types.push( "mouseenter" + namespaces, "mouseleave" + namespaces ); continue; } preType = type; if ( liveMap[ type ] ) { types.push( liveMap[ type ] + namespaces ); type = type + namespaces; } else { type = (liveMap[ type ] || type) + namespaces; } if ( name === "live" ) { // bind live handler for ( var j = 0, l = context.length; j < l; j++ ) { jQuery.event.add( context[j], "live." + liveConvert( type, selector ), { data: data, selector: selector, handler: fn, origType: type, origHandler: fn, preType: preType } ); } } else { // unbind live handler context.unbind( "live." 
+ liveConvert( type, selector ), fn ); } } return this; }; }); function liveHandler( event ) { var stop, maxLevel, related, match, handleObj, elem, j, i, l, data, close, namespace, ret, elems = [], selectors = [], events = jQuery._data( this, "events" ); // Make sure we avoid non-left-click bubbling in Firefox (#3861) and disabled elements in IE (#6911) if ( event.liveFired === this || !events || !events.live || event.target.disabled || event.button && event.type === "click" ) { return; } if ( event.namespace ) { namespace = new RegExp("(^|\\.)" + event.namespace.split(".").join("\\.(?:.*\\.)?") + "(\\.|$)"); } event.liveFired = this; var live = events.live.slice(0); for ( j = 0; j < live.length; j++ ) { handleObj = live[j]; if ( handleObj.origType.replace( rnamespaces, "" ) === event.type ) { selectors.push( handleObj.selector ); } else { live.splice( j--, 1 ); } } match = jQuery( event.target ).closest( selectors, event.currentTarget ); for ( i = 0, l = match.length; i < l; i++ ) { close = match[i]; for ( j = 0; j < live.length; j++ ) { handleObj = live[j]; if ( close.selector === handleObj.selector && (!namespace || namespace.test( handleObj.namespace )) && !close.elem.disabled ) { elem = close.elem; related = null; // Those two events require additional checking if ( handleObj.preType === "mouseenter" || handleObj.preType === "mouseleave" ) { event.type = handleObj.preType; related = jQuery( event.relatedTarget ).closest( handleObj.selector )[0]; // Make sure not to accidentally match a child element with the same selector if ( related && jQuery.contains( elem, related ) ) { related = elem; } } if ( !related || related !== elem ) { elems.push({ elem: elem, handleObj: handleObj, level: close.level }); } } } } for ( i = 0, l = elems.length; i < l; i++ ) { match = elems[i]; if ( maxLevel && match.level > maxLevel ) { break; } event.currentTarget = match.elem; event.data = match.handleObj.data; event.handleObj = match.handleObj; ret = 
match.handleObj.origHandler.apply( match.elem, arguments ); if ( ret === false || event.isPropagationStopped() ) { maxLevel = match.level; if ( ret === false ) { stop = false; } if ( event.isImmediatePropagationStopped() ) { break; } } } return stop; } function liveConvert( type, selector ) { return (type && type !== "*" ? type + "." : "") + selector.replace(rperiod, "`").replace(rspaces, "&"); } jQuery.each( ("blur focus focusin focusout load resize scroll unload click dblclick " + "mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " + "change select submit keydown keypress keyup error").split(" "), function( i, name ) { // Handle event binding jQuery.fn[ name ] = function( data, fn ) { if ( fn == null ) { fn = data; data = null; } return arguments.length > 0 ? this.bind( name, data, fn ) : this.trigger( name ); }; if ( jQuery.attrFn ) { jQuery.attrFn[ name ] = true; } }); /*! * Sizzle CSS Selector Engine * Copyright 2011, The Dojo Foundation * Released under the MIT, BSD, and GPL Licenses. * More information: http://sizzlejs.com/ */ (function(){ var chunker = /((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, done = 0, toString = Object.prototype.toString, hasDuplicate = false, baseHasDuplicate = true, rBackslash = /\\/g, rNonWord = /\W/; // Here we check if the JavaScript engine is using some sort of // optimization where it does not always call our comparision // function. If that is the case, discard the hasDuplicate value. // Thus far that includes Google Chrome. 
[0, 0].sort(function() { baseHasDuplicate = false; return 0; }); var Sizzle = function( selector, context, results, seed ) { results = results || []; context = context || document; var origContext = context; if ( context.nodeType !== 1 && context.nodeType !== 9 ) { return []; } if ( !selector || typeof selector !== "string" ) { return results; } var m, set, checkSet, extra, ret, cur, pop, i, prune = true, contextXML = Sizzle.isXML( context ), parts = [], soFar = selector; // Reset the position of the chunker regexp (start from head) do { chunker.exec( "" ); m = chunker.exec( soFar ); if ( m ) { soFar = m[3]; parts.push( m[1] ); if ( m[2] ) { extra = m[3]; break; } } } while ( m ); if ( parts.length > 1 && origPOS.exec( selector ) ) { if ( parts.length === 2 && Expr.relative[ parts[0] ] ) { set = posProcess( parts[0] + parts[1], context ); } else { set = Expr.relative[ parts[0] ] ? [ context ] : Sizzle( parts.shift(), context ); while ( parts.length ) { selector = parts.shift(); if ( Expr.relative[ selector ] ) { selector += parts.shift(); } set = posProcess( selector, set ); } } } else { // Take a shortcut and set the context if the root selector is an ID // (but not if it'll be faster if the inner selector is an ID) if ( !seed && parts.length > 1 && context.nodeType === 9 && !contextXML && Expr.match.ID.test(parts[0]) && !Expr.match.ID.test(parts[parts.length - 1]) ) { ret = Sizzle.find( parts.shift(), context, contextXML ); context = ret.expr ? Sizzle.filter( ret.expr, ret.set )[0] : ret.set[0]; } if ( context ) { ret = seed ? { expr: parts.pop(), set: makeArray(seed) } : Sizzle.find( parts.pop(), parts.length === 1 && (parts[0] === "~" || parts[0] === "+") && context.parentNode ? context.parentNode : context, contextXML ); set = ret.expr ? 
Sizzle.filter( ret.expr, ret.set ) : ret.set; if ( parts.length > 0 ) { checkSet = makeArray( set ); } else { prune = false; } while ( parts.length ) { cur = parts.pop(); pop = cur; if ( !Expr.relative[ cur ] ) { cur = ""; } else { pop = parts.pop(); } if ( pop == null ) { pop = context; } Expr.relative[ cur ]( checkSet, pop, contextXML ); } } else { checkSet = parts = []; } } if ( !checkSet ) { checkSet = set; } if ( !checkSet ) { Sizzle.error( cur || selector ); } if ( toString.call(checkSet) === "[object Array]" ) { if ( !prune ) { results.push.apply( results, checkSet ); } else if ( context && context.nodeType === 1 ) { for ( i = 0; checkSet[i] != null; i++ ) { if ( checkSet[i] && (checkSet[i] === true || checkSet[i].nodeType === 1 && Sizzle.contains(context, checkSet[i])) ) { results.push( set[i] ); } } } else { for ( i = 0; checkSet[i] != null; i++ ) { if ( checkSet[i] && checkSet[i].nodeType === 1 ) { results.push( set[i] ); } } } } else { makeArray( checkSet, results ); } if ( extra ) { Sizzle( extra, origContext, results, seed ); Sizzle.uniqueSort( results ); } return results; }; Sizzle.uniqueSort = function( results ) { if ( sortOrder ) { hasDuplicate = baseHasDuplicate; results.sort( sortOrder ); if ( hasDuplicate ) { for ( var i = 1; i < results.length; i++ ) { if ( results[i] === results[ i - 1 ] ) { results.splice( i--, 1 ); } } } } return results; }; Sizzle.matches = function( expr, set ) { return Sizzle( expr, null, null, set ); }; Sizzle.matchesSelector = function( node, expr ) { return Sizzle( expr, null, null, [node] ).length > 0; }; Sizzle.find = function( expr, context, isXML ) { var set; if ( !expr ) { return []; } for ( var i = 0, l = Expr.order.length; i < l; i++ ) { var match, type = Expr.order[i]; if ( (match = Expr.leftMatch[ type ].exec( expr )) ) { var left = match[1]; match.splice( 1, 1 ); if ( left.substr( left.length - 1 ) !== "\\" ) { match[1] = (match[1] || "").replace( rBackslash, "" ); set = Expr.find[ type ]( match, context, 
isXML ); if ( set != null ) { expr = expr.replace( Expr.match[ type ], "" ); break; } } } } if ( !set ) { set = typeof context.getElementsByTagName !== "undefined" ? context.getElementsByTagName( "*" ) : []; } return { set: set, expr: expr }; }; Sizzle.filter = function( expr, set, inplace, not ) { var match, anyFound, old = expr, result = [], curLoop = set, isXMLFilter = set && set[0] && Sizzle.isXML( set[0] ); while ( expr && set.length ) { for ( var type in Expr.filter ) { if ( (match = Expr.leftMatch[ type ].exec( expr )) != null && match[2] ) { var found, item, filter = Expr.filter[ type ], left = match[1]; anyFound = false; match.splice(1,1); if ( left.substr( left.length - 1 ) === "\\" ) { continue; } if ( curLoop === result ) { result = []; } if ( Expr.preFilter[ type ] ) { match = Expr.preFilter[ type ]( match, curLoop, inplace, result, not, isXMLFilter ); if ( !match ) { anyFound = found = true; } else if ( match === true ) { continue; } } if ( match ) { for ( var i = 0; (item = curLoop[i]) != null; i++ ) { if ( item ) { found = filter( item, match, i, curLoop ); var pass = not ^ !!found; if ( inplace && found != null ) { if ( pass ) { anyFound = true; } else { curLoop[i] = false; } } else if ( pass ) { result.push( item ); anyFound = true; } } } } if ( found !== undefined ) { if ( !inplace ) { curLoop = result; } expr = expr.replace( Expr.match[ type ], "" ); if ( !anyFound ) { return []; } break; } } } // Improper expression if ( expr === old ) { if ( anyFound == null ) { Sizzle.error( expr ); } else { break; } } old = expr; } return curLoop; }; Sizzle.error = function( msg ) { throw "Syntax error, unrecognized expression: " + msg; }; var Expr = Sizzle.selectors = { order: [ "ID", "NAME", "TAG" ], match: { ID: /#((?:[\w\u00c0-\uFFFF\-]|\\.)+)/, CLASS: /\.((?:[\w\u00c0-\uFFFF\-]|\\.)+)/, NAME: /\[name=['"]*((?:[\w\u00c0-\uFFFF\-]|\\.)+)['"]*\]/, ATTR: 
/\[\s*((?:[\w\u00c0-\uFFFF\-]|\\.)+)\s*(?:(\S?=)\s*(?:(['"])(.*?)\3|(#?(?:[\w\u00c0-\uFFFF\-]|\\.)*)|)|)\s*\]/, TAG: /^((?:[\w\u00c0-\uFFFF\*\-]|\\.)+)/, CHILD: /:(only|nth|last|first)-child(?:\(\s*(even|odd|(?:[+\-]?\d+|(?:[+\-]?\d*)?n\s*(?:[+\-]\s*\d+)?))\s*\))?/, POS: /:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^\-]|$)/, PSEUDO: /:((?:[\w\u00c0-\uFFFF\-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/ }, leftMatch: {}, attrMap: { "class": "className", "for": "htmlFor" }, attrHandle: { href: function( elem ) { return elem.getAttribute( "href" ); }, type: function( elem ) { return elem.getAttribute( "type" ); } }, relative: { "+": function(checkSet, part){ var isPartStr = typeof part === "string", isTag = isPartStr && !rNonWord.test( part ), isPartStrNotTag = isPartStr && !isTag; if ( isTag ) { part = part.toLowerCase(); } for ( var i = 0, l = checkSet.length, elem; i < l; i++ ) { if ( (elem = checkSet[i]) ) { while ( (elem = elem.previousSibling) && elem.nodeType !== 1 ) {} checkSet[i] = isPartStrNotTag || elem && elem.nodeName.toLowerCase() === part ? elem || false : elem === part; } } if ( isPartStrNotTag ) { Sizzle.filter( part, checkSet, true ); } }, ">": function( checkSet, part ) { var elem, isPartStr = typeof part === "string", i = 0, l = checkSet.length; if ( isPartStr && !rNonWord.test( part ) ) { part = part.toLowerCase(); for ( ; i < l; i++ ) { elem = checkSet[i]; if ( elem ) { var parent = elem.parentNode; checkSet[i] = parent.nodeName.toLowerCase() === part ? parent : false; } } } else { for ( ; i < l; i++ ) { elem = checkSet[i]; if ( elem ) { checkSet[i] = isPartStr ? 
elem.parentNode : elem.parentNode === part; } } if ( isPartStr ) { Sizzle.filter( part, checkSet, true ); } } }, "": function(checkSet, part, isXML){ var nodeCheck, doneName = done++, checkFn = dirCheck; if ( typeof part === "string" && !rNonWord.test( part ) ) { part = part.toLowerCase(); nodeCheck = part; checkFn = dirNodeCheck; } checkFn( "parentNode", part, doneName, checkSet, nodeCheck, isXML ); }, "~": function( checkSet, part, isXML ) { var nodeCheck, doneName = done++, checkFn = dirCheck; if ( typeof part === "string" && !rNonWord.test( part ) ) { part = part.toLowerCase(); nodeCheck = part; checkFn = dirNodeCheck; } checkFn( "previousSibling", part, doneName, checkSet, nodeCheck, isXML ); } }, find: { ID: function( match, context, isXML ) { if ( typeof context.getElementById !== "undefined" && !isXML ) { var m = context.getElementById(match[1]); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 return m && m.parentNode ? [m] : []; } }, NAME: function( match, context ) { if ( typeof context.getElementsByName !== "undefined" ) { var ret = [], results = context.getElementsByName( match[1] ); for ( var i = 0, l = results.length; i < l; i++ ) { if ( results[i].getAttribute("name") === match[1] ) { ret.push( results[i] ); } } return ret.length === 0 ? 
null : ret; } }, TAG: function( match, context ) { if ( typeof context.getElementsByTagName !== "undefined" ) { return context.getElementsByTagName( match[1] ); } } }, preFilter: { CLASS: function( match, curLoop, inplace, result, not, isXML ) { match = " " + match[1].replace( rBackslash, "" ) + " "; if ( isXML ) { return match; } for ( var i = 0, elem; (elem = curLoop[i]) != null; i++ ) { if ( elem ) { if ( not ^ (elem.className && (" " + elem.className + " ").replace(/[\t\n\r]/g, " ").indexOf(match) >= 0) ) { if ( !inplace ) { result.push( elem ); } } else if ( inplace ) { curLoop[i] = false; } } } return false; }, ID: function( match ) { return match[1].replace( rBackslash, "" ); }, TAG: function( match, curLoop ) { return match[1].replace( rBackslash, "" ).toLowerCase(); }, CHILD: function( match ) { if ( match[1] === "nth" ) { if ( !match[2] ) { Sizzle.error( match[0] ); } match[2] = match[2].replace(/^\+|\s*/g, ''); // parse equations like 'even', 'odd', '5', '2n', '3n+2', '4n-1', '-n+6' var test = /(-?)(\d*)(?:n([+\-]?\d*))?/.exec( match[2] === "even" && "2n" || match[2] === "odd" && "2n+1" || !/\D/.test( match[2] ) && "0n+" + match[2] || match[2]); // calculate the numbers (first)n+(last) including if they are negative match[2] = (test[1] + (test[2] || 1)) - 0; match[3] = test[3] - 0; } else if ( match[2] ) { Sizzle.error( match[0] ); } // TODO: Move to normal caching system match[0] = done++; return match; }, ATTR: function( match, curLoop, inplace, result, not, isXML ) { var name = match[1] = match[1].replace( rBackslash, "" ); if ( !isXML && Expr.attrMap[name] ) { match[1] = Expr.attrMap[name]; } // Handle if an un-quoted value was used match[4] = ( match[4] || match[5] || "" ).replace( rBackslash, "" ); if ( match[2] === "~=" ) { match[4] = " " + match[4] + " "; } return match; }, PSEUDO: function( match, curLoop, inplace, result, not ) { if ( match[1] === "not" ) { // If we're dealing with a complex expression, or a simple one if ( ( 
chunker.exec(match[3]) || "" ).length > 1 || /^\w/.test(match[3]) ) { match[3] = Sizzle(match[3], null, null, curLoop); } else { var ret = Sizzle.filter(match[3], curLoop, inplace, true ^ not); if ( !inplace ) { result.push.apply( result, ret ); } return false; } } else if ( Expr.match.POS.test( match[0] ) || Expr.match.CHILD.test( match[0] ) ) { return true; } return match; }, POS: function( match ) { match.unshift( true ); return match; } }, filters: { enabled: function( elem ) { return elem.disabled === false && elem.type !== "hidden"; }, disabled: function( elem ) { return elem.disabled === true; }, checked: function( elem ) { return elem.checked === true; }, selected: function( elem ) { // Accessing this property makes selected-by-default // options in Safari work properly if ( elem.parentNode ) { elem.parentNode.selectedIndex; } return elem.selected === true; }, parent: function( elem ) { return !!elem.firstChild; }, empty: function( elem ) { return !elem.firstChild; }, has: function( elem, i, match ) { return !!Sizzle( match[3], elem ).length; }, header: function( elem ) { return (/h\d/i).test( elem.nodeName ); }, text: function( elem ) { var attr = elem.getAttribute( "type" ), type = elem.type; // IE6 and 7 will map elem.type to 'text' for new HTML5 types (search, etc) // use getAttribute instead to test this case return elem.nodeName.toLowerCase() === "input" && "text" === type && ( attr === type || attr === null ); }, radio: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "radio" === elem.type; }, checkbox: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "checkbox" === elem.type; }, file: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "file" === elem.type; }, password: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "password" === elem.type; }, submit: function( elem ) { var name = elem.nodeName.toLowerCase(); return (name === "input" || name === "button") && "submit" === 
elem.type; }, image: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "image" === elem.type; }, reset: function( elem ) { var name = elem.nodeName.toLowerCase(); return (name === "input" || name === "button") && "reset" === elem.type; }, button: function( elem ) { var name = elem.nodeName.toLowerCase(); return name === "input" && "button" === elem.type || name === "button"; }, input: function( elem ) { return (/input|select|textarea|button/i).test( elem.nodeName ); }, focus: function( elem ) { return elem === elem.ownerDocument.activeElement; } }, setFilters: { first: function( elem, i ) { return i === 0; }, last: function( elem, i, match, array ) { return i === array.length - 1; }, even: function( elem, i ) { return i % 2 === 0; }, odd: function( elem, i ) { return i % 2 === 1; }, lt: function( elem, i, match ) { return i < match[3] - 0; }, gt: function( elem, i, match ) { return i > match[3] - 0; }, nth: function( elem, i, match ) { return match[3] - 0 === i; }, eq: function( elem, i, match ) { return match[3] - 0 === i; } }, filter: { PSEUDO: function( elem, match, i, array ) { var name = match[1], filter = Expr.filters[ name ]; if ( filter ) { return filter( elem, i, match, array ); } else if ( name === "contains" ) { return (elem.textContent || elem.innerText || Sizzle.getText([ elem ]) || "").indexOf(match[3]) >= 0; } else if ( name === "not" ) { var not = match[3]; for ( var j = 0, l = not.length; j < l; j++ ) { if ( not[j] === elem ) { return false; } } return true; } else { Sizzle.error( name ); } }, CHILD: function( elem, match ) { var type = match[1], node = elem; switch ( type ) { case "only": case "first": while ( (node = node.previousSibling) ) { if ( node.nodeType === 1 ) { return false; } } if ( type === "first" ) { return true; } node = elem; case "last": while ( (node = node.nextSibling) ) { if ( node.nodeType === 1 ) { return false; } } return true; case "nth": var first = match[2], last = match[3]; if ( first === 1 && last 
=== 0 ) { return true; } var doneName = match[0], parent = elem.parentNode; if ( parent && (parent.sizcache !== doneName || !elem.nodeIndex) ) { var count = 0; for ( node = parent.firstChild; node; node = node.nextSibling ) { if ( node.nodeType === 1 ) { node.nodeIndex = ++count; } } parent.sizcache = doneName; } var diff = elem.nodeIndex - last; if ( first === 0 ) { return diff === 0; } else { return ( diff % first === 0 && diff / first >= 0 ); } } }, ID: function( elem, match ) { return elem.nodeType === 1 && elem.getAttribute("id") === match; }, TAG: function( elem, match ) { return (match === "*" && elem.nodeType === 1) || elem.nodeName.toLowerCase() === match; }, CLASS: function( elem, match ) { return (" " + (elem.className || elem.getAttribute("class")) + " ") .indexOf( match ) > -1; }, ATTR: function( elem, match ) { var name = match[1], result = Expr.attrHandle[ name ] ? Expr.attrHandle[ name ]( elem ) : elem[ name ] != null ? elem[ name ] : elem.getAttribute( name ), value = result + "", type = match[2], check = match[4]; return result == null ? type === "!=" : type === "=" ? value === check : type === "*=" ? value.indexOf(check) >= 0 : type === "~=" ? (" " + value + " ").indexOf(check) >= 0 : !check ? value && result !== false : type === "!=" ? value !== check : type === "^=" ? value.indexOf(check) === 0 : type === "$=" ? value.substr(value.length - check.length) === check : type === "|=" ? 
value === check || value.substr(0, check.length + 1) === check + "-" : false; }, POS: function( elem, match, i, array ) { var name = match[2], filter = Expr.setFilters[ name ]; if ( filter ) { return filter( elem, i, match, array ); } } } }; var origPOS = Expr.match.POS, fescape = function(all, num){ return "\\" + (num - 0 + 1); }; for ( var type in Expr.match ) { Expr.match[ type ] = new RegExp( Expr.match[ type ].source + (/(?![^\[]*\])(?![^\(]*\))/.source) ); Expr.leftMatch[ type ] = new RegExp( /(^(?:.|\r|\n)*?)/.source + Expr.match[ type ].source.replace(/\\(\d+)/g, fescape) ); } var makeArray = function( array, results ) { array = Array.prototype.slice.call( array, 0 ); if ( results ) { results.push.apply( results, array ); return results; } return array; }; // Perform a simple check to determine if the browser is capable of // converting a NodeList to an array using builtin methods. // Also verifies that the returned array holds DOM nodes // (which is not the case in the Blackberry browser) try { Array.prototype.slice.call( document.documentElement.childNodes, 0 )[0].nodeType; // Provide a fallback method if it does not work } catch( e ) { makeArray = function( array, results ) { var i = 0, ret = results || []; if ( toString.call(array) === "[object Array]" ) { Array.prototype.push.apply( ret, array ); } else { if ( typeof array.length === "number" ) { for ( var l = array.length; i < l; i++ ) { ret.push( array[i] ); } } else { for ( ; array[i]; i++ ) { ret.push( array[i] ); } } } return ret; }; } var sortOrder, siblingCheck; if ( document.documentElement.compareDocumentPosition ) { sortOrder = function( a, b ) { if ( a === b ) { hasDuplicate = true; return 0; } if ( !a.compareDocumentPosition || !b.compareDocumentPosition ) { return a.compareDocumentPosition ? -1 : 1; } return a.compareDocumentPosition(b) & 4 ? 
-1 : 1; }; } else { sortOrder = function( a, b ) { // The nodes are identical, we can exit early if ( a === b ) { hasDuplicate = true; return 0; // Fallback to using sourceIndex (in IE) if it's available on both nodes } else if ( a.sourceIndex && b.sourceIndex ) { return a.sourceIndex - b.sourceIndex; } var al, bl, ap = [], bp = [], aup = a.parentNode, bup = b.parentNode, cur = aup; // If the nodes are siblings (or identical) we can do a quick check if ( aup === bup ) { return siblingCheck( a, b ); // If no parents were found then the nodes are disconnected } else if ( !aup ) { return -1; } else if ( !bup ) { return 1; } // Otherwise they're somewhere else in the tree so we need // to build up a full list of the parentNodes for comparison while ( cur ) { ap.unshift( cur ); cur = cur.parentNode; } cur = bup; while ( cur ) { bp.unshift( cur ); cur = cur.parentNode; } al = ap.length; bl = bp.length; // Start walking down the tree looking for a discrepancy for ( var i = 0; i < al && i < bl; i++ ) { if ( ap[i] !== bp[i] ) { return siblingCheck( ap[i], bp[i] ); } } // We ended someplace up the tree so do a sibling check return i === al ? 
siblingCheck( a, bp[i], -1 ) : siblingCheck( ap[i], b, 1 ); }; siblingCheck = function( a, b, ret ) { if ( a === b ) { return ret; } var cur = a.nextSibling; while ( cur ) { if ( cur === b ) { return -1; } cur = cur.nextSibling; } return 1; }; } // Utility function for retreiving the text value of an array of DOM nodes Sizzle.getText = function( elems ) { var ret = "", elem; for ( var i = 0; elems[i]; i++ ) { elem = elems[i]; // Get the text from text nodes and CDATA nodes if ( elem.nodeType === 3 || elem.nodeType === 4 ) { ret += elem.nodeValue; // Traverse everything else, except comment nodes } else if ( elem.nodeType !== 8 ) { ret += Sizzle.getText( elem.childNodes ); } } return ret; }; // Check to see if the browser returns elements by name when // querying by getElementById (and provide a workaround) (function(){ // We're going to inject a fake input element with a specified name var form = document.createElement("div"), id = "script" + (new Date()).getTime(), root = document.documentElement; form.innerHTML = ""; // Inject it into the root element, check its status, and remove it quickly root.insertBefore( form, root.firstChild ); // The workaround has to do additional checks after a getElementById // Which slows things down for other browsers (hence the branching) if ( document.getElementById( id ) ) { Expr.find.ID = function( match, context, isXML ) { if ( typeof context.getElementById !== "undefined" && !isXML ) { var m = context.getElementById(match[1]); return m ? m.id === match[1] || typeof m.getAttributeNode !== "undefined" && m.getAttributeNode("id").nodeValue === match[1] ? 
[m] : undefined : []; } }; Expr.filter.ID = function( elem, match ) { var node = typeof elem.getAttributeNode !== "undefined" && elem.getAttributeNode("id"); return elem.nodeType === 1 && node && node.nodeValue === match; }; } root.removeChild( form ); // release memory in IE root = form = null; })(); (function(){ // Check to see if the browser returns only elements // when doing getElementsByTagName("*") // Create a fake element var div = document.createElement("div"); div.appendChild( document.createComment("") ); // Make sure no comments are found if ( div.getElementsByTagName("*").length > 0 ) { Expr.find.TAG = function( match, context ) { var results = context.getElementsByTagName( match[1] ); // Filter out possible comments if ( match[1] === "*" ) { var tmp = []; for ( var i = 0; results[i]; i++ ) { if ( results[i].nodeType === 1 ) { tmp.push( results[i] ); } } results = tmp; } return results; }; } // Check to see if an attribute returns normalized href attributes div.innerHTML = ""; if ( div.firstChild && typeof div.firstChild.getAttribute !== "undefined" && div.firstChild.getAttribute("href") !== "#" ) { Expr.attrHandle.href = function( elem ) { return elem.getAttribute( "href", 2 ); }; } // release memory in IE div = null; })(); if ( document.querySelectorAll ) { (function(){ var oldSizzle = Sizzle, div = document.createElement("div"), id = "__sizzle__"; div.innerHTML = "

"; // Safari can't handle uppercase or unicode characters when // in quirks mode. if ( div.querySelectorAll && div.querySelectorAll(".TEST").length === 0 ) { return; } Sizzle = function( query, context, extra, seed ) { context = context || document; // Only use querySelectorAll on non-XML documents // (ID selectors don't work in non-HTML documents) if ( !seed && !Sizzle.isXML(context) ) { // See if we find a selector to speed up var match = /^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec( query ); if ( match && (context.nodeType === 1 || context.nodeType === 9) ) { // Speed-up: Sizzle("TAG") if ( match[1] ) { return makeArray( context.getElementsByTagName( query ), extra ); // Speed-up: Sizzle(".CLASS") } else if ( match[2] && Expr.find.CLASS && context.getElementsByClassName ) { return makeArray( context.getElementsByClassName( match[2] ), extra ); } } if ( context.nodeType === 9 ) { // Speed-up: Sizzle("body") // The body element only exists once, optimize finding it if ( query === "body" && context.body ) { return makeArray( [ context.body ], extra ); // Speed-up: Sizzle("#ID") } else if ( match && match[3] ) { var elem = context.getElementById( match[3] ); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 if ( elem && elem.parentNode ) { // Handle the case where IE and Opera return items // by name instead of ID if ( elem.id === match[3] ) { return makeArray( [ elem ], extra ); } } else { return makeArray( [], extra ); } } try { return makeArray( context.querySelectorAll(query), extra ); } catch(qsaError) {} // qSA works strangely on Element-rooted queries // We can work around this by specifying an extra ID on the root // and working up from there (Thanks to Andrew Dupont for the technique) // IE 8 doesn't work on object elements } else if ( context.nodeType === 1 && context.nodeName.toLowerCase() !== "object" ) { var oldContext = context, old = context.getAttribute( "id" ), nid = old || id, hasParent = 
context.parentNode, relativeHierarchySelector = /^\s*[+~]/.test( query ); if ( !old ) { context.setAttribute( "id", nid ); } else { nid = nid.replace( /'/g, "\\$&" ); } if ( relativeHierarchySelector && hasParent ) { context = context.parentNode; } try { if ( !relativeHierarchySelector || hasParent ) { return makeArray( context.querySelectorAll( "[id='" + nid + "'] " + query ), extra ); } } catch(pseudoError) { } finally { if ( !old ) { oldContext.removeAttribute( "id" ); } } } } return oldSizzle(query, context, extra, seed); }; for ( var prop in oldSizzle ) { Sizzle[ prop ] = oldSizzle[ prop ]; } // release memory in IE div = null; })(); } (function(){ var html = document.documentElement, matches = html.matchesSelector || html.mozMatchesSelector || html.webkitMatchesSelector || html.msMatchesSelector; if ( matches ) { // Check to see if it's possible to do matchesSelector // on a disconnected node (IE 9 fails this) var disconnectedMatch = !matches.call( document.createElement( "div" ), "div" ), pseudoWorks = false; try { // This should fail with an exception // Gecko does not error, returns false instead matches.call( document.documentElement, "[test!='']:sizzle" ); } catch( pseudoError ) { pseudoWorks = true; } Sizzle.matchesSelector = function( node, expr ) { // Make sure that attribute selectors are quoted expr = expr.replace(/\=\s*([^'"\]]*)\s*\]/g, "='$1']"); if ( !Sizzle.isXML( node ) ) { try { if ( pseudoWorks || !Expr.match.PSEUDO.test( expr ) && !/!=/.test( expr ) ) { var ret = matches.call( node, expr ); // IE 9's matchesSelector returns false on disconnected nodes if ( ret || !disconnectedMatch || // As well, disconnected nodes are said to be in a document // fragment in IE 9, so check for that node.document && node.document.nodeType !== 11 ) { return ret; } } } catch(e) {} } return Sizzle(expr, null, null, [node]).length > 0; }; } })(); (function(){ var div = document.createElement("div"); div.innerHTML = "
"; // Opera can't find a second classname (in 9.6) // Also, make sure that getElementsByClassName actually exists if ( !div.getElementsByClassName || div.getElementsByClassName("e").length === 0 ) { return; } // Safari caches class attributes, doesn't catch changes (in 3.2) div.lastChild.className = "e"; if ( div.getElementsByClassName("e").length === 1 ) { return; } Expr.order.splice(1, 0, "CLASS"); Expr.find.CLASS = function( match, context, isXML ) { if ( typeof context.getElementsByClassName !== "undefined" && !isXML ) { return context.getElementsByClassName(match[1]); } }; // release memory in IE div = null; })(); function dirNodeCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) { for ( var i = 0, l = checkSet.length; i < l; i++ ) { var elem = checkSet[i]; if ( elem ) { var match = false; elem = elem[dir]; while ( elem ) { if ( elem.sizcache === doneName ) { match = checkSet[elem.sizset]; break; } if ( elem.nodeType === 1 && !isXML ){ elem.sizcache = doneName; elem.sizset = i; } if ( elem.nodeName.toLowerCase() === cur ) { match = elem; break; } elem = elem[dir]; } checkSet[i] = match; } } } function dirCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) { for ( var i = 0, l = checkSet.length; i < l; i++ ) { var elem = checkSet[i]; if ( elem ) { var match = false; elem = elem[dir]; while ( elem ) { if ( elem.sizcache === doneName ) { match = checkSet[elem.sizset]; break; } if ( elem.nodeType === 1 ) { if ( !isXML ) { elem.sizcache = doneName; elem.sizset = i; } if ( typeof cur !== "string" ) { if ( elem === cur ) { match = true; break; } } else if ( Sizzle.filter( cur, [elem] ).length > 0 ) { match = elem; break; } } elem = elem[dir]; } checkSet[i] = match; } } } if ( document.documentElement.contains ) { Sizzle.contains = function( a, b ) { return a !== b && (a.contains ? 
a.contains(b) : true); }; } else if ( document.documentElement.compareDocumentPosition ) { Sizzle.contains = function( a, b ) { return !!(a.compareDocumentPosition(b) & 16); }; } else { Sizzle.contains = function() { return false; }; } Sizzle.isXML = function( elem ) { // documentElement is verified for cases where it doesn't yet exist // (such as loading iframes in IE - #4833) var documentElement = (elem ? elem.ownerDocument || elem : 0).documentElement; return documentElement ? documentElement.nodeName !== "HTML" : false; }; var posProcess = function( selector, context ) { var match, tmpSet = [], later = "", root = context.nodeType ? [context] : context; // Position selectors must be done after the filter // And so must :not(positional) so we move all PSEUDOs to the end while ( (match = Expr.match.PSEUDO.exec( selector )) ) { later += match[0]; selector = selector.replace( Expr.match.PSEUDO, "" ); } selector = Expr.relative[selector] ? selector + "*" : selector; for ( var i = 0, l = root.length; i < l; i++ ) { Sizzle( selector, root[i], tmpSet ); } return Sizzle.filter( later, tmpSet ); }; // EXPOSE jQuery.find = Sizzle; jQuery.expr = Sizzle.selectors; jQuery.expr[":"] = jQuery.expr.filters; jQuery.unique = Sizzle.uniqueSort; jQuery.text = Sizzle.getText; jQuery.isXMLDoc = Sizzle.isXML; jQuery.contains = Sizzle.contains; })(); var runtil = /Until$/, rparentsprev = /^(?:parents|prevUntil|prevAll)/, // Note: This RegExp should be improved, or likely pulled from Sizzle rmultiselector = /,/, isSimple = /^.[^:#\[\.,]*$/, slice = Array.prototype.slice, POS = jQuery.expr.match.POS, // methods guaranteed to produce a unique set when starting from a unique set guaranteedUnique = { children: true, contents: true, next: true, prev: true }; jQuery.fn.extend({ find: function( selector ) { var self = this, i, l; if ( typeof selector !== "string" ) { return jQuery( selector ).filter(function() { for ( i = 0, l = self.length; i < l; i++ ) { if ( jQuery.contains( self[ i ], this 
) ) { return true; } } }); } var ret = this.pushStack( "", "find", selector ), length, n, r; for ( i = 0, l = this.length; i < l; i++ ) { length = ret.length; jQuery.find( selector, this[i], ret ); if ( i > 0 ) { // Make sure that the results are unique for ( n = length; n < ret.length; n++ ) { for ( r = 0; r < length; r++ ) { if ( ret[r] === ret[n] ) { ret.splice(n--, 1); break; } } } } } return ret; }, has: function( target ) { var targets = jQuery( target ); return this.filter(function() { for ( var i = 0, l = targets.length; i < l; i++ ) { if ( jQuery.contains( this, targets[i] ) ) { return true; } } }); }, not: function( selector ) { return this.pushStack( winnow(this, selector, false), "not", selector); }, filter: function( selector ) { return this.pushStack( winnow(this, selector, true), "filter", selector ); }, is: function( selector ) { return !!selector && ( typeof selector === "string" ? jQuery.filter( selector, this ).length > 0 : this.filter( selector ).length > 0 ); }, closest: function( selectors, context ) { var ret = [], i, l, cur = this[0]; // Array if ( jQuery.isArray( selectors ) ) { var match, selector, matches = {}, level = 1; if ( cur && selectors.length ) { for ( i = 0, l = selectors.length; i < l; i++ ) { selector = selectors[i]; if ( !matches[ selector ] ) { matches[ selector ] = POS.test( selector ) ? jQuery( selector, context || this.context ) : selector; } } while ( cur && cur.ownerDocument && cur !== context ) { for ( selector in matches ) { match = matches[ selector ]; if ( match.jquery ? match.index( cur ) > -1 : jQuery( cur ).is( match ) ) { ret.push({ selector: selector, elem: cur, level: level }); } } cur = cur.parentNode; level++; } } return ret; } // String var pos = POS.test( selectors ) || typeof selectors !== "string" ? jQuery( selectors, context || this.context ) : 0; for ( i = 0, l = this.length; i < l; i++ ) { cur = this[i]; while ( cur ) { if ( pos ? 
pos.index(cur) > -1 : jQuery.find.matchesSelector(cur, selectors) ) { ret.push( cur ); break; } else { cur = cur.parentNode; if ( !cur || !cur.ownerDocument || cur === context || cur.nodeType === 11 ) { break; } } } } ret = ret.length > 1 ? jQuery.unique( ret ) : ret; return this.pushStack( ret, "closest", selectors ); }, // Determine the position of an element within // the matched set of elements index: function( elem ) { if ( !elem || typeof elem === "string" ) { return jQuery.inArray( this[0], // If it receives a string, the selector is used // If it receives nothing, the siblings are used elem ? jQuery( elem ) : this.parent().children() ); } // Locate the position of the desired element return jQuery.inArray( // If it receives a jQuery object, the first element is used elem.jquery ? elem[0] : elem, this ); }, add: function( selector, context ) { var set = typeof selector === "string" ? jQuery( selector, context ) : jQuery.makeArray( selector && selector.nodeType ? [ selector ] : selector ), all = jQuery.merge( this.get(), set ); return this.pushStack( isDisconnected( set[0] ) || isDisconnected( all[0] ) ? all : jQuery.unique( all ) ); }, andSelf: function() { return this.add( this.prevObject ); } }); // A painfully simple check to see if an element is disconnected // from a document (should be improved, where feasible). function isDisconnected( node ) { return !node || !node.parentNode || node.parentNode.nodeType === 11; } jQuery.each({ parent: function( elem ) { var parent = elem.parentNode; return parent && parent.nodeType !== 11 ? 
parent : null; }, parents: function( elem ) { return jQuery.dir( elem, "parentNode" ); }, parentsUntil: function( elem, i, until ) { return jQuery.dir( elem, "parentNode", until ); }, next: function( elem ) { return jQuery.nth( elem, 2, "nextSibling" ); }, prev: function( elem ) { return jQuery.nth( elem, 2, "previousSibling" ); }, nextAll: function( elem ) { return jQuery.dir( elem, "nextSibling" ); }, prevAll: function( elem ) { return jQuery.dir( elem, "previousSibling" ); }, nextUntil: function( elem, i, until ) { return jQuery.dir( elem, "nextSibling", until ); }, prevUntil: function( elem, i, until ) { return jQuery.dir( elem, "previousSibling", until ); }, siblings: function( elem ) { return jQuery.sibling( elem.parentNode.firstChild, elem ); }, children: function( elem ) { return jQuery.sibling( elem.firstChild ); }, contents: function( elem ) { return jQuery.nodeName( elem, "iframe" ) ? elem.contentDocument || elem.contentWindow.document : jQuery.makeArray( elem.childNodes ); } }, function( name, fn ) { jQuery.fn[ name ] = function( until, selector ) { var ret = jQuery.map( this, fn, until ), // The variable 'args' was introduced in // https://github.com/jquery/jquery/commit/52a0238 // to work around a bug in Chrome 10 (Dev) and should be removed when the bug is fixed. // http://code.google.com/p/v8/issues/detail?id=1050 args = slice.call(arguments); if ( !runtil.test( name ) ) { selector = until; } if ( selector && typeof selector === "string" ) { ret = jQuery.filter( selector, ret ); } ret = this.length > 1 && !guaranteedUnique[ name ] ? jQuery.unique( ret ) : ret; if ( (this.length > 1 || rmultiselector.test( selector )) && rparentsprev.test( name ) ) { ret = ret.reverse(); } return this.pushStack( ret, name, args.join(",") ); }; }); jQuery.extend({ filter: function( expr, elems, not ) { if ( not ) { expr = ":not(" + expr + ")"; } return elems.length === 1 ? jQuery.find.matchesSelector(elems[0], expr) ? 
[ elems[0] ] : [] : jQuery.find.matches(expr, elems); }, dir: function( elem, dir, until ) { var matched = [], cur = elem[ dir ]; while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) { if ( cur.nodeType === 1 ) { matched.push( cur ); } cur = cur[dir]; } return matched; }, nth: function( cur, result, dir, elem ) { result = result || 1; var num = 0; for ( ; cur; cur = cur[dir] ) { if ( cur.nodeType === 1 && ++num === result ) { break; } } return cur; }, sibling: function( n, elem ) { var r = []; for ( ; n; n = n.nextSibling ) { if ( n.nodeType === 1 && n !== elem ) { r.push( n ); } } return r; } }); // Implement the identical functionality for filter and not function winnow( elements, qualifier, keep ) { // Can't pass null or undefined to indexOf in Firefox 4 // Set to 0 to skip string check qualifier = qualifier || 0; if ( jQuery.isFunction( qualifier ) ) { return jQuery.grep(elements, function( elem, i ) { var retVal = !!qualifier.call( elem, i, elem ); return retVal === keep; }); } else if ( qualifier.nodeType ) { return jQuery.grep(elements, function( elem, i ) { return (elem === qualifier) === keep; }); } else if ( typeof qualifier === "string" ) { var filtered = jQuery.grep(elements, function( elem ) { return elem.nodeType === 1; }); if ( isSimple.test( qualifier ) ) { return jQuery.filter(qualifier, filtered, !keep); } else { qualifier = jQuery.filter( qualifier, filtered ); } } return jQuery.grep(elements, function( elem, i ) { return (jQuery.inArray( elem, qualifier ) >= 0) === keep; }); } var rinlinejQuery = / jQuery\d+="(?:\d+|null)"/g, rleadingWhitespace = /^\s+/, rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig, rtagName = /<([\w:]+)/, rtbody = /", "" ], legend: [ 1, "
", "
" ], thead: [ 1, "", "
" ], tr: [ 2, "", "
" ], td: [ 3, "", "
" ], col: [ 2, "", "
" ], area: [ 1, "", "" ], _default: [ 0, "", "" ] }; wrapMap.optgroup = wrapMap.option; wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; wrapMap.th = wrapMap.td; // IE can't serialize and '."\n". ''; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/embed.pm0000644000000000000000000000350612211730001016327 0ustar #!/usr/bin/perl package IkiWiki::Plugin::embed; use warnings; use strict; use IkiWiki 3.00; my $attribr=qr/[^<>"]+/; # regexp matching known-safe html my $safehtml=qr{( # google maps <\s*iframe\s+width="\d+"\s+height="\d+"\s+frameborder="$attribr"\s+ scrolling="$attribr"\s+marginheight="\d+"\s+marginwidth="\d+"\s+ src="http://maps.google.com/\?$attribr"\s*>\s* | # youtube <\s*object\s+width="\d+"\s+height="\d+"\s*>\s* <\s*param\s+name="movie"\s+value="http://www.youtube.com/v/$attribr"\s*>\s* \s* <\s*param\s+name="wmode"\s+value="transparent"\s*>\s*\s* \s*\s* | # google video <\s*embed\s+style="\s*width:\d+px;\s+height:\d+px;\s*"\s+id="$attribr"\s+ type="application/x-shockwave-flash"\s+ src="http://video.google.com/googleplayer.swf\?$attribr"\s+ flashvars=""\s*>\s* | # google calendar <\s*iframe\s+src="http://www.google.com/calendar/embed\?src=$attribr"\s+ style="\s*border-width:\d+\s*"\s+width="\d+"\s+frameborder="\d+"\s* height="\d+"\s*>\s* )}sx; my @embedded; sub import { hook(type => "getsetup", id => "embed", call => \&getsetup); hook(type => "filter", id => "embed", call => \&filter); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, }, } sub embed ($) { hook(type => "format", id => "embed", call => \&format) unless @embedded; push @embedded, shift; return "
"; } sub filter (@) { my %params=@_; $params{content} =~ s/$safehtml/embed($1)/eg; return $params{content}; } sub format (@) { my %params=@_; $params{content} =~ s/
<\/div>/$embedded[$1]/eg; return $params{content}; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/cvs.pm0000644000000000000000000003272712211730001016055 0ustar #!/usr/bin/perl package IkiWiki::Plugin::cvs; # Copyright (c) 2009 Amitai Schlair # All rights reserved. # # This code is derived from software contributed to ikiwiki # by Amitai Schlair. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY IKIWIKI AND CONTRIBUTORS ``AS IS'' # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION # OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF # USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. 
use warnings; use strict; use IkiWiki; use URI::Escape q{uri_escape_utf8}; use File::chdir; # GENERAL PLUGIN API CALLS sub import { hook(type => "checkconfig", id => "cvs", call => \&checkconfig); hook(type => "getsetup", id => "cvs", call => \&getsetup); hook(type => "genwrapper", id => "cvs", call => \&genwrapper); hook(type => "rcs", id => "rcs_update", call => \&rcs_update); hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); hook(type => "rcs", id => "rcs_add", call => \&rcs_add); hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime); } sub checkconfig () { if (! defined $config{cvspath}) { $config{cvspath}="ikiwiki"; } if (exists $config{cvspath}) { # code depends on the path not having extraneous slashes $config{cvspath}=~tr#/#/#s; $config{cvspath}=~s/\/$//; $config{cvspath}=~s/^\///; } if (defined $config{cvs_wrapper} && length $config{cvs_wrapper}) { push @{$config{wrappers}}, { wrapper => $config{cvs_wrapper}, wrappermode => (defined $config{cvs_wrappermode} ? 
$config{cvs_wrappermode} : "04755"), }; } } sub getsetup () { return plugin => { safe => 0, # rcs plugin rebuild => undef, section => "rcs", }, cvsrepo => { type => "string", example => "/cvs/wikirepo", description => "cvs repository location", safe => 0, # path rebuild => 0, }, cvspath => { type => "string", example => "ikiwiki", description => "path inside repository where the wiki is located", safe => 0, # paranoia rebuild => 0, }, cvs_wrapper => { type => "string", example => "/cvs/wikirepo/CVSROOT/post-commit", description => "cvs post-commit hook to generate (triggered by CVSROOT/loginfo entry)", safe => 0, # file rebuild => 0, }, cvs_wrappermode => { type => "string", example => '04755', description => "mode for cvs_wrapper (can safely be made suid)", safe => 0, rebuild => 0, }, historyurl => { type => "string", example => "http://cvs.example.org/cvsweb.cgi/ikiwiki/[[file]]", description => "cvsweb url to show file history ([[file]] substituted)", safe => 1, rebuild => 1, }, diffurl => { type => "string", example => "http://cvs.example.org/cvsweb.cgi/ikiwiki/[[file]].diff?r1=text&tr1=[[r1]]&r2=text&tr2=[[r2]]", description => "cvsweb url to show a diff ([[file]], [[r1]], and [[r2]] substituted)", safe => 1, rebuild => 1, }, } sub genwrapper () { return <$tmpfile"); if ($? == -1) { error "couldn't run cvsps: $!\n"; } elsif (($? >> 8) != 0) { error "cvsps exited " . ($? >> 8) . ": $!\n"; } tie(*SPSVC, 'File::ReadBackwards', $tmpfile) || error "couldn't open $tmpfile for read: $!\n"; while (my $line = ) { $line =~ /^$/ || error "expected blank line, got $line"; my ($rev, $user, $committype, $when); my (@message, @pages); # We're reading backwards. 
# Forwards, an entry looks like so: # --------------------- # PatchSet $rev # Date: $when # Author: $user (or user CGI runs as, for web commits) # Branch: branch # Tag: tag # Log: # @message_lines # Members: # @pages (and revisions) # while ($line = ) { last if ($line =~ /^Members:/); for ($line) { s/^\s+//; s/\s+$//; } my ($page, $revs) = split(/:/, $line); my ($oldrev, $newrev) = split(/->/, $revs); $oldrev =~ s/INITIAL/0/; $newrev =~ s/\(DEAD\)//; my $diffurl = defined $config{diffurl} ? $config{diffurl} : ""; my $epage = join('/', map { uri_escape_utf8($_) } split('/', $page) ); $diffurl=~s/\[\[file\]\]/$epage/g; $diffurl=~s/\[\[r1\]\]/$oldrev/g; $diffurl=~s/\[\[r2\]\]/$newrev/g; unshift @pages, { page => pagename($page), diffurl => $diffurl, } if length $page; } while ($line = ) { last if ($line =~ /^Log:$/); chomp $line; unshift @message, { line => $line }; } $committype = "web"; if (defined $message[0] && $message[0]->{line}=~/$config{web_commit_regexp}/) { $user=defined $2 ? "$2" : "$3"; $message[0]->{line}=$4; } else { $committype="cvs"; } $line = ; # Tag $line = ; # Branch $line = ; if ($line =~ /^Author: (.*)$/) { $user = $1 unless defined $user && length $user; } else { error "expected Author, got $line"; } $line = ; if ($line =~ /^Date: (.*)$/) { $when = str2time($1, 'UTC'); } else { error "expected Date, got $line"; } $line = ; if ($line =~ /^PatchSet (.*)$/) { $rev = $1; } else { error "expected PatchSet, got $line"; } $line = ; # --------------------- push @ret, { rev => $rev, user => $user, committype => $committype, when => $when, message => [@message], pages => [@pages], } if @pages; last if @ret >= $num; } unlink($tmpfile) || error "couldn't unlink $tmpfile: $!\n"; return @ret; } sub rcs_diff ($;$) { my $rev=IkiWiki::possibly_foolish_untaint(int(shift)); my $maxlines=shift; local $CWD = $config{srcdir}; # diff output is unavoidably preceded by the cvsps PatchSet entry my @cvsps = `env TZ=UTC cvsps -q --cvs-direct -z 30 -g -s $rev`; my 
$blank_lines_seen = 0; # skip log, get to the diff while (my $line = shift @cvsps) { $blank_lines_seen++ if ($line =~ /^$/); last if $blank_lines_seen == 2; } @cvsps = @cvsps[0..$maxlines-1] if defined $maxlines && @cvsps > $maxlines; if (wantarray) { return @cvsps; } else { return join("", @cvsps); } } sub rcs_getctime ($) { my $file=shift; local $CWD = $config{srcdir}; my $cvs_log_infoline=qr/^date: (.+);\s+author/; open CVSLOG, "cvs -Q log -r1.1 '$file' |" || error "couldn't get cvs log output: $!\n"; my $date; while () { if (/$cvs_log_infoline/) { $date=$1; } } close CVSLOG || warn "cvs log $file exited $?"; if (! defined $date) { warn "failed to parse cvs log for $file\n"; return 0; } eval q{use Date::Parse}; error($@) if $@; $date=str2time($date, 'UTC'); debug("found ctime ".localtime($date)." for $file"); return $date; } sub rcs_getmtime ($) { error "rcs_getmtime is not implemented for cvs\n"; # TODO } # INTERNAL SUPPORT ROUTINES sub commitmessage (@) { my %params=@_; if (defined $params{session}) { if (defined $params{session}->param("name")) { return "web commit by ". $params{session}->param("name"). (length $params{message} ? ": $params{message}" : ""); } elsif (defined $params{session}->remote_addr()) { return "web commit from ". $params{session}->remote_addr(). (length $params{message} ? ": $params{message}" : ""); } } return $params{message}; } sub cvs_info ($$) { my $field=shift; my $file=shift; local $CWD = $config{srcdir}; my $info=`cvs status $file`; my ($ret)=$info=~/^\s*$field:\s*(\S+)/m; return $ret; } sub cvs_is_controlling { my $dir=shift; $dir=$config{srcdir} unless defined($dir); return (-d "$dir/CVS") ? 1 : 0; } sub cvs_keyword_subst_args ($) { my $file = shift; local $CWD = $config{srcdir}; eval q{use File::MimeInfo}; error($@) if $@; my $filemime = File::MimeInfo::default($file); # if (-T $file) { defined($filemime) && $filemime eq 'text/plain' ? 
return ('-kkv', $file) : return ('-kb', $file); } sub cvs_runcvs(@) { my @cmd = @_; unshift @cmd, 'cvs', '-Q'; # CVS can't operate outside a srcdir, so we're always setting $CWD. # "local $CWD" restores the previous value when we go out of scope. # Usually that's correct. But if we're removing the last file from # a directory, the post-commit hook will exec in a working directory # that's about to not exist (CVS will prune it). # # chdir() manually here, so we can selectively not chdir() back. my $oldcwd = $CWD; chdir($config{srcdir}); eval q{ use IPC::Open3; use Symbol qw(gensym); use IO::File; }; error($@) if $@; my $cvsout = ''; my $cvserr = ''; local *CATCHERR = IO::File->new_tmpfile; my $pid = open3(gensym(), \*CATCHOUT, ">&CATCHERR", @cmd); while (my $l = ) { $cvsout .= $l unless 1; } waitpid($pid, 0); my $ret = $? >> 8; seek CATCHERR, 0, 0; while (my $l = ) { $cvserr .= $l unless $l =~ /^cvs commit: changing keyword expansion /; } print STDOUT $cvsout; print STDERR $cvserr; chdir($oldcwd) if -d $oldcwd; return ($ret == 0) ? 
1 : 0; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/underlay.pm0000644000000000000000000000155012211730000017072 0ustar #!/usr/bin/perl package IkiWiki::Plugin::underlay; # Copyright © 2008 Simon McVittie # Licensed under the GNU GPL, version 2, or any later version published by the # Free Software Foundation use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "underlay", call => \&getsetup); hook(type => "checkconfig", id => "underlay", call => \&checkconfig); } sub getsetup () { return plugin => { safe => 0, rebuild => undef, section => "special-purpose", }, add_underlays => { type => "string", example => ["$ENV{HOME}/wiki.underlay"], description => "extra underlay directories to add", advanced => 1, safe => 0, rebuild => 1, }, } sub checkconfig () { if ($config{add_underlays}) { foreach my $dir (@{$config{add_underlays}}) { add_underlay($dir); } } } 1; ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/link.pm0000644000000000000000000000736112211730001016213 0ustar #!/usr/bin/perl package IkiWiki::Plugin::link; use warnings; use strict; use IkiWiki 3.00; my $link_regexp; my $email_regexp = qr/^.+@.+\..+$/; my $url_regexp = qr/^(?:[^:]+:\/\/|mailto:).*/i; sub import { hook(type => "getsetup", id => "link", call => \&getsetup); hook(type => "checkconfig", id => "link", call => \&checkconfig); hook(type => "linkify", id => "link", call => \&linkify); hook(type => "scan", id => "link", call => \&scan); hook(type => "renamepage", id => "link", call => \&renamepage); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, section => "core", }, } sub checkconfig () { if ($config{prefix_directives}) { $link_regexp = qr{ \[\[(?=[^!]) # beginning of link (?: ([^\]\|]+) # 1: link text \| # followed by '|' )? # optional ([^\n\r\]#]+) # 2: page to link to (?: \# # '#', beginning of anchor ([^\s\]]+) # 3: anchor text )? 
# optional \]\] # end of link }x; } else { $link_regexp = qr{ \[\[ # beginning of link (?: ([^\]\|\n\s]+) # 1: link text \| # followed by '|' )? # optional ([^\s\]#]+) # 2: page to link to (?: \# # '#', beginning of anchor ([^\s\]]+) # 3: anchor text )? # optional \]\] # end of link }x; } } sub is_externallink ($$;$) { my $page = shift; my $url = shift; my $anchor = shift; if (defined $anchor) { $url.="#".$anchor; } return ($url =~ /$url_regexp|$email_regexp/) } sub externallink ($$;$) { my $url = shift; my $anchor = shift; my $pagetitle = shift; if (defined $anchor) { $url.="#".$anchor; } # build pagetitle if (! $pagetitle) { $pagetitle = $url; # use only the email address as title for mailto: urls if ($pagetitle =~ /^mailto:.*/) { $pagetitle =~ s/^mailto:([^?]+).*/$1/; } } if ($url !~ /$url_regexp/) { # handle email addresses (without mailto:) $url = "mailto:" . $url; } return "$pagetitle"; } sub linkify (@) { my %params=@_; my $page=$params{page}; my $destpage=$params{destpage}; $params{content} =~ s{(\\?)$link_regexp}{ defined $2 ? ( $1 ? "[[$2|$3".(defined $4 ? "#$4" : "")."]]" : is_externallink($page, $3, $4) ? externallink($3, $4, $2) : htmllink($page, $destpage, linkpage($3), anchor => $4, linktext => pagetitle($2))) : ( $1 ? "[[$3".(defined $4 ? "#$4" : "")."]]" : is_externallink($page, $3, $4) ? externallink($3, $4) : htmllink($page, $destpage, linkpage($3), anchor => $4)) }eg; return $params{content}; } sub scan (@) { my %params=@_; my $page=$params{page}; my $content=$params{content}; while ($content =~ /(? 
"checkconfig", id => "httpauth", call => \&checkconfig); hook(type => "getsetup", id => "httpauth", call => \&getsetup); hook(type => "auth", id => "httpauth", call => \&auth); hook(type => "formbuilder_setup", id => "httpauth", call => \&formbuilder_setup); hook(type => "canedit", id => "httpauth", call => \&canedit, first => 1); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "auth", }, cgiauthurl => { type => "string", example => "http://example.com/wiki/auth/ikiwiki.cgi", description => "url to redirect to when authentication is needed", safe => 1, rebuild => 0, }, httpauth_pagespec => { type => "pagespec", example => "!*/Discussion", description => "PageSpec of pages where only httpauth will be used for authentication", safe => 0, rebuild => 0, }, } sub checkconfig () { if ($config{cgi} && defined $config{cgiauthurl} && keys %{$IkiWiki::hooks{auth}} < 2) { # There are no other auth hooks registered, so avoid # the normal signin form, and jump right to httpauth. require IkiWiki::CGI; inject(name => "IkiWiki::cgi_signin", call => sub ($$) { my $cgi=shift; redir_cgiauthurl($cgi, $cgi->query_string()); }); } } sub redir_cgiauthurl ($;@) { my $cgi=shift; IkiWiki::redirect($cgi, @_ > 1 ? IkiWiki::cgiurl(cgiurl => $config{cgiauthurl}, @_) : $config{cgiauthurl}."?@_" ); exit; } sub auth ($$) { my $cgi=shift; my $session=shift; if (defined $cgi->remote_user()) { $session->param("name", $cgi->remote_user()); } } sub formbuilder_setup (@) { my %params=@_; my $form=$params{form}; my $session=$params{session}; my $cgi=$params{cgi}; my $buttons=$params{buttons}; if ($form->title eq "signin" && ! 
defined $cgi->remote_user() && defined $config{cgiauthurl}) { my $button_text="Login with HTTP auth"; push @$buttons, $button_text; if ($form->submitted && $form->submitted eq $button_text) { # bounce thru cgiauthurl and then back to # the stored postsignin action redir_cgiauthurl($cgi, do => "postsignin"); } } } sub canedit ($$$) { my $page=shift; my $cgi=shift; my $session=shift; if (! defined $cgi->remote_user() && (! defined $session->param("name") || ! IkiWiki::userinfo_get($session->param("name"), "regdate")) && defined $config{httpauth_pagespec} && length $config{httpauth_pagespec} && defined $config{cgiauthurl} && pagespec_match($page, $config{httpauth_pagespec})) { return sub { # bounce thru cgiauthurl and back to edit action redir_cgiauthurl($cgi, $cgi->query_string()); }; } else { return undef; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/notifyemail.pm0000644000000000000000000001103412211730001017566 0ustar #!/usr/bin/perl package IkiWiki::Plugin::notifyemail; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "formbuilder", id => "notifyemail", call => \&formbuilder); hook(type => "getsetup", id => "notifyemail", call => \&getsetup); hook(type => "changes", id => "notifyemail", call => \¬ify); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, }, } sub formbuilder (@) { my %params=@_; my $form=$params{form}; return unless $form->title eq "preferences"; my $session=$params{session}; my $username=$session->param("name"); $form->field(name => "subscriptions", size => 50, fieldset => "preferences", comment => "(".htmllink("", "", "ikiwiki/PageSpec", noimageinline => 1).")"); if (! 
$form->submitted) { $form->field(name => "subscriptions", force => 1, value => getsubscriptions($username)); } elsif ($form->submitted eq "Save Preferences" && $form->validate && defined $form->field("subscriptions")) { setsubscriptions($username, $form->field('subscriptions')); } } sub getsubscriptions ($) { my $user=shift; eval q{use IkiWiki::UserInfo}; error $@ if $@; IkiWiki::userinfo_get($user, "subscriptions"); } sub setsubscriptions ($$) { my $user=shift; my $subscriptions=shift; eval q{use IkiWiki::UserInfo}; error $@ if $@; IkiWiki::userinfo_set($user, "subscriptions", $subscriptions); } # Called by other plugins to subscribe the user to a pagespec. sub subscribe ($$) { my $user=shift; my $addpagespec=shift; my $pagespec=getsubscriptions($user); setsubscriptions($user, length $pagespec ? $pagespec." or ".$addpagespec : $addpagespec); } # Called by other plugins to subscribe an email to a pagespec. sub anonsubscribe ($$) { my $email=shift; my $addpagespec=shift; if (IkiWiki::Plugin::passwordauth->can("anonuser")) { my $user=IkiWiki::Plugin::passwordauth::anonuser($email); if (! defined $user) { error(gettext("Cannot subscribe your email address without logging in.")); } subscribe($user, $addpagespec); } } sub notify (@) { my @files=@_; return unless @files; return if $config{rebuild}; eval q{use Mail::Sendmail}; error $@ if $@; eval q{use IkiWiki::UserInfo}; error $@ if $@; eval q{use URI}; error($@) if $@; # Daemonize, in case the mail sending takes a while. defined(my $pid = fork) or error("Can't fork: $!"); return if $pid; # parent chdir '/'; open STDIN, '/dev/null'; open STDOUT, '>/dev/null'; POSIX::setsid() or error("Can't start a new session: $!"); open STDERR, '>&STDOUT' or error("Can't dup stdout: $!"); # Don't need to keep a lock on the wiki as a daemon. 
IkiWiki::unlockwiki(); my $userinfo=IkiWiki::userinfo_retrieve(); exit 0 unless defined $userinfo; foreach my $user (keys %$userinfo) { my $pagespec=$userinfo->{$user}->{"subscriptions"}; next unless defined $pagespec && length $pagespec; my $email=$userinfo->{$user}->{email}; next unless defined $email && length $email; foreach my $file (@files) { my $page=pagename($file); next unless pagespec_match($page, $pagespec); my $content=""; my $showcontent=defined pagetype($file); if ($showcontent) { $content=eval { readfile(srcfile($file)) }; $showcontent=0 if $@; } my $url; if (! IkiWiki::isinternal($page)) { $url=urlto($page, undef, 1); } elsif (defined $pagestate{$page}{meta}{permalink}) { # need to use permalink for an internal page $url=URI->new_abs($pagestate{$page}{meta}{permalink}, $config{url}); } else { $url=$config{url}; # crummy fallback url } my $pagedesc=$page; if (defined $pagestate{$page}{meta}{title} && length $pagestate{$page}{meta}{title}) { $pagedesc=qq{"$pagestate{$page}{meta}{title}"}; } my $subject=gettext("change notification:")." ".$pagedesc; if (pagetype($file) eq '_comment') { $subject=gettext("comment notification:")." ".$pagedesc; } my $prefsurl=IkiWiki::cgiurl_abs(do => 'prefs'); if (IkiWiki::Plugin::passwordauth->can("anonusertoken")) { my $token=IkiWiki::Plugin::passwordauth::anonusertoken($userinfo->{$user}); $prefsurl=IkiWiki::cgiurl_abs( do => 'tokenauth', name => $user, token => $token, ) if defined $token; } my $template=template("notifyemail.tmpl"); $template->param( wikiname => $config{wikiname}, url => $url, prefsurl => $prefsurl, showcontent => $showcontent, content => $content, ); sendmail( To => $email, From => "$config{wikiname} <$config{adminemail}>", Subject => $subject, Message => $template->output, ); } } exit 0; # daemon child } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/tag.pm0000644000000000000000000001127312211730000016025 0ustar #!/usr/bin/perl # Ikiwiki tag plugin. 
package IkiWiki::Plugin::tag; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "checkconfig", id => "tag", call => \&checkconfig); hook(type => "getopt", id => "tag", call => \&getopt); hook(type => "getsetup", id => "tag", call => \&getsetup); hook(type => "preprocess", id => "tag", call => \&preprocess_tag, scan => 1); hook(type => "preprocess", id => "taglink", call => \&preprocess_taglink, scan => 1); hook(type => "pagetemplate", id => "tag", call => \&pagetemplate); IkiWiki::loadplugin("transient"); } sub getopt () { eval q{use Getopt::Long}; error($@) if $@; Getopt::Long::Configure('pass_through'); GetOptions("tagbase=s" => \$config{tagbase}); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, }, tagbase => { type => "string", example => "tag", description => "parent page tags are located under", safe => 1, rebuild => 1, }, tag_autocreate => { type => "boolean", example => 1, description => "autocreate new tag pages?", safe => 1, rebuild => undef, }, tag_autocreate_commit => { type => "boolean", example => 1, default => 1, description => "commit autocreated tag pages", safe => 1, rebuild => 0, }, } sub checkconfig () { if (! defined $config{tag_autocreate_commit}) { $config{tag_autocreate_commit} = 1; } } sub taglink ($) { my $tag=shift; if ($tag !~ m{^/} && defined $config{tagbase}) { $tag="/".$config{tagbase}."/".$tag; $tag=~y#/#/#s; # squash dups } return $tag; } # Returns a tag name from a tag link sub tagname ($) { my $tag=shift; if (defined $config{tagbase}) { $tag =~ s!^/\Q$config{tagbase}\E/!!; } else { $tag =~ s!^\.?/!!; } return pagetitle($tag, 1); } sub htmllink_tag ($$$;@) { my $page=shift; my $destpage=shift; my $tag=shift; my %opts=@_; return htmllink($page, $destpage, taglink($tag), %opts); } sub gentag ($) { my $tag=shift; if ($config{tag_autocreate} || ($config{tagbase} && ! 
defined $config{tag_autocreate})) { my $tagpage=taglink($tag); if ($tagpage=~/^\.\/(.*)/) { $tagpage=$1; } else { $tagpage=~s/^\///; } if (exists $IkiWiki::pagecase{lc $tagpage}) { $tagpage=$IkiWiki::pagecase{lc $tagpage} } my $tagfile = newpagefile($tagpage, $config{default_pageext}); add_autofile($tagfile, "tag", sub { my $message=sprintf(gettext("creating tag page %s"), $tagpage); debug($message); my $template=template("autotag.tmpl"); $template->param(tagname => tagname($tag)); $template->param(tag => $tag); my $dir = $config{srcdir}; if (! $config{tag_autocreate_commit}) { $dir = $IkiWiki::Plugin::transient::transientdir; } writefile($tagfile, $dir, $template->output); if ($config{rcs} && $config{tag_autocreate_commit}) { IkiWiki::disable_commit_hook(); IkiWiki::rcs_add($tagfile); IkiWiki::rcs_commit_staged(message => $message); IkiWiki::enable_commit_hook(); } }); } } sub preprocess_tag (@) { if (! @_) { return ""; } my %params=@_; my $page = $params{page}; delete $params{page}; delete $params{destpage}; delete $params{preview}; foreach my $tag (keys %params) { $tag=linkpage($tag); # hidden WikiLink add_link($page, taglink($tag), 'tag'); gentag($tag); } return ""; } sub preprocess_taglink (@) { if (! 
@_) { return ""; } my %params=@_; return join(" ", map { if (/(.*)\|(.*)/) { my $tag=linkpage($2); add_link($params{page}, taglink($tag), 'tag'); gentag($tag); return htmllink_tag($params{page}, $params{destpage}, $tag, linktext => pagetitle($1)); } else { my $tag=linkpage($_); add_link($params{page}, taglink($tag), 'tag'); gentag($tag); return htmllink_tag($params{page}, $params{destpage}, $tag); } } grep { $_ ne 'page' && $_ ne 'destpage' && $_ ne 'preview' } keys %params); } sub pagetemplate (@) { my %params=@_; my $page=$params{page}; my $destpage=$params{destpage}; my $template=$params{template}; my $tags = $typedlinks{$page}{tag}; $template->param(tags => [ map { link => htmllink_tag($page, $destpage, $_, rel => "tag", linktext => tagname($_)) }, sort keys %$tags ]) if defined $tags && %$tags && $template->query(name => "tags"); if ($template->query(name => "categories")) { # It's an rss/atom template. Add any categories. if (defined $tags && %$tags) { eval q{use HTML::Entities}; $template->param(categories => [map { category => HTML::Entities::encode_entities_numeric(tagname($_)) }, sort keys %$tags]); } } } package IkiWiki::PageSpec; sub match_tagged ($$;@) { my $page=shift; my $glob=IkiWiki::Plugin::tag::taglink(shift); return match_link($page, $glob, linktype => 'tag', @_); } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/rename.pm0000644000000000000000000003732612211730001016531 0ustar #!/usr/bin/perl package IkiWiki::Plugin::rename; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "rename", call => \&getsetup); hook(type => "formbuilder_setup", id => "rename", call => \&formbuilder_setup); hook(type => "formbuilder", id => "rename", call => \&formbuilder); hook(type => "sessioncgi", id => "rename", call => \&sessioncgi); hook(type => "rename", id => "rename", call => \&rename_subpages); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "web", }, } sub check_canrename ($$$$$$) { my $src=shift; 
my $srcfile=shift; my $dest=shift; my $destfile=shift; my $q=shift; my $session=shift; my $attachment=! defined pagetype($pagesources{$src}); # Must be a known source file. if (! exists $pagesources{$src}) { error(sprintf(gettext("%s does not exist"), htmllink("", "", $src, noimageinline => 1))); } # Must exist on disk, and be a regular file. if (! -e "$config{srcdir}/$srcfile") { error(sprintf(gettext("%s is not in the srcdir, so it cannot be renamed"), $srcfile)); } elsif (-l "$config{srcdir}/$srcfile" && ! -f _) { error(sprintf(gettext("%s is not a file"), $srcfile)); } # Must be editable. IkiWiki::check_canedit($src, $q, $session); if ($attachment) { if (IkiWiki::Plugin::attachment->can("check_canattach")) { IkiWiki::Plugin::attachment::check_canattach($session, $src, "$config{srcdir}/$srcfile"); } else { error("renaming of attachments is not allowed"); } } # Dest checks can be omitted by passing undef. if (defined $dest) { if ($srcfile eq $destfile) { error(gettext("no change to the file name was specified")); } # Must be a legal filename. if (IkiWiki::file_pruned($destfile)) { error(sprintf(gettext("illegal name"))); } # Must not be a known source file. if ($src ne $dest && exists $pagesources{$dest}) { error(sprintf(gettext("%s already exists"), htmllink("", "", $dest, noimageinline => 1))); } # Must not exist on disk already. if (-l "$config{srcdir}/$destfile" || -e _) { error(sprintf(gettext("%s already exists on disk"), $destfile)); } # Must be editable. IkiWiki::check_canedit($dest, $q, $session); if ($attachment) { # Note that $srcfile is used here, not $destfile, # because it wants the current file, to check it. 
IkiWiki::Plugin::attachment::check_canattach($session, $dest, "$config{srcdir}/$srcfile"); } } my $canrename; IkiWiki::run_hooks(canrename => sub { return if defined $canrename; my $ret=shift->(cgi => $q, session => $session, src => $src, srcfile => $srcfile, dest => $dest, destfile => $destfile); if (defined $ret) { if ($ret eq "") { $canrename=1; } elsif (ref $ret eq 'CODE') { $ret->(); $canrename=0; } elsif (defined $ret) { error($ret); $canrename=0; } } }); return defined $canrename ? $canrename : 1; } sub rename_form ($$$) { my $q=shift; my $session=shift; my $page=shift; eval q{use CGI::FormBuilder}; error($@) if $@; my $f = CGI::FormBuilder->new( name => "rename", title => sprintf(gettext("rename %s"), pagetitle($page)), header => 0, charset => "utf-8", method => 'POST', javascript => 0, params => $q, action => IkiWiki::cgiurl(), stylesheet => 1, fields => [qw{do page new_name attachment}], ); $f->field(name => "do", type => "hidden", value => "rename", force => 1); $f->field(name => "sid", type => "hidden", value => $session->id, force => 1); $f->field(name => "page", type => "hidden", value => $page, force => 1); $f->field(name => "new_name", value => pagetitle($page, 1), size => 60); if (!$q->param("attachment")) { # insert the standard extensions my @page_types; if (exists $IkiWiki::hooks{htmlize}) { foreach my $key (grep { !/^_/ } keys %{$IkiWiki::hooks{htmlize}}) { push @page_types, [$key, $IkiWiki::hooks{htmlize}{$key}{longname} || $key]; } } @page_types=sort @page_types; # make sure the current extension is in the list my ($ext) = $pagesources{$page}=~/\.([^.]+)$/; if (! 
$IkiWiki::hooks{htmlize}{$ext}) { unshift(@page_types, [$ext, $ext]); } $f->field(name => "type", type => 'select', options => \@page_types, value => $ext, force => 1); foreach my $p (keys %pagesources) { if ($pagesources{$p}=~m/^\Q$page\E\//) { $f->field(name => "subpages", label => "", type => "checkbox", options => [ [ 1 => gettext("Also rename SubPages and attachments") ] ], value => 1, force => 1); last; } } } $f->field(name => "attachment", type => "hidden"); return $f, ["Rename", "Cancel"]; } sub rename_start ($$$$) { my $q=shift; my $session=shift; my $attachment=shift; my $page=shift; # Special case for renaming held attachments; normal checks # don't apply. my $held=$attachment && IkiWiki::Plugin::attachment->can("is_held_attachment") && IkiWiki::Plugin::attachment::is_held_attachment($page); if (! $held) { check_canrename($page, $pagesources{$page}, undef, undef, $q, $session); } # Save current form state to allow returning to it later # without losing any edits. # (But don't save what button was submitted, to avoid # looping back to here.) # Note: "_submit" is CGI::FormBuilder internals. $q->param(-name => "_submit", -value => ""); $session->param(postrename => scalar $q->Vars); IkiWiki::cgi_savesession($session); if (defined $attachment) { $q->param(-name => "attachment", -value => $attachment); } my ($f, $buttons)=rename_form($q, $session, $page); IkiWiki::showform($f, $buttons, $session, $q); exit 0; } sub postrename ($$$;$$) { my $cgi=shift; my $session=shift; my $src=shift; my $dest=shift; my $attachment=shift; # Load saved form state and return to edit page, using stored old # cgi state. Or, if the rename was not started on the edit page, # return to the renamed page. my $postrename=$session->param("postrename"); if (! defined $postrename) { IkiWiki::redirect($cgi, urlto(defined $dest ? $dest : $src)); exit; } my $oldcgi=CGI->new($postrename); $session->clear("postrename"); IkiWiki::cgi_savesession($session); if (defined $dest) { if (! 
$attachment) { # They renamed the page they were editing. This requires # fixups to the edit form state. # Tweak the edit form to be editing the new page. $oldcgi->param("page", $dest); } # Update edit form content to fix any links present # on it. $oldcgi->param("editcontent", renamepage_hook($dest, $src, $dest, $oldcgi->param("editcontent"))); # Get a new edit token; old was likely invalidated. $oldcgi->param("rcsinfo", IkiWiki::rcs_prepedit($pagesources{$dest})); } IkiWiki::cgi_editpage($oldcgi, $session); } sub formbuilder (@) { my %params=@_; my $form=$params{form}; if (defined $form->field("do") && ($form->field("do") eq "edit" || $form->field("do") eq "create")) { IkiWiki::decode_form_utf8($form); my $q=$params{cgi}; my $session=$params{session}; if ($form->submitted eq "Rename" && $form->field("do") eq "edit") { rename_start($q, $session, 0, $form->field("page")); } elsif ($form->submitted eq "Rename Attachment") { my @selected=map { Encode::decode_utf8($_) } $q->param("attachment_select"); if (@selected > 1) { error(gettext("Only one attachment can be renamed at a time.")); } elsif (! @selected) { error(gettext("Please select the attachment to rename.")) } rename_start($q, $session, 1, $selected[0]); } } } my $renamesummary; sub formbuilder_setup (@) { my %params=@_; my $form=$params{form}; my $q=$params{cgi}; if (defined $form->field("do") && ($form->field("do") eq "edit" || $form->field("do") eq "create")) { # Rename button for the page, and also for attachments. 
push @{$params{buttons}}, "Rename" if $form->field("do") eq "edit"; $form->tmpl_param("field-rename" => ''); if (defined $renamesummary) { $form->tmpl_param(message => $renamesummary); } } } sub sessioncgi ($$) { my $q=shift; if ($q->param("do") eq 'rename') { my $session=shift; my ($form, $buttons)=rename_form($q, $session, Encode::decode_utf8($q->param("page"))); IkiWiki::decode_form_utf8($form); my $src=$form->field("page"); if ($form->submitted eq 'Cancel') { postrename($q, $session, $src); } elsif ($form->submitted eq 'Rename' && $form->validate) { IkiWiki::checksessionexpiry($q, $session, $q->param('sid')); # These untaints are safe because of the checks # performed in check_canrename later. my $srcfile=IkiWiki::possibly_foolish_untaint($pagesources{$src}) if exists $pagesources{$src}; my $dest=IkiWiki::possibly_foolish_untaint(titlepage($form->field("new_name"))); my $destfile=$dest; if (! $q->param("attachment")) { my $type=$q->param('type'); if (defined $type && length $type && $IkiWiki::hooks{htmlize}{$type}) { $type=IkiWiki::possibly_foolish_untaint($type); } else { my ($ext)=$srcfile=~/\.([^.]+)$/; $type=$ext; } $destfile=newpagefile($dest, $type); } # Special case for renaming held attachments. my $held=$q->param("attachment") && IkiWiki::Plugin::attachment->can("is_held_attachment") && IkiWiki::Plugin::attachment::is_held_attachment($src); if ($held) { rename($held, IkiWiki::Plugin::attachment::attachment_holding_location($dest)); postrename($q, $session, $src, $dest, $q->param("attachment")) unless defined $srcfile; } # Queue of rename actions to perfom. my @torename; push @torename, { src => $src, srcfile => $srcfile, dest => $dest, destfile => $destfile, required => 1, }; @torename=rename_hook( torename => \@torename, done => {}, cgi => $q, session => $session, ); require IkiWiki::Render; IkiWiki::disable_commit_hook() if $config{rcs}; my %origpagesources=%pagesources; # First file renaming. 
foreach my $rename (@torename) { if ($rename->{required}) { do_rename($rename, $q, $session); } else { eval {do_rename($rename, $q, $session)}; if ($@) { $rename->{error}=$@; next; } } # Temporarily tweak pagesources to point to # the renamed file, in case fixlinks needs # to edit it. $pagesources{$rename->{src}}=$rename->{destfile}; } IkiWiki::rcs_commit_staged( message => sprintf(gettext("rename %s to %s"), $srcfile, $destfile), session => $session, ) if $config{rcs}; # Then link fixups. foreach my $rename (@torename) { next if $rename->{src} eq $rename->{dest}; next if $rename->{error}; foreach my $p (fixlinks($rename, $session)) { # map old page names to new foreach my $r (@torename) { next if $rename->{error}; if ($r->{src} eq $p) { $p=$r->{dest}; last; } } push @{$rename->{fixedlinks}}, $p; } } # Then refresh. %pagesources=%origpagesources; if ($config{rcs}) { IkiWiki::enable_commit_hook(); IkiWiki::rcs_update(); } IkiWiki::refresh(); IkiWiki::saveindex(); # Find pages with remaining, broken links. foreach my $rename (@torename) { next if $rename->{src} eq $rename->{dest}; foreach my $page (keys %links) { my $broken=0; foreach my $link (@{$links{$page}}) { my $bestlink=bestlink($page, $link); if ($bestlink eq $rename->{src}) { push @{$rename->{brokenlinks}}, $page; last; } } } } # Generate a summary, that will be shown at the top # of the edit template. 
$renamesummary=""; foreach my $rename (@torename) { my $template=template("renamesummary.tmpl"); $template->param(src => $rename->{srcfile}); $template->param(dest => $rename->{destfile}); $template->param(error => $rename->{error}); if ($rename->{src} ne $rename->{dest}) { $template->param(brokenlinks_checked => 1); $template->param(brokenlinks => linklist($rename->{dest}, $rename->{brokenlinks})); $template->param(fixedlinks => linklist($rename->{dest}, $rename->{fixedlinks})); } $renamesummary.=$template->output; } postrename($q, $session, $src, $dest, $q->param("attachment")); } else { IkiWiki::showform($form, $buttons, $session, $q); } exit 0; } } # Add subpages to the list of pages to be renamed, if needed. sub rename_subpages (@) { my %params = @_; my %torename = %{$params{torename}}; my $q = $params{cgi}; my $src = $torename{src}; my $srcfile = $torename{src}; my $dest = $torename{dest}; my $destfile = $torename{dest}; return () unless ($q->param("subpages") && $src ne $dest); my @ret; foreach my $p (keys %pagesources) { next unless $pagesources{$p}=~m/^\Q$src\E\//; # If indexpages is enabled, the srcfile should not be confused # with a subpage. next if $pagesources{$p} eq $srcfile; my $d=$pagesources{$p}; $d=~s/^\Q$src\E\//$dest\//; push @ret, { src => $p, srcfile => $pagesources{$p}, dest => pagename($d), destfile => $d, required => 0, }; } return @ret; } sub linklist { # generates a list of links in a form suitable for FormBuilder my $dest=shift; my $list=shift; # converts a list of pages into a list of links # in a form suitable for FormBuilder. 
[map { { page => htmllink($dest, $dest, $_, noimageinline => 1, linktext => pagetitle($_), ) } } @{$list}] } sub renamepage_hook ($$$$) { my ($page, $src, $dest, $content)=@_; IkiWiki::run_hooks(renamepage => sub { $content=shift->( page => $page, oldpage => $src, newpage => $dest, content => $content, ); }); return $content; } sub rename_hook { my %params = @_; my @torename=@{$params{torename}}; my %done=%{$params{done}}; my $q=$params{cgi}; my $session=$params{session}; return () unless @torename; my @nextset; foreach my $torename (@torename) { unless (exists $done{$torename->{src}} && $done{$torename->{src}}) { IkiWiki::run_hooks(rename => sub { push @nextset, shift->( torename => $torename, cgi => $q, session => $session, ); }); $done{$torename->{src}}=1; } } push @torename, rename_hook( torename => \@nextset, done => \%done, cgi => $q, session => $session, ); # dedup my %seen; return grep { ! $seen{$_->{src}}++ } @torename; } sub do_rename ($$$) { my $rename=shift; my $q=shift; my $session=shift; # First, check if this rename is allowed. check_canrename($rename->{src}, $rename->{srcfile}, $rename->{dest}, $rename->{destfile}, $q, $session); # Ensure that the dest directory exists and is ok. IkiWiki::prep_writefile($rename->{destfile}, $config{srcdir}); if ($config{rcs}) { IkiWiki::rcs_rename($rename->{srcfile}, $rename->{destfile}); } else { if (! 
rename($config{srcdir}."/".$rename->{srcfile}, $config{srcdir}."/".$rename->{destfile})) { error("rename: $!"); } } } sub fixlinks ($$$) { my $rename=shift; my $session=shift; my @fixedlinks; foreach my $page (keys %links) { my $needfix=0; foreach my $link (@{$links{$page}}) { my $bestlink=bestlink($page, $link); if ($bestlink eq $rename->{src}) { $needfix=1; last; } } if ($needfix) { my $file=$pagesources{$page}; next unless -e $config{srcdir}."/".$file; my $oldcontent=readfile($config{srcdir}."/".$file); my $content=renamepage_hook($page, $rename->{src}, $rename->{dest}, $oldcontent); if ($oldcontent ne $content) { my $token=IkiWiki::rcs_prepedit($file); eval { writefile($file, $config{srcdir}, $content) }; next if $@; my $conflict=IkiWiki::rcs_commit( file => $file, message => sprintf(gettext("update for rename of %s to %s"), $rename->{srcfile}, $rename->{destfile}), token => $token, session => $session, ); push @fixedlinks, $page if ! defined $conflict; } } } return @fixedlinks; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/meta.pm0000644000000000000000000003173712211730001016210 0ustar #!/usr/bin/perl # Ikiwiki metadata plugin. 
package IkiWiki::Plugin::meta; use warnings; use strict; use IkiWiki 3.00; my %metaheaders; sub import { hook(type => "getsetup", id => "meta", call => \&getsetup); hook(type => "needsbuild", id => "meta", call => \&needsbuild); hook(type => "preprocess", id => "meta", call => \&preprocess, scan => 1); hook(type => "pagetemplate", id => "meta", call => \&pagetemplate); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "core", }, } sub needsbuild (@) { my $needsbuild=shift; foreach my $page (keys %pagestate) { if (exists $pagestate{$page}{meta}) { if (exists $pagesources{$page} && grep { $_ eq $pagesources{$page} } @$needsbuild) { # remove state, it will be re-added # if the preprocessor directive is still # there during the rebuild delete $pagestate{$page}{meta}; } } } return $needsbuild; } sub scrub ($$$) { if (IkiWiki::Plugin::htmlscrubber->can("sanitize")) { return IkiWiki::Plugin::htmlscrubber::sanitize( content => shift, page => shift, destpage => shift); } else { return shift; } } sub safeurl ($) { my $url=shift; if (exists $IkiWiki::Plugin::htmlscrubber::{safe_url_regexp} && defined $IkiWiki::Plugin::htmlscrubber::safe_url_regexp) { return $url=~/$IkiWiki::Plugin::htmlscrubber::safe_url_regexp/; } else { return 1; } } sub htmlize ($$$) { my $page = shift; my $destpage = shift; return IkiWiki::htmlize($page, $destpage, pagetype($pagesources{$page}), IkiWiki::linkify($page, $destpage, IkiWiki::preprocess($page, $destpage, shift))); } sub preprocess (@) { return "" unless @_; my %params=@_; my $key=shift; my $value=$params{$key}; delete $params{$key}; my $page=$params{page}; delete $params{page}; my $destpage=$params{destpage}; delete $params{destpage}; delete $params{preview}; eval q{use HTML::Entities}; # Always decode, even if encoding later, since it might not be # fully encoded. $value=decode_entities($value); # Metadata collection that needs to happen during the scan pass. 
if ($key eq 'title') { $pagestate{$page}{meta}{title}=$value; if (exists $params{sortas}) { $pagestate{$page}{meta}{titlesort}=$params{sortas}; } else { delete $pagestate{$page}{meta}{titlesort}; } return ""; } elsif ($key eq 'description') { $pagestate{$page}{meta}{description}=$value; # fallthrough } elsif ($key eq 'guid') { $pagestate{$page}{meta}{guid}=$value; # fallthrough } elsif ($key eq 'license') { push @{$metaheaders{$page}}, ''; $pagestate{$page}{meta}{license}=$value; return ""; } elsif ($key eq 'copyright') { push @{$metaheaders{$page}}, ''; $pagestate{$page}{meta}{copyright}=$value; return ""; } elsif ($key eq 'link' && ! %params) { # hidden WikiLink add_link($page, $value); return ""; } elsif ($key eq 'enclosure') { my $link=bestlink($page, $value); if (! length $link) { error gettext("enclosure not found") } add_depends($page, $link, deptype("presence")); $value=urlto($link, $page, 1); $pagestate{$page}{meta}{enclosure}=$value; $pagestate{$page}{meta}{enclosurefile}=$link; # fallthrough } elsif ($key eq 'author') { $pagestate{$page}{meta}{author}=$value; if (exists $params{sortas}) { $pagestate{$page}{meta}{authorsort}=$params{sortas}; } else { delete $pagestate{$page}{meta}{authorsort}; } # fallthorough } elsif ($key eq 'authorurl') { $pagestate{$page}{meta}{authorurl}=$value if safeurl($value); # fallthrough } elsif ($key eq 'permalink') { $pagestate{$page}{meta}{permalink}=$value if safeurl($value); # fallthrough } elsif ($key eq 'date') { eval q{use Date::Parse}; if (! $@) { my $time = str2time($value); $IkiWiki::pagectime{$page}=$time if defined $time; } } elsif ($key eq 'updated') { eval q{use Date::Parse}; if (! $@) { my $time = str2time($value); $pagestate{$page}{meta}{updated}=$time if defined $time; } } if (! defined wantarray) { # avoid collecting duplicate data during scan pass return; } # Metadata handling that happens only during preprocessing pass. 
if ($key eq 'permalink') { if (safeurl($value)) { push @{$metaheaders{$page}}, scrub('', $page, $destpage); } } elsif ($key eq 'stylesheet') { my $rel=exists $params{rel} ? $params{rel} : "alternate stylesheet"; my $title=exists $params{title} ? $params{title} : $value; # adding .css to the value prevents using any old web # editable page as a stylesheet my $stylesheet=bestlink($page, $value.".css"); if (! length $stylesheet) { error gettext("stylesheet not found") } push @{$metaheaders{$page}}, scrub('", $page, $destpage); } elsif ($key eq 'script') { my $defer=exists $params{defer} ? ' defer="defer"' : ''; my $async=exists $params{async} ? ' async="async"' : ''; my $js=bestlink($page, $value.".js"); if (! length $js) { error gettext("script not found"); } push @{$metaheaders{$page}}, scrub('', $page, $destpage); } elsif ($key eq 'openid') { my $delegate=0; # both by default if (exists $params{delegate}) { $delegate = 1 if lc $params{delegate} eq 'openid'; $delegate = 2 if lc $params{delegate} eq 'openid2'; } if (exists $params{server} && safeurl($params{server})) { push @{$metaheaders{$page}}, '' if $delegate ne 2; push @{$metaheaders{$page}}, '' if $delegate ne 1; } if (safeurl($value)) { push @{$metaheaders{$page}}, '' if $delegate ne 2; push @{$metaheaders{$page}}, '' if $delegate ne 1; } if (exists $params{"xrds-location"} && safeurl($params{"xrds-location"})) { # force url absolute eval q{use URI}; error($@) if $@; my $url=URI->new_abs($params{"xrds-location"}, $config{url}); push @{$metaheaders{$page}}, ''; } } elsif ($key eq 'foaf') { if (safeurl($value)) { push @{$metaheaders{$page}}, ''; } } elsif ($key eq 'redir') { return "" if $page ne $destpage; my $safe=0; if ($value !~ /^\w+:\/\//) { my ($redir_page, $redir_anchor) = split /\#/, $value; my $link=bestlink($page, $redir_page); if (! 
length $link) { error gettext("redir page not found") } add_depends($page, $link, deptype("presence")); $value=urlto($link, $page); $value.='#'.$redir_anchor if defined $redir_anchor; $safe=1; # redir cycle detection $pagestate{$page}{meta}{redir}=$link; my $at=$page; my %seen; while (exists $pagestate{$at}{meta}{redir}) { if ($seen{$at}) { error gettext("redir cycle is not allowed") } $seen{$at}=1; $at=$pagestate{$at}{meta}{redir}; } } else { $value=encode_entities($value); } my $delay=int(exists $params{delay} ? $params{delay} : 0); my $redir=""; if (! $safe) { $redir=scrub($redir, $page, $destpage); } push @{$metaheaders{$page}}, $redir; } elsif ($key eq 'link') { if (%params) { push @{$metaheaders{$page}}, scrub("\n", $page, $destpage); } } elsif ($key eq 'robots') { push @{$metaheaders{$page}}, ''; } elsif ($key eq 'description' || $key eq 'author') { push @{$metaheaders{$page}}, ''; } elsif ($key eq 'name') { push @{$metaheaders{$page}}, scrub('', $page, $destpage); } elsif ($key eq 'keywords') { # Make sure the keyword string is safe: only allow alphanumeric # characters, space and comma and strip the rest. $value =~ s/[^[:alnum:], ]+//g; push @{$metaheaders{$page}}, ''; } else { push @{$metaheaders{$page}}, scrub('', $page, $destpage); } return ""; } sub pagetemplate (@) { my %params=@_; my $page=$params{page}; my $destpage=$params{destpage}; my $template=$params{template}; if (exists $metaheaders{$page} && $template->query(name => "meta")) { # avoid duplicate meta lines my %seen; $template->param(meta => join("\n", grep { (! 
$seen{$_}) && ($seen{$_}=1) } @{$metaheaders{$page}})); } if (exists $pagestate{$page}{meta}{title} && $template->query(name => "title")) { eval q{use HTML::Entities}; $template->param(title => HTML::Entities::encode_numeric($pagestate{$page}{meta}{title})); $template->param(title_overridden => 1); } if (exists $pagestate{$page}{meta}{enclosure}) { $template->param(enclosure => HTML::Entities::encode_entities(IkiWiki::urlabs($pagestate{$page}{meta}{enclosure}, $config{url}))); } foreach my $field (qw{authorurl}) { eval q{use HTML::Entities}; $template->param($field => HTML::Entities::encode_entities($pagestate{$page}{meta}{$field})) if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field); } foreach my $field (qw{permalink}) { if (exists $pagestate{$page}{meta}{$field} && $template->query(name => $field)) { eval q{use HTML::Entities}; $template->param($field => HTML::Entities::encode_entities(IkiWiki::urlabs($pagestate{$page}{meta}{$field}, $config{url}))); } } foreach my $field (qw{description author}) { eval q{use HTML::Entities}; $template->param($field => HTML::Entities::encode_numeric($pagestate{$page}{meta}{$field})) if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field); } foreach my $field (qw{license copyright}) { if (exists $pagestate{$page}{meta}{$field} && $template->query(name => $field) && ($page eq $destpage || ! exists $pagestate{$destpage}{meta}{$field} || $pagestate{$page}{meta}{$field} ne $pagestate{$destpage}{meta}{$field})) { $template->param($field => htmlize($page, $destpage, $pagestate{$page}{meta}{$field})); } } } sub get_sort_key { my $page = shift; my $meta = shift; # e.g. titlesort (also makes sense for author) my $key = $pagestate{$page}{meta}{$meta . "sort"}; return $key if defined $key; # e.g. 
title $key = $pagestate{$page}{meta}{$meta}; return $key if defined $key; # fall back to closer-to-core things if ($meta eq 'title') { return pagetitle(IkiWiki::basename($page)); } elsif ($meta eq 'date') { return $IkiWiki::pagectime{$page}; } elsif ($meta eq 'updated') { return $IkiWiki::pagemtime{$page}; } else { return ''; } } sub match { my $field=shift; my $page=shift; # turn glob into a safe regexp my $re=IkiWiki::glob2re(shift); my $val; if (exists $pagestate{$page}{meta}{$field}) { $val=$pagestate{$page}{meta}{$field}; } elsif ($field eq 'title') { $val = pagetitle($page); } if (defined $val) { if ($val=~$re) { return IkiWiki::SuccessReason->new("$re matches $field of $page", $page => $IkiWiki::DEPEND_CONTENT, "" => 1); } else { return IkiWiki::FailReason->new("$re does not match $field of $page", $page => $IkiWiki::DEPEND_CONTENT, "" => 1); } } else { return IkiWiki::FailReason->new("$page does not have a $field", $page => $IkiWiki::DEPEND_CONTENT); } } package IkiWiki::PageSpec; sub match_title ($$;@) { IkiWiki::Plugin::meta::match("title", @_); } sub match_author ($$;@) { IkiWiki::Plugin::meta::match("author", @_); } sub match_authorurl ($$;@) { IkiWiki::Plugin::meta::match("authorurl", @_); } sub match_license ($$;@) { IkiWiki::Plugin::meta::match("license", @_); } sub match_copyright ($$;@) { IkiWiki::Plugin::meta::match("copyright", @_); } sub match_guid ($$;@) { IkiWiki::Plugin::meta::match("guid", @_); } package IkiWiki::SortSpec; sub cmp_meta { my $meta = shift; error(gettext("sort=meta requires a parameter")) unless defined $meta; if ($meta eq 'updated' || $meta eq 'date') { return IkiWiki::Plugin::meta::get_sort_key($a, $meta) <=> IkiWiki::Plugin::meta::get_sort_key($b, $meta); } return IkiWiki::Plugin::meta::get_sort_key($a, $meta) cmp IkiWiki::Plugin::meta::get_sort_key($b, $meta); } # A prototype of how sort=title could behave in 4.0 or something sub cmp_meta_title { $_[0] = 'title'; return cmp_meta(@_); } 1 
ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/postsparkline.pm0000644000000000000000000000576512211730001020162 0ustar #!/usr/bin/perl package IkiWiki::Plugin::postsparkline; use warnings; use strict; use IkiWiki 3.00; sub import { IkiWiki::loadplugin('sparkline'); hook(type => "getsetup", id => "postsparkline", call => \&getsetup); hook(type => "preprocess", id => "postsparkline", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { my %params=@_; if (! exists $params{max}) { $params{max}=100; } if (! exists $params{pages}) { return ""; } my $deptype; if (! exists $params{time} || $params{time} ne 'mtime') { $params{timehash} = \%IkiWiki::pagectime; # need to update when pages are added or removed $deptype = deptype("presence"); } else { $params{timehash} = \%IkiWiki::pagemtime; # need to update when pages are changed $deptype = deptype("content"); } if (! exists $params{formula}) { error gettext("missing formula") } my $formula=$params{formula}; $formula=~s/[^a-zA-Z0-9]*//g; $formula=IkiWiki::possibly_foolish_untaint($formula); if (! length $formula || ! IkiWiki::Plugin::postsparkline::formula->can($formula)) { error gettext("unknown formula"); } my @list=sort { $params{timehash}->{$b} <=> $params{timehash}->{$a} } pagespec_match_list($params{page}, $params{pages}, deptype => $deptype, filter => sub { $_[0] eq $params{page} }, ); my @data=eval qq{IkiWiki::Plugin::postsparkline::formula::$formula(\\\%params, \@list)}; if ($@) { error $@; } if (! @data) { # generate an empty graph push @data, 0 foreach 1..($params{max} / 2); } my $color=exists $params{color} ? 
"($params{color})" : ""; delete $params{pages}; delete $params{formula}; delete $params{ftime}; delete $params{color}; return IkiWiki::Plugin::sparkline::preprocess(%params, map { $_.$color => "" } reverse @data); } sub perfoo ($@) { my $sub=shift; my $params=shift; my $max=$params->{max}; my ($first, $prev, $cur); my $count=0; my @data; foreach (@_) { $cur=$sub->($params->{timehash}->{$_}); if (defined $prev) { if ($prev != $cur) { push @data, "$prev,$count"; $count=0; last if --$max <= 0; for ($cur+1 .. $prev-1) { push @data, "$_,0"; last if --$max == 0; } } } else { $first=$cur; } $count++; $prev=$cur; } return @data; } package IkiWiki::Plugin::postsparkline::formula; sub peryear (@) { return IkiWiki::Plugin::postsparkline::perfoo(sub { return (localtime $_[0])[5]; }, @_); } sub permonth (@) { return IkiWiki::Plugin::postsparkline::perfoo(sub { my ($month, $year)=(localtime $_[0])[4,5]; return $year*12+$month; }, @_); } sub perday (@) { return IkiWiki::Plugin::postsparkline::perfoo(sub { my ($year, $yday)=(localtime $_[0])[5,7]; return $year*365+$yday; }, @_); } sub interval ($@) { my $params=shift; my $max=$params->{max}; my @data; for (my $i=1; $i < @_; $i++) { push @data, $params->{timehash}->{$_[$i-1]} - $params->{timehash}->{$_[$i]}; last if --$max <= 0; } return @data; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/calendar.pm0000644000000000000000000003571612211730001017034 0ustar #! /usr/bin/perl # Copyright (c) 2006, 2007 Manoj Srivastava # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA. require 5.002; package IkiWiki::Plugin::calendar; use warnings; use strict; use IkiWiki 3.00; use Time::Local; my $time=time; my @now=localtime($time); sub import { hook(type => "getsetup", id => "calendar", call => \&getsetup); hook(type => "needsbuild", id => "calendar", call => \&needsbuild); hook(type => "preprocess", id => "calendar", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, archivebase => { type => "string", example => "archives", description => "base of the archives hierarchy", safe => 1, rebuild => 1, }, archive_pagespec => { type => "pagespec", example => "page(posts/*) and !*/Discussion", description => "PageSpec of pages to include in the archives; used by ikiwiki-calendar command", link => 'ikiwiki/PageSpec', safe => 1, rebuild => 0, }, } sub is_leap_year (@) { my %params=@_; return ($params{year} % 4 == 0 && (($params{year} % 100 != 0) || $params{year} % 400 == 0)); } sub month_days { my %params=@_; my $days_in_month = (31,28,31,30,31,30,31,31,30,31,30,31)[$params{month}-1]; if ($params{month} == 2 && is_leap_year(%params)) { $days_in_month++; } return $days_in_month; } sub format_month (@) { my %params=@_; my %linkcache; foreach my $p (pagespec_match_list($params{page}, "creation_year($params{year}) and creation_month($params{month}) and ($params{pages})", # add presence dependencies to update # month calendar when pages are added/removed deptype => deptype("presence"))) { my $mtime = $IkiWiki::pagectime{$p}; my @date = localtime($mtime); my $mday = $date[3]; my $month = $date[4] + 1; my $year = $date[5] + 1900; my $mtag = sprintf("%02d", $month); if (! 
$linkcache{"$year/$mtag/$mday"}) { $linkcache{"$year/$mtag/$mday"} = []; } push(@{$linkcache{"$year/$mtag/$mday"}}, $p); } my $pmonth = $params{month} - 1; my $nmonth = $params{month} + 1; my $pyear = $params{year}; my $nyear = $params{year}; # Adjust for January and December if ($params{month} == 1) { $pmonth = 12; $pyear--; } if ($params{month} == 12) { $nmonth = 1; $nyear++; } # Add padding. $pmonth=sprintf("%02d", $pmonth); $nmonth=sprintf("%02d", $nmonth); my $calendar="\n"; # When did this month start? my @monthstart = localtime(timelocal(0,0,0,1,$params{month}-1,$params{year}-1900)); my $future_dom = 0; my $today = 0; if ($params{year} == $now[5]+1900 && $params{month} == $now[4]+1) { $future_dom = $now[3]+1; $today = $now[3]; } # Find out month names for this, next, and previous months my $monthabbrev=strftime_utf8("%b", @monthstart); my $monthname=strftime_utf8("%B", @monthstart); my $pmonthname=strftime_utf8("%B", localtime(timelocal(0,0,0,1,$pmonth-1,$pyear-1900))); my $nmonthname=strftime_utf8("%B", localtime(timelocal(0,0,0,1,$nmonth-1,$nyear-1900))); my $archivebase = 'archives'; $archivebase = $config{archivebase} if defined $config{archivebase}; $archivebase = $params{archivebase} if defined $params{archivebase}; # Calculate URL's for monthly archives. 
my ($url, $purl, $nurl)=("$monthname $params{year}",'',''); if (exists $pagesources{"$archivebase/$params{year}/$params{month}"}) { $url = htmllink($params{page}, $params{destpage}, "$archivebase/$params{year}/".$params{month}, noimageinline => 1, linktext => "$monthabbrev $params{year}", title => $monthname); } add_depends($params{page}, "$archivebase/$params{year}/$params{month}", deptype("presence")); if (exists $pagesources{"$archivebase/$pyear/$pmonth"}) { $purl = htmllink($params{page}, $params{destpage}, "$archivebase/$pyear/$pmonth", noimageinline => 1, linktext => "\←", title => $pmonthname); } add_depends($params{page}, "$archivebase/$pyear/$pmonth", deptype("presence")); if (exists $pagesources{"$archivebase/$nyear/$nmonth"}) { $nurl = htmllink($params{page}, $params{destpage}, "$archivebase/$nyear/$nmonth", noimageinline => 1, linktext => "\→", title => $nmonthname); } add_depends($params{page}, "$archivebase/$nyear/$nmonth", deptype("presence")); # Start producing the month calendar $calendar=< $purl $url $nurl EOF # Suppose we want to start the week with day $week_start_day # If $monthstart[6] == 1 my $week_start_day = $params{week_start_day}; my $start_day = 1 + (7 - $monthstart[6] + $week_start_day) % 7; my %downame; my %dowabbr; for my $dow ($week_start_day..$week_start_day+6) { my @day=localtime(timelocal(0,0,0,$start_day++,$params{month}-1,$params{year}-1900)); my $downame = strftime_utf8("%A", @day); my $dowabbr = substr($downame, 0, 1); $downame{$dow % 7}=$downame; $dowabbr{$dow % 7}=$dowabbr; $calendar.= qq{\t\t$dowabbr\n}; } $calendar.=< EOF my $wday; # we start with a week_start_day, and skip until we get to the first for ($wday=$week_start_day; $wday != $monthstart[6]; $wday++, $wday %= 7) { $calendar.=qq{\t\n} if $wday == $week_start_day; $calendar.=qq{\t\t \n}; } # At this point, either the first is a week_start_day, in which case # nothing has been printed, or else we are in the middle of a row. 
for (my $day = 1; $day <= month_days(year => $params{year}, month => $params{month}); $day++, $wday++, $wday %= 7) { # At this point, on a week_start_day, we close out a row, # and start a new one -- unless it is week_start_day on the # first, where we do not close a row -- since none was started. if ($wday == $week_start_day) { $calendar.=qq{\t\n} unless $day == 1; $calendar.=qq{\t\n}; } my $tag; my $key="$params{year}/$params{month}/$day"; if (defined $linkcache{$key}) { if ($day == $today) { $tag='month-calendar-day-this-day'; } else { $tag='month-calendar-day-link'; } $calendar.=qq{\t\t}; $calendar.=qq{}; $calendar.=qq{\n}; } else { if ($day == $today) { $tag='month-calendar-day-this-day'; } elsif ($day == $future_dom) { $tag='month-calendar-day-future'; } else { $tag='month-calendar-day-nolink'; } $calendar.=qq{\t\t$day\n}; } } # finish off the week for (; $wday != $week_start_day; $wday++, $wday %= 7) { $calendar.=qq{\t\t \n}; } $calendar.=< EOF return $calendar; } sub format_year (@) { my %params=@_; my @post_months; foreach my $p (pagespec_match_list($params{page}, "creation_year($params{year}) and ($params{pages})", # add presence dependencies to update # year calendar's links to months when # pages are added/removed deptype => deptype("presence"))) { my $mtime = $IkiWiki::pagectime{$p}; my @date = localtime($mtime); my $month = $date[4] + 1; $post_months[$month]++; } my $calendar="\n"; my $pyear = $params{year} - 1; my $nyear = $params{year} + 1; my $thisyear = $now[5]+1900; my $future_month = 0; $future_month = $now[4]+1 if $params{year} == $thisyear; my $archivebase = 'archives'; $archivebase = $config{archivebase} if defined $config{archivebase}; $archivebase = $params{archivebase} if defined $params{archivebase}; # calculate URL's for previous and next years my ($url, $purl, $nurl)=("$params{year}",'',''); if (exists $pagesources{"$archivebase/$params{year}"}) { $url = htmllink($params{page}, $params{destpage}, "$archivebase/$params{year}", 
noimageinline => 1, linktext => $params{year}, title => $params{year}); } add_depends($params{page}, "$archivebase/$params{year}", deptype("presence")); if (exists $pagesources{"$archivebase/$pyear"}) { $purl = htmllink($params{page}, $params{destpage}, "$archivebase/$pyear", noimageinline => 1, linktext => "\←", title => $pyear); } add_depends($params{page}, "$archivebase/$pyear", deptype("presence")); if (exists $pagesources{"$archivebase/$nyear"}) { $nurl = htmllink($params{page}, $params{destpage}, "$archivebase/$nyear", noimageinline => 1, linktext => "\→", title => $nyear); } add_depends($params{page}, "$archivebase/$nyear", deptype("presence")); # Start producing the year calendar my $m=$params{months_per_row}-2; $calendar=< $purl $url $nurl Months EOF for (my $month = 1; $month <= 12; $month++) { my @day=localtime(timelocal(0,0,0,15,$month-1,$params{year}-1900)); my $murl; my $monthname = strftime_utf8("%B", @day); my $monthabbr = strftime_utf8("%b", @day); $calendar.=qq{\t\n} if ($month % $params{months_per_row} == 1); my $tag; my $mtag=sprintf("%02d", $month); if ($month == $params{month} && $thisyear == $params{year}) { $tag = 'year-calendar-this-month'; } elsif ($pagesources{"$archivebase/$params{year}/$mtag"}) { $tag = 'year-calendar-month-link'; } elsif ($future_month && $month >= $future_month) { $tag = 'year-calendar-month-future'; } else { $tag = 'year-calendar-month-nolink'; } if ($pagesources{"$archivebase/$params{year}/$mtag"} && $post_months[$mtag]) { $murl = htmllink($params{page}, $params{destpage}, "$archivebase/$params{year}/$mtag", noimageinline => 1, linktext => $monthabbr, title => $monthname); $calendar.=qq{\t}; $calendar.=$murl; $calendar.=qq{\t\n}; } else { $calendar.=qq{\t$monthabbr\n}; } add_depends($params{page}, "$archivebase/$params{year}/$mtag", deptype("presence")); $calendar.=qq{\t\n} if ($month % $params{months_per_row} == 0); } $calendar.=< EOF return $calendar; } sub setnextchange ($$) { my $page=shift; my $timestamp=shift; 
if (! exists $pagestate{$page}{calendar}{nextchange} || $pagestate{$page}{calendar}{nextchange} > $timestamp) { $pagestate{$page}{calendar}{nextchange}=$timestamp; } } sub preprocess (@) { my %params=@_; my $thisyear=1900 + $now[5]; my $thismonth=1 + $now[4]; $params{pages} = "*" unless defined $params{pages}; $params{type} = "month" unless defined $params{type}; $params{week_start_day} = 0 unless defined $params{week_start_day}; $params{months_per_row} = 3 unless defined $params{months_per_row}; $params{year} = $thisyear unless defined $params{year}; $params{month} = $thismonth unless defined $params{month}; my $relativeyear=0; if ($params{year} < 1) { $relativeyear=1; $params{year}=$thisyear+$params{year}; } my $relativemonth=0; if ($params{month} < 1) { $relativemonth=1; my $monthoff=$params{month}; $params{month}=($thismonth+$monthoff) % 12; $params{month}=12 if $params{month}==0; my $yearoff=POSIX::ceil(($thismonth-$params{month}) / -12) - int($monthoff / 12); $params{year}-=$yearoff; } $params{month} = sprintf("%02d", $params{month}); if ($params{type} eq 'month' && $params{year} == $thisyear && $params{month} == $thismonth) { # calendar for current month, updates next midnight setnextchange($params{destpage}, ($time + (60 - $now[0]) # seconds + (59 - $now[1]) * 60 # minutes + (23 - $now[2]) * 60 * 60 # hours )); } elsif ($params{type} eq 'month' && (($params{year} == $thisyear && $params{month} > $thismonth) || $params{year} > $thisyear)) { # calendar for upcoming month, updates 1st of that month setnextchange($params{destpage}, timelocal(0, 0, 0, 1, $params{month}-1, $params{year})); } elsif (($params{type} eq 'year' && $params{year} == $thisyear) || $relativemonth) { # Calendar for current year updates 1st of next month. # Any calendar relative to the current month also updates # then. 
if ($thismonth < 12) { setnextchange($params{destpage}, timelocal(0, 0, 0, 1, $thismonth+1-1, $params{year})); } else { setnextchange($params{destpage}, timelocal(0, 0, 0, 1, 1-1, $params{year}+1)); } } elsif ($relativeyear) { # Any calendar relative to the current year updates 1st # of next year. setnextchange($params{destpage}, timelocal(0, 0, 0, 1, 1-1, $thisyear+1)); } elsif ($params{type} eq 'year' && $params{year} > $thisyear) { # calendar for upcoming year, updates 1st of that year setnextchange($params{destpage}, timelocal(0, 0, 0, 1, 1-1, $params{year})); } else { # calendar for past month or year, does not need # to update any more delete $pagestate{$params{destpage}}{calendar}; } my $calendar=""; if ($params{type} eq 'month') { $calendar=format_month(%params); } elsif ($params{type} eq 'year') { $calendar=format_year(%params); } return "\n
$calendar
\n"; } #}} sub needsbuild (@) { my $needsbuild=shift; foreach my $page (keys %pagestate) { if (exists $pagestate{$page}{calendar}{nextchange}) { if ($pagestate{$page}{calendar}{nextchange} <= $time) { # force a rebuild so the calendar shows # the current day push @$needsbuild, $pagesources{$page}; } if (exists $pagesources{$page} && grep { $_ eq $pagesources{$page} } @$needsbuild) { # remove state, will be re-added if # the calendar is still there during the # rebuild delete $pagestate{$page}{calendar}; } } } return $needsbuild; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/anonok.pm0000644000000000000000000000157412211730001016543 0ustar #!/usr/bin/perl package IkiWiki::Plugin::anonok; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "anonok", call => \&getsetup); hook(type => "canedit", id => "anonok", call => \&canedit); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "auth", }, anonok_pagespec => { type => "pagespec", example => "*/discussion", description => "PageSpec to limit which pages anonymous users can edit", link => "ikiwiki/PageSpec", safe => 1, rebuild => 0, }, } sub canedit ($$$) { my $page=shift; my $cgi=shift; my $session=shift; my $ret; if (exists $config{anonok_pagespec} && length $config{anonok_pagespec}) { if (pagespec_match($page, $config{anonok_pagespec}, location => $page)) { return ""; } else { return undef; } } else { return ""; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/smiley.pm0000644000000000000000000000460512211730001016556 0ustar #!/usr/bin/perl package IkiWiki::Plugin::smiley; use warnings; use strict; use IkiWiki 3.00; my %smileys; my $smiley_regexp; sub import { add_underlay("smiley"); hook(type => "getsetup", id => "smiley", call => \&getsetup); hook(type => "sanitize", id => "smiley", call => \&sanitize); } sub getsetup () { return plugin => { safe => 1, # force a rebuild because turning it off # removes the smileys, which would break links rebuild => 1, }, 
} sub build_regexp () { my $srcfile = srcfile("smileys.mdwn", 1); if (! defined $srcfile) { print STDERR sprintf(gettext("smiley plugin will not work without %s"), "smileys.mdwn")."\n"; $smiley_regexp=''; return; } my $list=readfile($srcfile); while ($list =~ m/^\s*\*\s+\\\\([^\s]+)\s+\[\[([^]]+)\]\]/mg) { my $smiley=$1; my $file=$2; $smileys{$smiley}=$file; # Add a version with < and > escaped, since they probably # will be (by markdown) by the time the sanitize hook runs. $smiley=~s//>/g; $smileys{$smiley}=$file; } if (! %smileys) { debug(gettext("failed to parse any smileys")); $smiley_regexp=''; return; } # sort and reverse so that substrings come after longer strings # that contain them, in most cases. $smiley_regexp='('.join('|', map { quotemeta } reverse sort keys %smileys).')'; #debug($smiley_regexp); } sub sanitize (@) { my %params=@_; build_regexp() unless defined $smiley_regexp; $_=$params{content}; return $_ unless length $smiley_regexp; MATCH: while (m{(?:^|(?<=\s|>))(\\?)$smiley_regexp(?:(?=\s|<)|$)}g) { my $escape=$1; my $smiley=$2; my $epos=$-[1]; my $spos=$-[2]; # Smilies are not allowed inside
 or .
		# For each tag in turn, match forward to find the next 
		# or  after the smiley.
		my $pos=pos;
		foreach my $tag ("pre", "code") {
			if (m/<(\/)?\s*$tag\s*>/isg && defined $1) {
				#  found first, so the smiley is
				# inside the tag, so do not expand it.
				next MATCH;
			}
			# Reset pos back to where it was before this test.
			pos=$pos;
		}
	
		if ($escape) {
			# Remove escape.
			substr($_, $epos, 1)="";
			pos=$epos+1;
		}
		else {
			# Replace the smiley with its expanded value.
			my $link=htmllink($params{page}, $params{destpage},
				         $smileys{$smiley}, linktext => $smiley);
			substr($_, $spos, length($smiley))=$link;
			pos=$epos+length($link);
		}
	}

	return $_;
}

1
ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/version.pm0000644000000000000000000000206212211730001016734 0ustar  #!/usr/bin/perl
# Ikiwiki version plugin.
package IkiWiki::Plugin::version;

use warnings;
use strict;
use IkiWiki 3.00;

sub import {
	# Register this plugin's hooks with ikiwiki's hook system.
	my @hookspecs=(
		[getsetup   => \&getsetup],
		[needsbuild => \&needsbuild],
		[preprocess => \&preprocess],
	);
	foreach my $spec (@hookspecs) {
		hook(type => $spec->[0], id => "version", call => $spec->[1]);
	}
}

sub getsetup () {
	# Plugin metadata for websetup: safe to enable from the web,
	# rebuild requirement unknown (undef), listed as a widget.
	my %plugin_info=(
		safe => 1,
		rebuild => undef,
		section => "widget",
	);
	return plugin => { %plugin_info },
}

sub needsbuild (@) {
	my $needsbuild=shift;
	# Queue a rebuild for any page whose recorded "shown" version
	# differs from the currently running ikiwiki version.
	foreach my $page (keys %pagestate) {
		next unless exists $pagestate{$page}{version}{shown};
		push @$needsbuild, $pagesources{$page}
			if $pagestate{$page}{version}{shown} ne $IkiWiki::version;
		if (exists $pagesources{$page} &&
		    grep { $_ eq $pagesources{$page} } @$needsbuild) {
			# Drop the stored state; preprocess will re-add it
			# if the version directive is still present when
			# the page is rebuilt.
			delete $pagestate{$page}{version}{shown};
		}
	}
	return $needsbuild;
}

sub preprocess (@) {
	my %params=@_;
	# Record which version was rendered onto the destination page
	# (needsbuild compares this against the running version to
	# detect staleness).  The assignment's value — the version
	# string itself — is the sub's implicit return value and
	# becomes the directive's expansion; do not add a trailing
	# statement here or the displayed text would change.
	$pagestate{$params{destpage}}{version}{shown}=$IkiWiki::version;
}

1
ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/amazon_s3.pm0000644000000000000000000001413112211730001017141 0ustar  #!/usr/bin/perl
package IkiWiki::Plugin::amazon_s3;

use warnings;
no warnings 'redefine';
use strict;
use IkiWiki 3.00;
use IkiWiki::Render;
use Net::Amazon::S3;

# Store references to real subs before overriding them.
our %subs;
BEGIN {
	# Capture code refs to the original IkiWiki::writefile and
	# IkiWiki::prune at compile time, before this module's replacement
	# definitions (in package IkiWiki, below) shadow them. The wrappers
	# chain to these saved originals.
	foreach my $sub (qw{IkiWiki::writefile IkiWiki::prune}) {
		$subs{$sub}=\&$sub;
	}
};

# Wire this plugin's hooks into ikiwiki.
sub import {
	my %callback=(
		getopt      => \&getopt,
		getsetup    => \&getsetup,
		checkconfig => \&checkconfig,
	);
	for my $type (qw{getopt getsetup checkconfig}) {
		hook(type => $type, id => "amazon_s3", call => $callback{$type});
	}
}

# getopt hook: handle the --delete-bucket command line switch, which
# empties the wiki's S3 bucket and then removes it, exiting afterwards.
sub getopt () {
        eval q{use Getopt::Long};
        error($@) if $@;
        Getopt::Long::Configure('pass_through');
        GetOptions("delete-bucket" => sub {
		my $bucket=getbucket();
		debug(gettext("deleting bucket.."));
		# S3 requires a bucket to be empty before it can be deleted,
		# so remove every key first.
		my $resp = $bucket->list_all or die $bucket->err . ": " . $bucket->errstr;
		foreach my $key (@{$resp->{keys}}) {
			debug("\t".$key->{key});
			$bucket->delete_key($key->{key}) or die $bucket->err . ": " . $bucket->errstr;
		}
		$bucket->delete_bucket or die $bucket->err . ": " . $bucket->errstr;
		debug(gettext("done"));
		exit(0);
	});
}

# getsetup hook: describe this plugin and its configuration options.
sub getsetup () {
	return
		plugin => {
			# Not websetup-safe: it reads a secret key file and
			# talks to S3 with the wiki admin's credentials.
			safe => 0,
			rebuild => 0,
		},
		amazon_s3_key_id => {
			type => "string",
			example => "XXXXXXXXXXXXXXXXXXXX",
			description => "public access key id",
			safe => 1,
			rebuild => 0,
		},
		# Fixed: this entry was mistakenly also keyed as
		# amazon_s3_key_id; the duplicate meant the required
		# amazon_s3_key_file option (checked by checkconfig)
		# was never advertised to setup.
		amazon_s3_key_file => {
			type => "string",
			example => "$ENV{HOME}/.s3_key",
			description => "file holding secret key (must not be readable by others!)",
			safe => 0, # ikiwiki reads this file
			rebuild => 0,
		},
		amazon_s3_bucket => {
			type => "string",
			example => "mywiki",
			description => "globally unique name of bucket to store wiki in",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_prefix => {
			type => "string",
			example => "wiki/",
			description => "a prefix to prepend to each page name",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_location => {
			type => "string",
			example => "EU",
			description => "which S3 datacenter to use (leave blank for default)",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_dupindex => {
			type => "boolean",
			example => 0,
			description => "store each index file twice? (allows urls ending in \"/index.html\" and \"/\")",
			safe => 1,
			rebuild => 1,
		},
}

# checkconfig hook: refuse to run without the credentials and bucket
# settings, and default the in-bucket key prefix to "wiki/".
sub checkconfig {
	# These have no sane default; error out early rather than failing
	# mid-build when S3 is first contacted.
	foreach my $field (qw{amazon_s3_key_id amazon_s3_key_file
	                      amazon_s3_bucket}) {
		if (! exists $config{$field} || ! defined $config{$field}) {
			error(sprintf(gettext("Must specify %s"), $field));
		}
	}
	if (! exists $config{amazon_s3_prefix} ||
	    ! defined $config{amazon_s3_prefix}) {
	    $config{amazon_s3_prefix}="wiki/";
	}
}

{
my $bucket;
# Lazily connect to S3 and return the bucket object, caching it in the
# enclosing lexical so the connection is established only once per run.
sub getbucket {
	return $bucket if defined $bucket;
	
	# Read the secret key from the configured key file.
	# Fixed: the readline had lost its filehandle ("my $key=;"),
	# leaving $key undefined; restored using a lexical filehandle.
	open(my $keyfh, "<", $config{amazon_s3_key_file})
		|| error($config{amazon_s3_key_file}.": ".$!);
	my $key=<$keyfh>;
	chomp $key;
	close $keyfh;

	my $s3=Net::Amazon::S3->new({
		aws_access_key_id => $config{amazon_s3_key_id},
		aws_secret_access_key => $key,
		retry => 1,
	});

	# make sure the bucket exists
	if (exists $config{amazon_s3_location}) {
		$bucket=$s3->add_bucket({
			bucket => $config{amazon_s3_bucket},
			location_constraint => $config{amazon_s3_location},
		});
	}
	else {
		$bucket=$s3->add_bucket({
			bucket => $config{amazon_s3_bucket},
		});
	}

	if (! $bucket) {
		# Try to use existing bucket.
		$bucket=$s3->bucket($config{amazon_s3_bucket});
	}
	if (! $bucket) {
		error(gettext("Failed to create S3 bucket: ").
			$s3->err.": ".$s3->errstr."\n");
	}

	return $bucket;
}
}

# Given a file, return any S3 keys associated with it.
sub file2keys ($) {
	my $file=shift;

	my @keys;
	if ($file =~ /^\Q$config{destdir}\/\E(.*)/) {
		push @keys, $config{amazon_s3_prefix}.$1;

		# Munge foo/index.html to foo/
		if ($keys[0]=~/(^|.*\/)index.$config{htmlext}$/) {
			# A duplicate might need to be stored under the
			# unmunged name too.
			if (!$config{usedirs} || $config{amazon_s3_dupindex}) {
				push @keys, $1;
			}
			else {
				@keys=($1);
			}
		}
	}
	return @keys;
}

package IkiWiki;
use File::MimeInfo;
use Encode;

# This is a wrapper around the real writefile.
# This is a wrapper around the real writefile (saved in %subs at compile
# time). It writes the file to disk as usual, then mirrors it into the
# S3 bucket under every key file2keys maps it to. Interface matches
# IkiWiki::writefile: (file, destdir, content, binary?, writer?).
sub writefile ($$$;$$) {
        my $file=shift;
        my $destdir=shift;
        my $content=shift;
        my $binary=shift;
        my $writer=shift;

	# First, write the file to disk.
	my $ret=$IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::writefile'}->($file, $destdir, $content, $binary, $writer);

	my @keys=IkiWiki::Plugin::amazon_s3::file2keys("$destdir/$file");

	# Store the data in S3.
	if (@keys) {
		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();

		# The http layer tries to downgrade utf-8
		# content, but that can fail (see
		# http://rt.cpan.org/Ticket/Display.html?id=35710),
		# so force convert it to bytes.
		$content=encode_utf8($content) if defined $content;

		my %opts=(
			acl_short => 'public-read',
			content_type => mimetype("$destdir/$file"),
		);

		# If there are multiple keys to write, data is sent
		# multiple times.
		# TODO: investigate using the new copy operation.
		#       (It may not be robust enough.)
		foreach my $key (@keys) {
			my $res;
			if (! $writer) {
				$res=$bucket->add_key($key, $content, \%opts);
			}
			else {
				# This test for empty files is a workaround
				# for this bug:
				# http://rt.cpan.org//Ticket/Display.html?id=35731
				if (-z "$destdir/$file") {
					$res=$bucket->add_key($key, "", \%opts);
				}
				else {
					# read back in the file that the writer emitted
					$res=$bucket->add_key_filename($key, "$destdir/$file", \%opts);
				}
			}
			if (! $res) {
				error(gettext("Failed to save file to S3: ").
					$bucket->err.": ".$bucket->errstr."\n");
			}
		}
	}

	return $ret;
}

# This is a wrapper around the real prune.
# This is a wrapper around the real prune (saved in %subs at compile
# time). Deletes the file's S3 keys first, then chains to the original
# prune to remove it from disk. Interface matches IkiWiki::prune.
sub prune ($;$) {
	my $file=shift;
	my $up_to=shift;

	my @keys=IkiWiki::Plugin::amazon_s3::file2keys($file);

	# Prune files out of S3 too.
	if (@keys) {
		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();

		foreach my $key (@keys) {
			my $res=$bucket->delete_key($key);
			if (! $res) {
				error(gettext("Failed to delete file from S3: ").
					$bucket->err.": ".$bucket->errstr."\n");
			}
		}
	}

	return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file, $up_to);
}

1
ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/flattr.pm0000644000000000000000000000431312211730001016544 0ustar  #!/usr/bin/perl
package IkiWiki::Plugin::flattr;

use warnings;
use strict;
use IkiWiki 3.00;

# Hook registration: setup metadata, the [[!flattr]] directive handler,
# and a format pass that injects Flattr's javascript into pages.
sub import {
	my %handler=(
		getsetup   => \&getsetup,
		preprocess => \&preprocess,
		format     => \&format,
	);
	foreach my $type (qw{getsetup preprocess format}) {
		hook(type => $type, id => "flattr", call => $handler{$type});
	}
}

# Describe the plugin and its single configuration option
# (the default Flattr user id for buttons).
sub getsetup () {
	my %plugin_info=(
		safe => 1,
		rebuild => undef,
	);
	my %userid_option=(
		type => "string",
		example => 'joeyh',
		description => "userid or user name to use by default for Flattr buttons",
		advanced => 0,
		safe => 1,
		rebuild => undef,
	);
	return (
		plugin => \%plugin_info,
		flattr_userid => \%userid_option,
	);
}

my %flattr_pages;

# preprocess hook for the [[!flattr]] directive: emit a Flattr button
# for the page and remember that the destination page needs the Flattr
# javascript added by the format hook.
sub preprocess (@) {
	my %params=@_;

	$flattr_pages{$params{destpage}}=1;

	# URL the button flatters; defaults to the page's own absolute url.
	my $url=$params{url};
	if (! defined $url) {
		$url=urlto($params{page}, "", 1);
	}

	# Collect "key:value" pairs for Flattr's button metadata.
	my @fields;
	foreach my $field (qw{language uid button hidden category tags}) {
		if (exists $params{$field}) {
			push @fields, "$field:$params{$field}";
		}
	}
	
	# NOTE(review): the returned markup appears truncated -- the anchor
	# tag that should wrap the description (carrying $url and @fields,
	# which are otherwise unused) seems to have been stripped, leaving
	# two empty string literals. TODO: restore the original
	# <a class="FlattrButton" ...> markup from upstream.
	return ''.
		(exists $params{description} ? $params{description} : '').
		'';
}

# format hook: inject Flattr's javascript loader into any page that
# rendered at least one flattr button.
sub format (@) {
	my %params=@_;

	# Add flattr's javascript to pages with flattr buttons.
	if ($flattr_pages{$params{page}}) {
		# NOTE(review): this substitution pattern looks damaged; it
		# was presumably m!^(<body[^>]*>)! so the script is inserted
		# right after the opening body tag. TODO: verify against
		# upstream flattr.pm.
		if (! ($params{content}=~s!^(]*>)!$1.flattrjs()!em)) {
			# no  tag, probably in preview mode
			$params{content}=flattrjs().$params{content};
		}
	}
	return $params{content};
}

my $js_cached;
# Build (once) and return the HTML snippet that loads Flattr's external
# javascript, optionally pinned to the configured flattr_userid.
sub flattrjs {
	return $js_cached if defined $js_cached;

	my $js_url='https://api.flattr.com/js/0.5.0/load.js?mode=auto';
	if (defined $config{flattr_userid}) {
		my $userid=$config{flattr_userid};
		$userid=~s/[^-A-Za-z0-9_]//g; # sanitize for inclusion in javascript
		$js_url.="&uid=$userid";
	}

	# This is Flattr's standard javascript snippet to include their
	# external javascript file, asynchronously.
	# NOTE(review): the heredoc body is empty; the <script> loader
	# markup (which should interpolate $js_url) appears to have been
	# stripped. TODO: restore the snippet from upstream.
	return $js_cached=<<"EOF";

EOF
}

1
ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/pagetemplate.pm0000644000000000000000000000162612211730000017723 0ustar  #!/usr/bin/perl
package IkiWiki::Plugin::pagetemplate;

use warnings;
use strict;
use IkiWiki 3.00;

my %templates;

# Register hooks: setup metadata, the [[!pagetemplate]] directive
# handler, and the templatefile hook that applies the chosen template.
sub import {
	my %callback=(
		getsetup     => \&getsetup,
		preprocess   => \&preprocess,
		templatefile => \&templatefile,
	);
	foreach my $type (qw{getsetup preprocess templatefile}) {
		hook(type => $type, id => "pagetemplate", call => $callback{$type});
	}
}

# Report plugin metadata for ikiwiki's setup machinery.
sub getsetup () {
	my %plugin_info=(
		safe => 1,
		rebuild => undef,
	);
	return (plugin => \%plugin_info);
}

# preprocess hook for the [[!pagetemplate]] directive: validate the
# requested template name and record it for the templatefile hook.
# Expands to nothing on the page.
sub preprocess (@) {
	my %params=@_;

	# Restrict to simple filenames and refuse templates that do not
	# exist, so page authors cannot reference arbitrary paths.
	if (! exists $params{template} ||
	    $params{template} !~ /^[-A-Za-z0-9._+]+$/ ||
	    ! defined IkiWiki::template_file($params{template})) {
		 error gettext("bad or missing template")
	}

	# Only honor the directive on the page itself, not when the page
	# is being inlined into some other destination page.
	if ($params{page} eq $params{destpage}) {
		$templates{$params{page}}=$params{template};
	}

	return "";
}

# templatefile hook: return the template chosen for this page via the
# pagetemplate directive, or undef to fall back to the default template.
sub templatefile (@) {
	my %args=@_;
	my $page=$args{page};

	return exists $templates{$page} ? $templates{$page} : undef;
}

1
ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/editdiff.pm0000644000000000000000000000276012211730001017032 0ustar  #!/usr/bin/perl
# This plugin adds a "Diff" button to the page edit form.
package IkiWiki::Plugin::editdiff;

use warnings;
use strict;
use IkiWiki 3.00;
use HTML::Entities;
use IPC::Open2;

# Register hooks: setup metadata plus a formbuilder hook that adds the
# "Diff" button to the page edit form.
sub import {
	my %callback=(
		getsetup          => \&getsetup,
		formbuilder_setup => \&formbuilder_setup,
	);
	foreach my $type (qw{getsetup formbuilder_setup}) {
		hook(type => $type, id => "editdiff", call => $callback{$type});
	}
}

# Plugin metadata: websetup-safe, never forces a rebuild, and listed
# under the "web" section of the setup interface.
sub getsetup () {
	my %plugin_info=(
		safe => 1,
		rebuild => 0,
		section => "web",
	);
	return (plugin => \%plugin_info);
}

sub diff ($$) {
	my $orig=shift;
	my $content=shift;

	my $sigpipe=0;
	$SIG{PIPE} = sub { $sigpipe=1; };

	my $pid = open2(*DIFFOUT, *DIFFIN, 'diff', '-u', $orig, '-');
	binmode($_, ':utf8') foreach (*DIFFIN, *DIFFOUT);

	print DIFFIN $content;
	close DIFFIN;
	my $ret='';
	while () {
		if (defined $ret) {
			$ret.=$_;
		}
		elsif (/^\@\@/) {
			$ret=$_;
		}
	}
	close DIFFOUT;
	waitpid $pid, 0;

	$SIG{PIPE}="default";
	return "couldn't run diff\n" if $sigpipe;

	return "
".encode_entities($ret)."
"; } sub formbuilder_setup { my %params=@_; my $form=$params{form}; return if ! defined $form->field("do") || $form->field("do") ne "edit"; my $page=$form->field("page"); $page = IkiWiki::possibly_foolish_untaint($page); return unless exists $pagesources{$page}; push @{$params{buttons}}, "Diff"; if ($form->submitted eq "Diff") { my $content=$form->field('editcontent'); $content=~s/\r\n/\n/g; $content=~s/\r/\n/g; my $diff = diff(srcfile($pagesources{$page}), $content); $form->tmpl_param("page_diff", $diff); } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/editpage.pm0000644000000000000000000003250212211730000017032 0ustar #!/usr/bin/perl package IkiWiki::Plugin::editpage; use warnings; use strict; use IkiWiki; use open qw{:utf8 :std}; sub import { hook(type => "getsetup", id => "editpage", call => \&getsetup); hook(type => "refresh", id => "editpage", call => \&refresh); hook(type => "sessioncgi", id => "editpage", call => \&IkiWiki::cgi_editpage); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, section => "core", }, } sub refresh () { if (exists $wikistate{editpage} && exists $wikistate{editpage}{previews}) { # Expire old preview files after one hour. my $expire=time - (60 * 60); my @previews; foreach my $file (@{$wikistate{editpage}{previews}}) { my $mtime=(stat("$config{destdir}/$file"))[9]; if (defined $mtime && $mtime <= $expire) { # Avoid deleting a preview that was later saved. my $delete=1; foreach my $page (keys %renderedfiles) { if (grep { $_ eq $file } @{$renderedfiles{$page}}) { $delete=0; } } if ($delete) { debug(sprintf(gettext("removing old preview %s"), $file)); IkiWiki::prune("$config{destdir}/$file", $config{destdir}); } } elsif (defined $mtime) { push @previews, $file; } } $wikistate{editpage}{previews}=\@previews; } } # Back to ikiwiki namespace for the rest, this code is very much # internal to ikiwiki even though it's separated into a plugin, # and other plugins use the function below. 
package IkiWiki; sub cgi_editpage ($$) { my $q=shift; my $session=shift; my $do=$q->param('do'); return unless $do eq 'create' || $do eq 'edit'; decode_cgi_utf8($q); my @fields=qw(do rcsinfo subpage from page type editcontent editmessage subscribe); my @buttons=("Save Page", "Preview", "Cancel"); eval q{use CGI::FormBuilder}; error($@) if $@; my $form = CGI::FormBuilder->new( fields => \@fields, charset => "utf-8", method => 'POST', required => [qw{editcontent}], javascript => 0, params => $q, action => IkiWiki::cgiurl(), header => 0, table => 0, template => { template("editpage.tmpl") }, ); decode_form_utf8($form); run_hooks(formbuilder_setup => sub { shift->(form => $form, cgi => $q, session => $session, buttons => \@buttons); }); decode_form_utf8($form); # This untaint is safe because we check file_pruned and # wiki_file_regexp. my ($page)=$form->field('page')=~/$config{wiki_file_regexp}/; if (! defined $page) { error(gettext("bad page name")); } $page=possibly_foolish_untaint($page); my $absolute=($page =~ s#^/+##); # absolute name used to force location if (! defined $page || ! length $page || file_pruned($page)) { error(gettext("bad page name")); } my $baseurl = urlto($page); my $from; if (defined $form->field('from')) { ($from)=$form->field('from')=~/$config{wiki_file_regexp}/; } my $file; my $type; if (exists $pagesources{$page} && $form->field("do") ne "create") { $file=$pagesources{$page}; $type=pagetype($file); if (! defined $type || $type=~/^_/) { error(sprintf(gettext("%s is not an editable page"), $page)); } if (! 
$form->submitted) { $form->field(name => "rcsinfo", value => rcs_prepedit($file), force => 1); } $form->field(name => "editcontent", validate => '/.*/'); } else { $type=$form->param('type'); if (defined $type && length $type && $hooks{htmlize}{$type}) { $type=possibly_foolish_untaint($type); } elsif (defined $from && exists $pagesources{$from}) { # favor the type of linking page $type=pagetype($pagesources{$from}); } $type=$config{default_pageext} if ! defined $type || $type=~/^_/; # not internal type $file=newpagefile($page, $type); if (! $form->submitted) { $form->field(name => "rcsinfo", value => "", force => 1); } $form->field(name => "editcontent", validate => '/.+/'); } $form->field(name => "do", type => 'hidden'); $form->field(name => "sid", type => "hidden", value => $session->id, force => 1); $form->field(name => "from", type => 'hidden'); $form->field(name => "rcsinfo", type => 'hidden'); $form->field(name => "subpage", type => 'hidden'); $form->field(name => "page", value => $page, force => 1); $form->field(name => "type", value => $type, force => 1); $form->field(name => "editmessage", type => "text", size => 80); $form->field(name => "editcontent", type => "textarea", rows => 20, cols => 80); $form->tmpl_param("can_commit", $config{rcs}); $form->tmpl_param("helponformattinglink", htmllink($page, $page, "ikiwiki/formatting", noimageinline => 1, linktext => "FormattingHelp")); my $cansubscribe=IkiWiki::Plugin::notifyemail->can("subscribe") && IkiWiki::Plugin::comments->can("import") && defined $session->param('name'); if ($cansubscribe) { $form->field(name => "subscribe", type => "checkbox", options => [gettext("email comments to me")]); } else { $form->field(name => "subscribe", type => 'hidden'); } my $previewing=0; if ($form->submitted eq "Cancel") { if ($form->field("do") eq "create" && defined $from) { redirect($q, urlto($from)); } elsif ($form->field("do") eq "create") { redirect($q, baseurl(undef)); } else { redirect($q, $baseurl); } exit; } elsif 
($form->submitted eq "Preview") { $previewing=1; my $new=not exists $pagesources{$page}; # temporarily record its type $pagesources{$page}=$page.".".$type if $new; my %wasrendered=map { $_ => 1 } @{$renderedfiles{$page}}; my $content=$form->field('editcontent'); run_hooks(editcontent => sub { $content=shift->( content => $content, page => $page, cgi => $q, session => $session, ); }); my $preview=htmlize($page, $page, $type, linkify($page, $page, preprocess($page, $page, filter($page, $page, $content), 0, 1))); run_hooks(format => sub { $preview=shift->( page => $page, content => $preview, ); }); $form->tmpl_param("page_preview", $preview); # Previewing may have created files on disk. # Keep a list of these to be deleted later. my %previews = map { $_ => 1 } @{$wikistate{editpage}{previews}}; foreach my $f (@{$renderedfiles{$page}}) { $previews{$f}=1 unless $wasrendered{$f}; } # Throw out any other state changes made during previewing, # and save the previews list. loadindex(); @{$wikistate{editpage}{previews}} = keys %previews; saveindex(); } elsif ($form->submitted eq "Save Page") { $form->tmpl_param("page_preview", ""); } if ($form->submitted ne "Save Page" || ! $form->validate) { if ($form->field("do") eq "create") { my @page_locs; my $best_loc; if (! defined $from || ! length $from || $from ne $form->field('from') || file_pruned($from) || $absolute || $form->submitted) { @page_locs=$best_loc=$page; unshift @page_locs, lc($page) if ! 
$form->submitted && lc($page) ne $page; } elsif (lc $page eq lc $config{discussionpage}) { @page_locs=$best_loc="$from/".lc($page); } else { my $dir=$from."/"; $dir=~s![^/]+/+$!!; if ((defined $form->field('subpage') && length $form->field('subpage'))) { $best_loc="$from/$page"; } else { $best_loc=$dir.$page; } my $mixedcase=lc($page) ne $page; push @page_locs, $dir.lc($page) if $mixedcase; push @page_locs, $dir.$page; push @page_locs, $from."/".lc($page) if $mixedcase; push @page_locs, $from."/".$page; while (length $dir) { $dir=~s![^/]+/+$!!; push @page_locs, $dir.lc($page) if $mixedcase; push @page_locs, $dir.$page; } my $userpage=IkiWiki::userpage($page); push @page_locs, $userpage if ! grep { $_ eq $userpage } @page_locs; } @page_locs = grep { ! exists $pagecase{lc $_} } @page_locs; if (! @page_locs) { # hmm, someone else made the page in the # meantime? if ($previewing) { # let them go ahead with the edit # and resolve the conflict at save # time @page_locs=$page; } else { redirect($q, $baseurl); exit; } } my @editable_locs = grep { check_canedit($_, $q, $session, 1) } @page_locs; if (! @editable_locs) { # now let it throw an error, or prompt for # login map { check_canedit($_, $q, $session) } ($best_loc, @page_locs); } my @page_types; if (exists $hooks{htmlize}) { foreach my $key (grep { !/^_/ } keys %{$hooks{htmlize}}) { push @page_types, [$key, $hooks{htmlize}{$key}{longname} || $key]; } } @page_types=sort @page_types; $form->tmpl_param("page_select", 1); $form->field(name => "page", type => 'select', options => [ map { [ $_, pagetitle($_, 1) ] } @editable_locs ], value => $best_loc); $form->field(name => "type", type => 'select', options => \@page_types); $form->title(sprintf(gettext("creating %s"), pagetitle(basename($page)))); } elsif ($form->field("do") eq "edit") { check_canedit($page, $q, $session); if (! defined $form->field('editcontent') || ! 
length $form->field('editcontent')) { my $content=""; if (exists $pagesources{$page}) { $content=readfile(srcfile($pagesources{$page})); $content=~s/\n/\r\n/g; } $form->field(name => "editcontent", value => $content, force => 1); } $form->tmpl_param("page_select", 0); $form->field(name => "page", type => 'hidden'); $form->field(name => "type", type => 'hidden'); $form->title(sprintf(gettext("editing %s"), pagetitle(basename($page)))); } showform($form, \@buttons, $session, $q, page => $page); } else { # save page check_canedit($page, $q, $session); checksessionexpiry($q, $session, $q->param('sid')); my $exists=-e "$config{srcdir}/$file"; if ($form->field("do") ne "create" && ! $exists && ! defined srcfile($file, 1)) { $form->tmpl_param("message", template("editpagegone.tmpl")->output); $form->field(name => "do", value => "create", force => 1); $form->tmpl_param("page_select", 0); $form->field(name => "page", type => 'hidden'); $form->field(name => "type", type => 'hidden'); $form->title(sprintf(gettext("editing %s"), $page)); showform($form, \@buttons, $session, $q, page => $page); exit; } elsif ($form->field("do") eq "create" && $exists) { $form->tmpl_param("message", template("editcreationconflict.tmpl")->output); $form->field(name => "do", value => "edit", force => 1); $form->tmpl_param("page_select", 0); $form->field(name => "page", type => 'hidden'); $form->field(name => "type", type => 'hidden'); $form->title(sprintf(gettext("editing %s"), $page)); $form->field("editcontent", value => readfile("$config{srcdir}/$file"). 
"\n\n\n".$form->field("editcontent"), force => 1); showform($form, \@buttons, $session, $q, page => $page); exit; } my $message=""; if (defined $form->field('editmessage') && length $form->field('editmessage')) { $message=$form->field('editmessage'); } my $content=$form->field('editcontent'); check_content(content => $content, page => $page, cgi => $q, session => $session, subject => $message); run_hooks(editcontent => sub { $content=shift->( content => $content, page => $page, cgi => $q, session => $session, ); }); $content=~s/\r\n/\n/g; $content=~s/\r/\n/g; $content.="\n" if $content !~ /\n$/; $config{cgi}=0; # avoid cgi error message eval { writefile($file, $config{srcdir}, $content) }; $config{cgi}=1; if ($@) { $form->field(name => "rcsinfo", value => rcs_prepedit($file), force => 1); my $mtemplate=template("editfailedsave.tmpl"); $mtemplate->param(error_message => $@); $form->tmpl_param("message", $mtemplate->output); $form->field("editcontent", value => $content, force => 1); $form->tmpl_param("page_select", 0); $form->field(name => "page", type => 'hidden'); $form->field(name => "type", type => 'hidden'); $form->title(sprintf(gettext("editing %s"), $page)); showform($form, \@buttons, $session, $q, page => $page); exit; } my $conflict; if ($config{rcs}) { if (! $exists) { rcs_add($file); } # Prevent deadlock with post-commit hook by # signaling to it that it should not try to # do anything. disable_commit_hook(); $conflict=rcs_commit( file => $file, message => $message, token => $form->field("rcsinfo"), session => $session, ); enable_commit_hook(); rcs_update(); } # Refresh even if there was a conflict, since other changes # may have been committed while the post-commit hook was # disabled. 
require IkiWiki::Render; refresh(); saveindex(); if (defined $conflict) { $form->field(name => "rcsinfo", value => rcs_prepedit($file), force => 1); $form->tmpl_param("message", template("editconflict.tmpl")->output); $form->field("editcontent", value => $conflict, force => 1); $form->field("do", "edit", force => 1); $form->tmpl_param("page_select", 0); $form->field(name => "page", type => 'hidden'); $form->field(name => "type", type => 'hidden'); $form->title(sprintf(gettext("editing %s"), $page)); showform($form, \@buttons, $session, $q, page => $page); } else { # The trailing question mark tries to avoid broken # caches and get the most recent version of the page. redirect($q, $baseurl."?updated"); } if ($cansubscribe && length $form->field("subscribe")) { my $subspec="comment($page)"; IkiWiki::Plugin::notifyemail::subscribe( $session->param('name'), $subspec); } } exit; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/hnb.pm0000644000000000000000000000236212211730001016021 0ustar #!/usr/bin/perl # hnb markup # Licensed under the GPL v2 or greater # Copyright (C) 2008 by Axel Beckert # # TODO: Make a switch to allow both HTML export routines of hnb # (`export_html` and `export_htmlcss`) to be used. package IkiWiki::Plugin::hnb; use warnings; use strict; use IkiWiki 3.00; use File::Temp qw(:mktemp); sub import { hook(type => "getsetup", id => "hnb", call => \&getsetup); hook(type => "htmlize", id => "hnb", call => \&htmlize); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, # format plugin section => "format", }, } sub htmlize (@) { my %params = @_; # hnb outputs version number etc. every time to STDOUT, so # using files makes it easier to seprarate. 
my ($infh, $tmpin) = mkstemp( "/tmp/ikiwiki-hnbin.XXXXXXXXXX" ); my ($outfh, $tmpout) = mkstemp( "/tmp/ikiwiki-hnbout.XXXXXXXXXX" ); open(TMP, '>', $tmpin) or die "Can't write to $tmpin: $!"; print TMP $params{content}; close TMP; system("hnb '$tmpin' 'go root' 'export_html $tmpout' > /dev/null"); unlink $tmpin; open(TMP, '<', $tmpout) or die "Can't read from $tmpout: $!"; local $/; my $ret = ; close TMP; unlink $tmpout; $ret =~ s/.*//si; $ret =~ s/.*//si; return $ret; } 1; ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/headinganchors.pm0000644000000000000000000000156312211730001020231 0ustar #!/usr/bin/perl # quick HTML heading id adder by Paul Wise package IkiWiki::Plugin::headinganchors; use warnings; use strict; use IkiWiki 3.00; use URI::Escape; sub import { hook(type => "getsetup", id => "headinganchors", call => \&getsetup); hook(type => "sanitize", id => "headinganchors", call => \&headinganchors); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub text_to_anchor { my $str = shift; $str =~ s/^\s+//; $str =~ s/\s+$//; $str =~ s/\s/_/g; $str =~ s/"//g; $str =~ s/^[^a-zA-Z]/z-/; # must start with an alphabetical character $str = uri_escape_utf8($str); $str =~ s/%/./g; return $str; } sub headinganchors (@) { my %params=@_; my $content=$params{content}; $content=~s{([^>]*)}{''.$2.''}gie; return $content; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/getsource.pm0000644000000000000000000000377112211730001017257 0ustar #!/usr/bin/perl package IkiWiki::Plugin::getsource; use warnings; use strict; use IkiWiki; use open qw{:utf8 :std}; sub import { hook(type => "getsetup", id => "getsource", call => \&getsetup); hook(type => "pagetemplate", id => "getsource", call => \&pagetemplate); hook(type => "cgi", id => "getsource", call => \&cgi_getsource); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, section => "web", }, getsource_mimetype => { type => "string", example => "text/plain; charset=utf-8", description 
=> "Mime type for returned source.", safe => 1, rebuild => 0, }, } sub pagetemplate (@) { my %params=@_; my $page=$params{page}; my $template=$params{template}; if (length $config{cgiurl}) { $template->param(getsourceurl => IkiWiki::cgiurl(do => "getsource", page => $page)); $template->param(have_actions => 1); } } sub cgi_getsource ($) { my $cgi=shift; return unless defined $cgi->param('do') && $cgi->param("do") eq "getsource"; IkiWiki::decode_cgi_utf8($cgi); my $page=$cgi->param('page'); if (! defined $page || $page !~ /$config{wiki_file_regexp}/) { error("invalid page parameter"); } # For %pagesources. IkiWiki::loadindex(); if (! exists $pagesources{$page}) { IkiWiki::cgi_custom_failure( $cgi, "404 Not Found", IkiWiki::cgitemplate($cgi, gettext("missing page"), "

". sprintf(gettext("The page %s does not exist."), htmllink("", "", $page)). "

")); exit; } if (! defined pagetype($pagesources{$page})) { IkiWiki::cgi_custom_failure( $cgi->header(-status => "403 Forbidden"), IkiWiki::cgitemplate($cgi, gettext("not a page"), "

". sprintf(gettext("%s is an attachment, not a page."), htmllink("", "", $page)). "

")); exit; } if (! $config{getsource_mimetype}) { $config{getsource_mimetype} = "text/plain; charset=utf-8"; } print "Content-Type: $config{getsource_mimetype}\r\n"; print ("\r\n"); print readfile(srcfile($pagesources{$page})); exit 0; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/fortune.pm0000644000000000000000000000110612211730000016726 0ustar #!/usr/bin/perl # Include a fortune in a page package IkiWiki::Plugin::fortune; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "fortune", call => \&getsetup); hook(type => "preprocess", id => "fortune", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { $ENV{PATH}="$ENV{PATH}:/usr/games:/usr/local/games"; my $f = `fortune 2>/dev/null`; if ($?) { error gettext("fortune failed"); } else { return "
$f
\n"; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/img.pm0000644000000000000000000001204312211730001016023 0ustar #!/usr/bin/perl # Ikiwiki enhanced image handling plugin # Christian Mock cm@tahina.priv.at 20061002 package IkiWiki::Plugin::img; use warnings; use strict; use IkiWiki 3.00; my %imgdefaults; sub import { hook(type => "getsetup", id => "img", call => \&getsetup); hook(type => "preprocess", id => "img", call => \&preprocess, scan => 1); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { my ($image) = $_[0] =~ /$config{wiki_file_regexp}/; # untaint my %params=@_; if (! defined $image) { error("bad image filename"); } if (exists $imgdefaults{$params{page}}) { foreach my $key (keys %{$imgdefaults{$params{page}}}) { if (! exists $params{$key}) { $params{$key}=$imgdefaults{$params{page}}->{$key}; } } } if (! exists $params{size} || ! length $params{size}) { $params{size}='full'; } if ($image eq 'defaults') { $imgdefaults{$params{page}} = \%params; return ''; } add_link($params{page}, $image); add_depends($params{page}, $image); # optimisation: detect scan mode, and avoid generating the image if (! defined wantarray) { return; } my $file = bestlink($params{page}, $image); my $srcfile = srcfile($file, 1); if (! length $file || ! defined $srcfile) { return htmllink($params{page}, $params{destpage}, $image); } my $dir = $params{page}; my $base = IkiWiki::basename($file); my $issvg = $base=~s/\.svg$/.png/i; eval q{use Image::Magick}; error gettext("Image::Magick is not installed") if $@; my $im = Image::Magick->new($issvg ? 
(magick => "png") : ()); my $imglink; my $r = $im->Read($srcfile); error sprintf(gettext("failed to read %s: %s"), $file, $r) if $r; my ($dwidth, $dheight); if ($params{size} ne 'full') { my ($w, $h) = ($params{size} =~ /^(\d*)x(\d*)$/); error sprintf(gettext('wrong size format "%s" (should be WxH)'), $params{size}) unless (defined $w && defined $h && (length $w || length $h)); if ((length $w && $w > $im->Get("width")) || (length $h && $h > $im->Get("height"))) { # resizing larger $imglink = $file; # don't generate larger image, just set display size if (length $w && length $h) { ($dwidth, $dheight)=($w, $h); } # avoid division by zero on 0x0 image elsif ($im->Get("width") == 0 || $im->Get("height") == 0) { ($dwidth, $dheight)=(0, 0); } # calculate unspecified size from the other one, preserving # aspect ratio elsif (length $w) { $dwidth=$w; $dheight=$w / $im->Get("width") * $im->Get("height"); } elsif (length $h) { $dheight=$h; $dwidth=$h / $im->Get("height") * $im->Get("width"); } } else { # resizing smaller my $outfile = "$config{destdir}/$dir/${w}x${h}-$base"; $imglink = "$dir/${w}x${h}-$base"; will_render($params{page}, $imglink); if (-e $outfile && (-M $srcfile >= -M $outfile)) { $im = Image::Magick->new; $r = $im->Read($outfile); error sprintf(gettext("failed to read %s: %s"), $outfile, $r) if $r; } else { $r = $im->Resize(geometry => "${w}x${h}"); error sprintf(gettext("failed to resize: %s"), $r) if $r; # don't actually write resized file in preview mode; # rely on width and height settings if (! $params{preview}) { my @blob = $im->ImageToBlob(); writefile($imglink, $config{destdir}, $blob[0], 1); } else { $imglink = $file; } } # always get the true size of the resized image $dwidth = $im->Get("width"); $dheight = $im->Get("height"); } } else { $imglink = $file; $dwidth = $im->Get("width"); $dheight = $im->Get("height"); } if (! defined($dwidth) || ! 
defined($dheight)) { error sprintf(gettext("failed to determine size of image %s"), $file) } my ($fileurl, $imgurl); if (! $params{preview}) { $fileurl=urlto($file, $params{destpage}); $imgurl=urlto($imglink, $params{destpage}); } else { $fileurl=urlto($file); $imgurl=urlto($imglink); } if (! exists $params{class}) { $params{class}="img"; } my $attrs=''; foreach my $attr (qw{alt title class id hspace vspace}) { if (exists $params{$attr}) { $attrs.=" $attr=\"$params{$attr}\""; } } my $imgtag=''; my $link; if (! defined $params{link}) { $link=$fileurl; } elsif ($params{link} =~ /^\w+:\/\//) { $link=$params{link}; } if (defined $link) { $imgtag=''.$imgtag.''; } else { my $b = bestlink($params{page}, $params{link}); if (length $b) { add_depends($params{page}, $b, deptype("presence")); $imgtag=htmllink($params{page}, $params{destpage}, $params{link}, linktext => $imgtag, noimageinline => 1, ); } } if (exists $params{caption}) { return ''. ''. ''. '
'.$params{caption}.'
'.$imgtag.'
'; } else { return $imgtag; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/blogspam.pm0000644000000000000000000000721312211730001017056 0ustar #!/usr/bin/perl package IkiWiki::Plugin::blogspam; use warnings; use strict; use IkiWiki 3.00; use Encode; my $defaulturl='http://test.blogspam.net:8888/'; sub import { hook(type => "getsetup", id => "blogspam", call => \&getsetup); hook(type => "checkconfig", id => "blogspam", call => \&checkconfig); hook(type => "checkcontent", id => "blogspam", call => \&checkcontent); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "auth", }, blogspam_pagespec => { type => 'pagespec', example => 'postcomment(*)', description => 'PageSpec of pages to check for spam', link => 'ikiwiki/PageSpec', safe => 1, rebuild => 0, }, blogspam_options => { type => "string", example => "blacklist=1.2.3.4,blacklist=8.7.6.5,max-links=10", description => "options to send to blogspam server", link => "http://blogspam.net/api/testComment.html#options", safe => 1, rebuild => 0, }, blogspam_server => { type => "string", default => $defaulturl, description => "blogspam server XML-RPC url", safe => 1, rebuild => 0, }, } sub checkconfig () { # This is done at checkconfig time because printing an error # if the module is missing when a spam is posted would not # let the admin know about the problem. eval q{ use RPC::XML; use RPC::XML::Client; $RPC::XML::ENCODING = 'utf-8'; }; error $@ if $@; } sub checkcontent (@) { my %params=@_; my $session=$params{session}; my $spec='!admin()'; if (exists $config{blogspam_pagespec} && length $config{blogspam_pagespec}) { $spec.=" and (".$config{blogspam_pagespec}.")"; } my $user=$session->param("name"); return undef unless pagespec_match($params{page}, $spec, (defined $user ? (user => $user) : ()), (defined $session->remote_addr() ? 
(ip => $session->remote_addr()) : ()), location => $params{page}); my $url=$defaulturl; $url = $config{blogspam_server} if exists $config{blogspam_server}; my $client = RPC::XML::Client->new($url); my @options = split(",", $config{blogspam_options}) if exists $config{blogspam_options}; # Allow short comments and whitespace-only edits, unless the user # has overridden min-words themselves. push @options, "min-words=0" unless grep /^min-words=/i, @options; # Wiki pages can have a lot of urls, unless the user specifically # wants to limit them. push @options, "exclude=lotsaurls" unless grep /^max-links/i, @options; # Unless the user specified a size check, disable such checking. push @options, "exclude=size" unless grep /^(?:max|min)-size/i, @options; # This test has absurd false positives on words like "alpha" # and "buy". push @options, "exclude=stopwords"; my %req=( ip => $session->remote_addr(), comment => encode_utf8(defined $params{diff} ? $params{diff} : $params{content}), subject => encode_utf8(defined $params{subject} ? $params{subject} : ""), name => encode_utf8(defined $params{author} ? $params{author} : ""), link => encode_utf8(exists $params{url} ? $params{url} : ""), options => join(",", @options), site => encode_utf8($config{url}), version => "ikiwiki ".$IkiWiki::version, ); my $res = $client->send_request('testComment', \%req); if (! ref $res || ! defined $res->value) { debug("failed to get response from blogspam server ($url)"); return undef; } elsif ($res->value =~ /^SPAM:(.*)/) { eval q{use Data::Dumper}; debug("blogspam server reports ".$res->value.": ".Dumper(\%req)); return gettext("Sorry, but that looks like spam to blogspam: ").$1; } elsif ($res->value ne 'OK') { debug("blogspam server failure: ".$res->value); return undef; } else { return undef; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/sidebar.pm0000644000000000000000000000465412211730001016671 0ustar #!/usr/bin/perl # Sidebar plugin. 
# by Tuomo Valkonen package IkiWiki::Plugin::sidebar; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "sidebar", call => \&getsetup); hook(type => "preprocess", id => "sidebar", call => \&preprocess); hook(type => "pagetemplate", id => "sidebar", call => \&pagetemplate); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, }, global_sidebars => { type => "boolean", example => 1, description => "show sidebar page on all pages?", safe => 1, rebuild => 1, }, } my %pagesidebar; sub preprocess (@) { my %params=@_; my $page=$params{page}; return "" unless $page eq $params{destpage}; if (! defined $params{content}) { $pagesidebar{$page}=undef; } else { my $file = $pagesources{$page}; my $type = pagetype($file); $pagesidebar{$page}= IkiWiki::htmlize($page, $page, $type, IkiWiki::linkify($page, $page, IkiWiki::preprocess($page, $page, $params{content}))); } return ""; } my $oldfile; my $oldcontent; sub sidebar_content ($) { my $page=shift; return delete $pagesidebar{$page} if defined $pagesidebar{$page}; return if ! exists $pagesidebar{$page} && defined $config{global_sidebars} && ! $config{global_sidebars}; my $sidebar_page=bestlink($page, "sidebar") || return; my $sidebar_file=$pagesources{$sidebar_page} || return; my $sidebar_type=pagetype($sidebar_file); if (defined $sidebar_type) { # FIXME: This isn't quite right; it won't take into account # adding a new sidebar page. So adding such a page # currently requires a wiki rebuild. 
add_depends($page, $sidebar_page); my $content; if (defined $oldfile && $sidebar_file eq $oldfile) { $content=$oldcontent; } else { $content=readfile(srcfile($sidebar_file)); $oldcontent=$content; $oldfile=$sidebar_file; } return unless length $content; return IkiWiki::htmlize($sidebar_page, $page, $sidebar_type, IkiWiki::linkify($sidebar_page, $page, IkiWiki::preprocess($sidebar_page, $page, IkiWiki::filter($sidebar_page, $page, $content)))); } } sub pagetemplate (@) { my %params=@_; my $template=$params{template}; if ($params{destpage} eq $params{page} && $template->query(name => "sidebar")) { my $content=sidebar_content($params{destpage}); if (defined $content && length $content) { $template->param(sidebar => $content); } } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/opendiscussion.pm0000644000000000000000000000113612211730001020315 0ustar #!/usr/bin/perl package IkiWiki::Plugin::opendiscussion; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "opendiscussion", call => \&getsetup); hook(type => "canedit", id => "opendiscussion", call => \&canedit, first => 1); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "auth", }, } sub canedit ($$) { my $page=shift; my $cgi=shift; my $session=shift; return "" if $config{discussion} && $page=~/(\/|^)\Q$config{discussionpage}\E$/i; return "" if pagespec_match($page, "postcomment(*)"); return undef; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/polygen.pm0000644000000000000000000000271312211730000016726 0ustar #!/usr/bin/perl # # Include polygen output in a page # # by Enrico Zini package IkiWiki::Plugin::polygen; use warnings; use strict; use IkiWiki 3.00; use File::Find; sub import { hook(type => "getsetup", id => "polygen", call => \&getsetup); hook(type => "preprocess", id => "polygen", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { my %params=@_; my $grammar = 
($params{grammar} or 'polygen'); my $symbol = ($params{symbol} or undef); # Sanitize parameters $grammar =~ IkiWiki::basename($grammar); $grammar =~ s/[^A-Za-z0-9]//g; $grammar =~ s/\.grm$//; $grammar .= '.grm'; $symbol =~ s/[^A-Za-z0-9]//g if defined $symbol; $symbol = IkiWiki::possibly_foolish_untaint($symbol) if defined $symbol; my $grmfile = '/usr/share/polygen/ita/polygen.grm'; if (! -d '/usr/share/polygen') { error gettext("polygen not installed"); } find({wanted => sub { if (substr($File::Find::name, -length($grammar)) eq $grammar) { $grmfile = IkiWiki::possibly_foolish_untaint($File::Find::name); } }, no_chdir => 1, }, '/usr/share/polygen'); my $res; if (defined $symbol) { $res = `polygen -S $symbol $grmfile 2>/dev/null`; } else { $res = `polygen $grmfile 2>/dev/null`; } if ($?) { error gettext("command failed"); } # Strip trailing spaces and newlines so that we flow well with the # markdown text $res =~ s/\s*$//; return $res; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/userlist.pm0000644000000000000000000000331012211730001017116 0ustar #!/usr/bin/perl package IkiWiki::Plugin::userlist; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "userlist", call => \&getsetup); hook(type => "sessioncgi", id => "userlist", call => \&sessioncgi); hook(type => "formbuilder_setup", id => "userlist", call => \&formbuilder_setup); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "web", }, } sub sessioncgi ($$) { my $cgi=shift; my $session=shift; if ($cgi->param("do") eq "userlist") { showuserlist($cgi, $session); exit; } } sub formbuilder_setup (@) { my %params=@_; my $form=$params{form}; if ($form->title eq "preferences" && IkiWiki::is_admin($params{session}->param("name"))) { push @{$params{buttons}}, "Users"; if ($form->submitted && $form->submitted eq "Users") { showuserlist($params{cgi}, $params{session}); exit; } } } sub showuserlist ($$) { my $q=shift; my $session=shift; IkiWiki::needsignin($q, 
$session); if (! defined $session->param("name") || ! IkiWiki::is_admin($session->param("name"))) { error(gettext("you are not logged in as an admin")); } my $h="\n"; $h.="\n"; my $info=IkiWiki::userinfo_retrieve(); eval q{use HTML::Entities}; if (ref $info) { foreach my $user (sort { $info->{$a}->{regdate} <=> $info->{$b}->{regdate} } keys %$info) { my %i=%{$info->{$user}}; $h.="\n"; } } $h.="
".gettext("login")."".gettext("email")."
".encode_entities($user)."". encode_entities(defined $i{email} ? $i{email} : ""). "
\n"; IkiWiki::printheader($session); print IkiWiki::cgitemplate(undef, gettext("Users"), $h); } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/google.pm0000644000000000000000000000222212211730000016520 0ustar #!/usr/bin/perl package IkiWiki::Plugin::google; use warnings; use strict; use IkiWiki 3.00; use URI; sub import { hook(type => "getsetup", id => "google", call => \&getsetup); hook(type => "checkconfig", id => "google", call => \&checkconfig); hook(type => "pagetemplate", id => "google", call => \&pagetemplate); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, section => "web", }, } sub checkconfig () { if (! length $config{url}) { error(sprintf(gettext("Must specify %s when using the %s plugin"), "url", 'google')); } # This is a mass dependency, so if the search form template # changes, every page is rebuilt. add_depends("", "templates/googleform.tmpl"); } my $form; sub pagetemplate (@) { my %params=@_; my $page=$params{page}; my $template=$params{template}; # Add search box to page header. if ($template->query(name => "searchform")) { if (! defined $form) { my $searchform = template("googleform.tmpl", blind_cache => 1); $searchform->param(url => $config{url}); $searchform->param(html5 => $config{html5}); $form=$searchform->output; } $template->param(searchform => $form); } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/inline.pm0000644000000000000000000005143112211730000016530 0ustar #!/usr/bin/perl # Page inlining and blogging. 
package IkiWiki::Plugin::inline; use warnings; use strict; use Encode; use IkiWiki 3.00; use URI; my %knownfeeds; my %page_numfeeds; my @inline; my $nested=0; sub import { hook(type => "getopt", id => "inline", call => \&getopt); hook(type => "getsetup", id => "inline", call => \&getsetup); hook(type => "checkconfig", id => "inline", call => \&checkconfig); hook(type => "sessioncgi", id => "inline", call => \&sessioncgi); hook(type => "preprocess", id => "inline", call => \&IkiWiki::preprocess_inline, scan => 1); hook(type => "pagetemplate", id => "inline", call => \&IkiWiki::pagetemplate_inline); hook(type => "format", id => "inline", call => \&format, first => 1); # Hook to change to do pinging since it's called late. # This ensures each page only pings once and prevents slow # pings interrupting page builds. hook(type => "rendered", id => "inline", call => \&IkiWiki::pingurl); } sub getopt () { eval q{use Getopt::Long}; error($@) if $@; Getopt::Long::Configure('pass_through'); GetOptions( "rss!" => \$config{rss}, "atom!" => \$config{atom}, "allowrss!" => \$config{allowrss}, "allowatom!" => \$config{allowatom}, "pingurl=s" => sub { push @{$config{pingurl}}, $_[1]; }, ); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "core", }, rss => { type => "boolean", example => 0, description => "enable rss feeds by default?", safe => 1, rebuild => 1, }, atom => { type => "boolean", example => 0, description => "enable atom feeds by default?", safe => 1, rebuild => 1, }, allowrss => { type => "boolean", example => 0, description => "allow rss feeds to be used?", safe => 1, rebuild => 1, }, allowatom => { type => "boolean", example => 0, description => "allow atom feeds to be used?", safe => 1, rebuild => 1, }, pingurl => { type => "string", example => "http://rpc.technorati.com/rpc/ping", description => "urls to ping (using XML-RPC) on feed update", safe => 1, rebuild => 0, }, } sub checkconfig () { if (($config{rss} || $config{atom}) && ! 
length $config{url}) { error(gettext("Must specify url to wiki with --url when using --rss or --atom")); } if ($config{rss}) { push @{$config{wiki_file_prune_regexps}}, qr/\.rss$/; } if ($config{atom}) { push @{$config{wiki_file_prune_regexps}}, qr/\.atom$/; } if (! exists $config{pingurl}) { $config{pingurl}=[]; } } sub format (@) { my %params=@_; # Fill in the inline content generated earlier. This is actually an # optimisation. $params{content}=~s{
}{ delete @inline[$1,] }eg; return $params{content}; } sub sessioncgi ($$) { my $q=shift; my $session=shift; if ($q->param('do') eq 'blog') { my $page=titlepage(decode_utf8($q->param('title'))); $page=~s/(\/)/"__".ord($1)."__"/eg; # don't create subdirs # if the page already exists, munge it to be unique my $from=$q->param('from'); my $add=""; while (exists $IkiWiki::pagecase{lc($from."/".$page.$add)}) { $add=1 unless length $add; $add++; } $q->param('page', "/$from/$page$add"); # now go create the page $q->param('do', 'create'); # make sure the editpage plugin is loaded if (IkiWiki->can("cgi_editpage")) { IkiWiki::cgi_editpage($q, $session); } else { error(gettext("page editing not allowed")); } exit; } } # Back to ikiwiki namespace for the rest, this code is very much # internal to ikiwiki even though it's separated into a plugin. package IkiWiki; my %toping; my %feedlinks; sub preprocess_inline (@) { my %params=@_; if (! exists $params{pages} && ! exists $params{pagenames}) { error gettext("missing pages parameter"); } if (! defined wantarray) { # Running in scan mode: only do the essentials if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) { # default to sorting age, the same as inline itself, # but let the params override that IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params); } return; } if (yesno($params{trail}) && IkiWiki::Plugin::trail->can("preprocess_trailitems")) { scalar IkiWiki::Plugin::trail::preprocess_trailitems(sort => 'age', %params); } my $raw=yesno($params{raw}); my $archive=yesno($params{archive}); my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss}; my $atom=(($config{atom} || $config{allowatom}) && exists $params{atom}) ? yesno($params{atom}) : $config{atom}; my $quick=exists $params{quick} ? yesno($params{quick}) : 0; my $feeds=exists $params{feeds} ? yesno($params{feeds}) : !$quick && ! 
$raw; my $emptyfeeds=exists $params{emptyfeeds} ? yesno($params{emptyfeeds}) : 1; my $feedonly=yesno($params{feedonly}); if (! exists $params{show} && ! $archive) { $params{show}=10; } if (! exists $params{feedshow} && exists $params{show}) { $params{feedshow}=$params{show}; } my $desc; if (exists $params{description}) { $desc = $params{description} } else { $desc = $config{wikiname}; } my $actions=yesno($params{actions}); if (exists $params{template}) { $params{template}=~s/[^-_a-zA-Z0-9]+//g; } else { $params{template} = $archive ? "archivepage" : "inlinepage"; } my @list; if (exists $params{pagenames}) { foreach my $p (qw(sort pages)) { if (exists $params{$p}) { error sprintf(gettext("the %s and %s parameters cannot be used together"), "pagenames", $p); } } @list = split ' ', $params{pagenames}; if (yesno($params{reverse})) { @list=reverse(@list); } foreach my $p (@list) { add_depends($params{page}, $p, deptype($quick ? "presence" : "content")); } @list = grep { exists $pagesources{$_} } @list; } else { my $num=0; if ($params{show}) { $num=$params{show}; } if ($params{feedshow} && $num < $params{feedshow} && $num > 0) { $num=$params{feedshow}; } if ($params{skip} && $num) { $num+=$params{skip}; } @list = pagespec_match_list($params{page}, $params{pages}, deptype => deptype($quick ? "presence" : "content"), filter => sub { $_[0] eq $params{page} }, sort => exists $params{sort} ? $params{sort} : "age", reverse => yesno($params{reverse}), ($num ? (num => $num) : ()), ); } if (exists $params{skip}) { @list=@list[$params{skip} .. $#list]; } my @feedlist; if ($feeds) { if (exists $params{feedshow} && $params{feedshow} && @list > $params{feedshow}) { @feedlist=@list[0..$params{feedshow} - 1]; } else { @feedlist=@list; } } if ($params{show} && @list > $params{show}) { @list=@list[0..$params{show} - 1]; } if ($feeds && exists $params{feedpages}) { @feedlist = pagespec_match_list( $params{page}, "($params{pages}) and ($params{feedpages})", deptype => deptype($quick ? 
"presence" : "content"), list => \@feedlist, ); } my ($feedbase, $feednum); if ($feeds) { # Ensure that multiple feeds on a page go to unique files. # Feedfile can lead to conflicts if usedirs is not enabled, # so avoid supporting it in that case. delete $params{feedfile} if ! $config{usedirs}; # Tight limits on legal feedfiles, to avoid security issues # and conflicts. if (defined $params{feedfile}) { if ($params{feedfile} =~ /\// || $params{feedfile} !~ /$config{wiki_file_regexp}/) { error("illegal feedfile"); } $params{feedfile}=possibly_foolish_untaint($params{feedfile}); } $feedbase=targetpage($params{page}, "", $params{feedfile}); my $feedid=join("\0", $feedbase, map { $_."\0".$params{$_} } sort keys %params); if (exists $knownfeeds{$feedid}) { $feednum=$knownfeeds{$feedid}; } else { if (exists $page_numfeeds{$params{destpage}}{$feedbase}) { if ($feeds) { $feednum=$knownfeeds{$feedid}=++$page_numfeeds{$params{destpage}}{$feedbase}; } } else { $feednum=$knownfeeds{$feedid}=""; if ($feeds) { $page_numfeeds{$params{destpage}}{$feedbase}=1; } } } } my ($rssurl, $atomurl, $rssdesc, $atomdesc); if ($feeds) { if ($rss) { $rssurl=abs2rel($feedbase."rss".$feednum, dirname(htmlpage($params{destpage}))); $rssdesc = sprintf(gettext("%s (RSS feed)"), $desc); } if ($atom) { $atomurl=abs2rel($feedbase."atom".$feednum, dirname(htmlpage($params{destpage}))); $atomdesc = sprintf(gettext("%s (Atom feed)"), $desc); } } my $ret=""; if (length $config{cgiurl} && ! $params{preview} && (exists $params{rootpage} || (exists $params{postform} && yesno($params{postform}))) && IkiWiki->can("cgi_editpage")) { # Add a blog post form, with feed buttons. 
my $formtemplate=template_depends("blogpost.tmpl", $params{page}, blind_cache => 1); $formtemplate->param(cgiurl => IkiWiki::cgiurl()); $formtemplate->param(rootpage => rootpage(%params)); if ($feeds) { if ($rss) { $formtemplate->param(rssurl => $rssurl); $formtemplate->param(rssdesc => $rssdesc); } if ($atom) { $formtemplate->param(atomurl => $atomurl); $formtemplate->param(atomdesc => $atomdesc); } } if (exists $params{postformtext}) { $formtemplate->param(postformtext => $params{postformtext}); } else { $formtemplate->param(postformtext => gettext("Add a new post titled:")); } if (exists $params{id}) { $formtemplate->param(postformid => $params{id}); } $ret.=$formtemplate->output; # The post form includes the feed buttons, so # emptyfeeds cannot be hidden. $emptyfeeds=1; } elsif ($feeds && !$params{preview} && ($emptyfeeds || @feedlist)) { # Add feed buttons. my $linktemplate=template_depends("feedlink.tmpl", $params{page}, blind_cache => 1); if ($rss) { $linktemplate->param(rssurl => $rssurl); $linktemplate->param(rssdesc => $rssdesc); } if ($atom) { $linktemplate->param(atomurl => $atomurl); $linktemplate->param(atomdesc => $atomdesc); } if (exists $params{id}) { $linktemplate->param(id => $params{id}); } $ret.=$linktemplate->output; } if (! $feedonly) { my $template; if (! $raw) { # cannot use wiki pages as templates; template not sanitized due to # format hook hack eval { $template=template_depends($params{template}.".tmpl", $params{page}, blind_cache => 1); }; if ($@) { error sprintf(gettext("failed to process template %s"), $params{template}.".tmpl").": $@"; } } my $needcontent=$raw || (!($archive && $quick) && $template->query(name => 'content')); foreach my $page (@list) { my $file = $pagesources{$page}; my $type = pagetype($file); if (! $raw) { if ($needcontent) { # Get the content before populating the # template, since getting the content uses # the same template if inlines are nested. 
my $content=get_inline_content($page, $params{destpage}); $template->param(content => $content); } $template->param(pageurl => urlto($page, $params{destpage})); $template->param(inlinepage => $page); $template->param(title => pagetitle(basename($page))); $template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}, 1)); $template->param(mtime => displaytime($pagemtime{$page}, $params{timeformat})); $template->param(first => 1) if $page eq $list[0]; $template->param(last => 1) if $page eq $list[$#list]; $template->param(html5 => $config{html5}); if ($actions) { my $file = $pagesources{$page}; my $type = pagetype($file); if ($config{discussion}) { if ($page !~ /.*\/\Q$config{discussionpage}\E$/i && (length $config{cgiurl} || exists $pagesources{$page."/".lc($config{discussionpage})})) { $template->param(have_actions => 1); $template->param(discussionlink => htmllink($page, $params{destpage}, $config{discussionpage}, noimageinline => 1, forcesubpage => 1)); } } if (length $config{cgiurl} && defined $type && IkiWiki->can("cgi_editpage")) { $template->param(have_actions => 1); $template->param(editurl => cgiurl(do => "edit", page => $page)); } } run_hooks(pagetemplate => sub { shift->(page => $page, destpage => $params{destpage}, template => $template,); }); $ret.=$template->output; $template->clear_params; } else { if (defined $type) { $ret.="\n". linkify($page, $params{destpage}, preprocess($page, $params{destpage}, filter($page, $params{destpage}, readfile(srcfile($file))))); } else { $ret.="\n". readfile(srcfile($file)); } } } } if ($feeds && ($emptyfeeds || @feedlist)) { if ($rss) { my $rssp=$feedbase."rss".$feednum; will_render($params{destpage}, $rssp); if (! 
$params{preview}) { writefile($rssp, $config{destdir}, genfeed("rss", $config{url}."/".$rssp, $desc, $params{guid}, $params{page}, @feedlist)); $toping{$params{destpage}}=1 unless $config{rebuild}; $feedlinks{$params{destpage}}.=qq{}; } } if ($atom) { my $atomp=$feedbase."atom".$feednum; will_render($params{destpage}, $atomp); if (! $params{preview}) { writefile($atomp, $config{destdir}, genfeed("atom", $config{url}."/".$atomp, $desc, $params{guid}, $params{page}, @feedlist)); $toping{$params{destpage}}=1 unless $config{rebuild}; $feedlinks{$params{destpage}}.=qq{}; } } } clear_inline_content_cache(); return $ret if $raw || $nested; push @inline, $ret; return "
\n\n"; } sub pagetemplate_inline (@) { my %params=@_; my $page=$params{page}; my $template=$params{template}; $template->param(feedlinks => $feedlinks{$page}) if exists $feedlinks{$page} && $template->query(name => "feedlinks"); } { my %inline_content; my $cached_destpage=""; sub get_inline_content ($$) { my $page=shift; my $destpage=shift; if (exists $inline_content{$page} && $cached_destpage eq $destpage) { return $inline_content{$page}; } my $file=$pagesources{$page}; my $type=pagetype($file); my $ret=""; if (defined $type) { $nested++; $ret=htmlize($page, $destpage, $type, linkify($page, $destpage, preprocess($page, $destpage, filter($page, $destpage, readfile(srcfile($file)))))); $nested--; if (isinternal($page)) { # make inlined text of internal pages searchable run_hooks(indexhtml => sub { shift->(page => $page, destpage => $destpage, content => $ret); }); } } if ($cached_destpage ne $destpage) { clear_inline_content_cache(); $cached_destpage=$destpage; } return $inline_content{$page}=$ret; } sub clear_inline_content_cache () { %inline_content=(); } } sub date_822 ($) { my $time=shift; my $lc_time=POSIX::setlocale(&POSIX::LC_TIME); POSIX::setlocale(&POSIX::LC_TIME, "C"); my $ret=POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time)); POSIX::setlocale(&POSIX::LC_TIME, $lc_time); return $ret; } sub absolute_urls ($$) { # needed because rss sucks my $html=shift; my $baseurl=shift; my $url=$baseurl; $url=~s/[^\/]+$//; my $urltop; # calculated if needed my $ret=""; eval q{use HTML::Parser; use HTML::Tagset}; die $@ if $@; my $p = HTML::Parser->new(api_version => 3); $p->handler(default => sub { $ret.=join("", @_) }, "text"); $p->handler(start => sub { my ($tagname, $pos, $text) = @_; if (ref $HTML::Tagset::linkElements{$tagname}) { while (4 <= @$pos) { # use attribute sets from right to left # to avoid invalidating the offsets # when replacing the values my ($k_offset, $k_len, $v_offset, $v_len) = splice(@$pos, -4); my $attrname = lc(substr($text, 
$k_offset, $k_len)); next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}}; next unless $v_offset; # 0 v_offset means no value my $v = substr($text, $v_offset, $v_len); $v =~ s/^([\'\"])(.*)\1$/$2/; eval q{use HTML::Entities}; my $dv = decode_entities($v); if ($dv=~/^#/) { $v=$baseurl.$v; # anchor } elsif ($dv=~/^(?!\w+:)[^\/]/) { $v=$url.$v; # relative url } elsif ($dv=~/^\//) { if (! defined $urltop) { # what is the non path part of the url? my $top_uri = URI->new($url); $top_uri->path_query(""); # reset the path $urltop = $top_uri->as_string; } $v=$urltop.$v; # url relative to top of site } $v =~ s/\"/"/g; # since we quote with "" substr($text, $v_offset, $v_len) = qq("$v"); } } $ret.=$text; }, "tagname, tokenpos, text"); $p->parse($html); $p->eof; return $ret; } sub genenclosure { my $itemtemplate=shift; my $url=shift; my $file=shift; return unless $itemtemplate->query(name => "enclosure"); my $size=(srcfile_stat($file))[8]; my $mime="unknown"; eval q{use File::MimeInfo}; if (! 
$@) { $mime = mimetype($file); } $itemtemplate->param( enclosure => $url, type => $mime, length => $size, ); } sub genfeed ($$$$$@) { my $feedtype=shift; my $feedurl=shift; my $feeddesc=shift; my $guid=shift; my $page=shift; my @pages=@_; my $url=URI->new(encode_utf8(urlto($page,"",1))); my $itemtemplate=template_depends($feedtype."item.tmpl", $page, blind_cache => 1); my $content=""; my $lasttime = 0; foreach my $p (@pages) { my $u=URI->new(encode_utf8(urlto($p, "", 1))); my $pcontent = absolute_urls(get_inline_content($p, $page), $url); my $fancy_enclosure_seen = 0; $itemtemplate->param( title => pagetitle(basename($p)), url => $u, permalink => $u, cdate_822 => date_822($pagectime{$p}), mdate_822 => date_822($pagemtime{$p}), cdate_3339 => date_3339($pagectime{$p}), mdate_3339 => date_3339($pagemtime{$p}), ); if (exists $pagestate{$p}) { if (exists $pagestate{$p}{meta}{guid}) { eval q{use HTML::Entities}; $itemtemplate->param(guid => HTML::Entities::encode_numeric($pagestate{$p}{meta}{guid})); } if (exists $pagestate{$p}{meta}{updated}) { $itemtemplate->param(mdate_822 => date_822($pagestate{$p}{meta}{updated})); $itemtemplate->param(mdate_3339 => date_3339($pagestate{$p}{meta}{updated})); } if (exists $pagestate{$p}{meta}{enclosure}) { my $absurl = $pagestate{$p}{meta}{enclosure}; my $file = $pagestate{$p}{meta}{enclosurefile}; genenclosure($itemtemplate, $absurl, $file); $fancy_enclosure_seen = 1; } } my $file=$pagesources{$p}; unless ($fancy_enclosure_seen || defined(pagetype($file))) { genenclosure($itemtemplate, $u, $file); $itemtemplate->param(simplepodcast => 1); } $itemtemplate->param(content => $pcontent); run_hooks(pagetemplate => sub { shift->(page => $p, destpage => $page, template => $itemtemplate); }); $content.=$itemtemplate->output; $itemtemplate->clear_params; $lasttime = $pagemtime{$p} if $pagemtime{$p} > $lasttime; } my $template=template_depends($feedtype."page.tmpl", $page, blind_cache => 1); $template->param( title => $page ne "index" ? 
pagetitle($page) : $config{wikiname}, wikiname => $config{wikiname}, pageurl => $url, content => $content, feeddesc => $feeddesc, guid => $guid, feeddate => date_3339($lasttime), feeddate_822 => date_822($lasttime), feedurl => $feedurl, ); run_hooks(pagetemplate => sub { shift->(page => $page, destpage => $page, template => $template); }); return $template->output; } sub pingurl (@) { return unless @{$config{pingurl}} && %toping; eval q{require RPC::XML::Client}; if ($@) { debug(gettext("RPC::XML::Client not found, not pinging")); return; } # daemonize here so slow pings don't slow down wiki updates defined(my $pid = fork) or error("Can't fork: $!"); return if $pid; chdir '/'; POSIX::setsid() or error("Can't start a new session: $!"); open STDIN, '/dev/null'; open STDOUT, '>/dev/null'; open STDERR, '>&STDOUT' or error("Can't dup stdout: $!"); # Don't need to keep a lock on the wiki as a daemon. IkiWiki::unlockwiki(); foreach my $page (keys %toping) { my $title=pagetitle(basename($page), 0); my $url=urlto($page, "", 1); foreach my $pingurl (@{$config{pingurl}}) { debug("Pinging $pingurl for $page"); eval { my $client = RPC::XML::Client->new($pingurl); my $req = RPC::XML::request->new('weblogUpdates.ping', $title, $url); my $res = $client->send_request($req); if (! ref $res) { error("Did not receive response to ping"); } my $r=$res->value; if (! exists $r->{flerror} || $r->{flerror}) { error("Ping rejected: ".(exists $r->{message} ? $r->{message} : "[unknown reason]")); } }; if ($@) { error "Ping failed: $@"; } } } exit 0; # daemon done } sub rootpage (@) { my %params=@_; my $rootpage; if (exists $params{rootpage}) { $rootpage=bestlink($params{page}, $params{rootpage}); if (!length $rootpage) { $rootpage=$params{rootpage}; } } else { $rootpage=$params{page}; } return $rootpage; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/creole.pm0000644000000000000000000000122612211730001016521 0ustar #!/usr/bin/perl # WikiCreole markup # based on the WikiText plugin. 
package IkiWiki::Plugin::creole; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "creole", call => \&getsetup); hook(type => "htmlize", id => "creole", call => \&htmlize); } sub getsetup { return plugin => { safe => 1, rebuild => 1, # format plugin section => "format", }, } sub htmlize (@) { my %params=@_; my $content = $params{content}; eval q{use Text::WikiCreole}; return $content if $@; # don't parse WikiLinks, ikiwiki already does creole_customlinks(); creole_custombarelinks(); return creole_parse($content); } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/color.pm0000644000000000000000000000402612211730000016366 0ustar #!/usr/bin/perl # Ikiwiki text colouring plugin # Paweł‚ Tęcza package IkiWiki::Plugin::color; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "preprocess", id => "color", call => \&preprocess); hook(type => "format", id => "color", call => \&format); hook(type => "getsetup", id => "color", call => \&getsetup); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preserve_style ($$$) { my $foreground = shift; my $background = shift; my $text = shift; $foreground = defined $foreground ? lc($foreground) : ''; $background = defined $background ? lc($background) : ''; $text = '' unless (defined $text); # Validate colors. Only color name or color code are valid. 
$foreground = '' unless ($foreground && ($foreground =~ /^[a-z]+$/ || $foreground =~ /^#[0-9a-f]{3,6}$/)); $background = '' unless ($background && ($background =~ /^[a-z]+$/ || $background =~ /^#[0-9a-f]{3,6}$/)); my $preserved = ''; $preserved .= ''; $preserved .= 'color: '.$foreground if ($foreground); $preserved .= '; ' if ($foreground && $background); $preserved .= 'background-color: '.$background if ($background); $preserved .= ''; $preserved .= ''.$text.''; return $preserved; } sub replace_preserved_style ($) { my $content = shift; $content =~ s!((color: ([a-z]+|\#[0-9a-f]{3,6})?)?((; )?(background-color: ([a-z]+|\#[0-9a-f]{3,6})?)?)?)!!g; $content =~ s!!!g; return $content; } sub preprocess (@) { my %params = @_; return preserve_style($params{foreground}, $params{background}, # Preprocess the text to expand any preprocessor directives # embedded inside it. IkiWiki::preprocess($params{page}, $params{destpage}, $params{text})); } sub format (@) { my %params = @_; $params{content} = replace_preserved_style($params{content}); return $params{content}; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/localstyle.pm0000644000000000000000000000110012211730001017412 0ustar #!/usr/bin/perl package IkiWiki::Plugin::localstyle; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "localstyle", call => \&getsetup); hook(type => "pagetemplate", id => "localstyle", call => \&pagetemplate); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, }, } sub pagetemplate (@) { my %params=@_; my $template=$params{template}; if ($template->query(name => "local_css")) { my $best=bestlink($params{page}, 'local.css'); if ($best) { $template->param(local_css => $best); } } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/ddate.pm0000644000000000000000000000141012211730001016324 0ustar #!/usr/bin/perl # Discordian date support fnord ikiwiki. 
package IkiWiki::Plugin::ddate; use IkiWiki 3.00; no warnings; sub import { hook(type => "getsetup", id => "ddate", call => \&getsetup); } sub getsetup { return plugin => { safe => 1, rebuild => 1, }, } sub IkiWiki::formattime ($;$) { my $time=shift; my $format=shift; if (! defined $format) { $format=$config{timeformat}; if ($format eq '%c') { $format='on %A, the %e of %B, %Y. %N%nCelebrate %H'; } } eval q{ use DateTime; use DateTime::Calendar::Discordian; }; if ($@) { return "some time or other ($@ -- hail Eris!)"; } my $dt = DateTime->from_epoch(epoch => $time); my $dd = DateTime::Calendar::Discordian->from_object(object => $dt); return $dd->strftime($format); } 5 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/orphans.pm0000644000000000000000000000312312211730000016717 0ustar #!/usr/bin/perl # Provides a list of pages no other page links to. package IkiWiki::Plugin::orphans; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "orphans", call => \&getsetup); hook(type => "preprocess", id => "orphans", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { my %params=@_; $params{pages}="*" unless defined $params{pages}; # Needs to update whenever a link changes, on any page # since any page could link to one of the pages we're # considering as orphans. add_depends($params{page}, "*", deptype("links")); my @orphans=pagespec_match_list($params{page}, $params{pages}, # update when orphans are added/removed deptype => deptype("presence"), filter => sub { my $page=shift; # Filter out pages that other pages link to. return 1 if IkiWiki::backlink_pages($page); # Toplevel index is assumed to never be orphaned. return 1 if $page eq 'index'; # If the page has a link to some other page, it's # indirectly linked via that page's backlinks. return 1 if grep { length $_ && ($_ !~ /\/\Q$config{discussionpage}\E$/i || ! 
$config{discussion}) && bestlink($page, $_) !~ /^(\Q$page\E|)$/ } @{$links{$page}}; return 0; }, ); return gettext("All pages have other pages linking to them.") unless @orphans; return "
    \n". join("\n", map { "
  • ". htmllink($params{page}, $params{destpage}, $_, noimageinline => 1). "
  • " } sort @orphans). "
\n"; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/template.pm0000644000000000000000000000317412211730001017067 0ustar #!/usr/bin/perl # Structured template plugin. package IkiWiki::Plugin::template; use warnings; use strict; use IkiWiki 3.00; use Encode; sub import { hook(type => "getsetup", id => "template", call => \&getsetup); hook(type => "preprocess", id => "template", call => \&preprocess, scan => 1); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { my %params=@_; # This needs to run even in scan mode, in order to process # links and other metadata included via the template. my $scan=! defined wantarray; if (! exists $params{id}) { error gettext("missing id parameter") } # The bare id is used, so a page templates/$id can be used as # the template. my $template; eval { $template=template_depends($params{id}, $params{page}, blind_cache => 1); }; if ($@) { error sprintf(gettext("failed to process template %s"), htmllink($params{page}, $params{destpage}, "/templates/$params{id}"))." $@"; } $params{basename}=IkiWiki::basename($params{page}); foreach my $param (keys %params) { my $value=IkiWiki::preprocess($params{page}, $params{destpage}, $params{$param}, $scan); if ($template->query(name => $param)) { my $htmlvalue=IkiWiki::htmlize($params{page}, $params{destpage}, pagetype($pagesources{$params{page}}), $value); chomp $htmlvalue; $template->param($param => $htmlvalue); } if ($template->query(name => "raw_$param")) { chomp $value; $template->param("raw_$param" => $value); } } return IkiWiki::preprocess($params{page}, $params{destpage}, $template->output, $scan); } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/txt.pm0000644000000000000000000000341512211730001016071 0ustar #!/usr/bin/perl # .txt as a wiki page type - links WikiLinks and URIs. 
# # Copyright (C) 2008 Gabriel McManus # Licensed under the GNU General Public License, version 2 or later package IkiWiki::Plugin::txt; use warnings; use strict; use IkiWiki 3.00; use HTML::Entities; my $findurl=0; sub import { hook(type => "getsetup", id => "txt", call => \&getsetup); hook(type => "filter", id => "txt", call => \&filter); hook(type => "htmlize", id => "txt", call => \&htmlize); hook(type => "htmlizeformat", id => "txt", call => \&htmlizeformat); eval q{use URI::Find}; if (! $@) { $findurl=1; } } sub getsetup () { return plugin => { safe => 1, rebuild => 1, # format plugin section => "format", }, } # We use filter to convert raw text to HTML # (htmlize is called after other plugins insert HTML) sub filter (@) { my %params = @_; my $content = $params{content}; if (defined $pagesources{$params{page}} && $pagesources{$params{page}} =~ /\.txt$/) { if ($pagesources{$params{page}} eq 'robots.txt' && $params{page} eq $params{destpage}) { will_render($params{page}, 'robots.txt'); writefile('robots.txt', $config{destdir}, $content); } return txt2html($content); } return $content; } sub txt2html ($) { my $content=shift; encode_entities($content, "<>&"); if ($findurl) { my $finder = URI::Find->new(sub { my ($uri, $orig_uri) = @_; return qq|$orig_uri|; }); $finder->find(\$content); } return "
" . $content . "
"; } # We need this to register the .txt file extension sub htmlize (@) { my %params=@_; return $params{content}; } sub htmlizeformat ($$) { my $format=shift; my $content=shift; if ($format eq 'txt') { return txt2html($content); } else { return; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/signinedit.pm0000644000000000000000000000154212211730001017406 0ustar #!/usr/bin/perl package IkiWiki::Plugin::signinedit; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "signinedit", call => \&getsetup); hook(type => "canedit", id => "signinedit", call => \&canedit, last => 1); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "auth", }, } sub canedit ($$$) { my $page=shift; my $cgi=shift; my $session=shift; # Have the user sign in, if they are not already. This is why the # hook runs last, so that any hooks that don't need the user to # signin can override this. if (! defined $session->param("name") || ! IkiWiki::userinfo_get($session->param("name"), "regdate")) { return "" unless exists $IkiWiki::hooks{auth}; return sub { IkiWiki::needsignin($cgi, $session) }; } else { return ""; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/repolist.pm0000644000000000000000000000214712211730001017114 0ustar #!/usr/bin/perl package IkiWiki::Plugin::repolist; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "repolist", call => \&getsetup); hook(type => "checkconfig", id => "repolist", call => \&checkconfig); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "web", }, repositories => { type => "string", example => ["svn://svn.example.org/wiki/trunk"], description => "URIs of repositories containing the wiki's source", safe => 1, rebuild => undef, }, } my $relvcs; sub checkconfig () { if (defined $config{rcs} && $config{repositories}) { $relvcs=join("\n", map { s/"//g; # avoid quotes just in case qq{} } @{$config{repositories}}); hook(type => "pagetemplate", 
id => "repolist", call => \&pagetemplate); } } sub pagetemplate (@) { my %params=@_; my $page=$params{page}; my $template=$params{template}; if (defined $relvcs && $template->query(name => "relvcs")) { $template->param(relvcs => $relvcs); } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/camelcase.pm0000644000000000000000000000273412211730001017172 0ustar #!/usr/bin/perl # CamelCase links package IkiWiki::Plugin::camelcase; use warnings; use strict; use IkiWiki 3.00; # This regexp is based on the one in Text::WikiFormat. my $link_regexp=qr{ (? "getsetup", id => "camelcase", call => \&getsetup); hook(type => "linkify", id => "camelcase", call => \&linkify); hook(type => "scan", id => "camelcase", call => \&scan); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, }, camelcase_ignore => { type => "string", example => [], description => "list of words to not turn into links", safe => 1, rebuild => undef, # might change links }, } sub linkify (@) { my %params=@_; my $page=$params{page}; my $destpage=$params{destpage}; $params{content}=~s{$link_regexp}{ ignored($1) ? 
$1 : htmllink($page, $destpage, linkpage($1)) }eg; return $params{content}; } sub scan (@) { my %params=@_; my $page=$params{page}; my $content=$params{content}; while ($content =~ /$link_regexp/g) { add_link($page, linkpage($1)) unless ignored($1) } } sub ignored ($) { my $word=lc shift; grep { $word eq lc $_ } @{$config{'camelcase_ignore'}} } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/toggle.pm0000644000000000000000000000444312211730001016535 0ustar #!/usr/bin/perl package IkiWiki::Plugin::toggle; use warnings; use strict; use IkiWiki 3.00; sub import { add_underlay("javascript"); hook(type => "getsetup", id => "toggle", call => \&getsetup); hook(type => "preprocess", id => "toggle", call => \&preprocess_toggle); hook(type => "preprocess", id => "toggleable", call => \&preprocess_toggleable); hook(type => "format", id => "toggle", call => \&format); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub genid ($$) { my $page=shift; my $id=shift; $id="$page.$id"; # make it a legal html id attribute $id=~s/[^-a-zA-Z0-9.]/-/g; if ($id !~ /^[a-zA-Z]/) { $id="id$id"; } return $id; } sub preprocess_toggle (@) { my %params=(id => "default", text => "more", @_); my $id=genid($params{page}, $params{id}); return "$params{text}"; } sub preprocess_toggleable (@) { my %params=(id => "default", text => "", open => "no", @_); # Preprocess the text to expand any preprocessor directives # embedded inside it. $params{text}=IkiWiki::preprocess($params{page}, $params{destpage}, $params{text}); my $id=genid($params{page}, $params{id}); my $class=(lc($params{open}) ne "yes") ? "toggleable" : "toggleable-open"; # Should really be a postprocessor directive, oh well. Work around # markdown's dislike of markdown inside a
with various funky # whitespace. my ($indent)=$params{text}=~/( +)$/; $indent="" unless defined $indent; return "
\n\n$params{text}\n$indent
"; } sub format (@) { my %params=@_; if ($params{content}=~s!(
\s*)
!$1!g) { $params{content}=~s/
//g; if (! ($params{content}=~s!^(]*>)!$1.include_javascript($params{page})!em)) { # no tag, probably in preview mode $params{content}=include_javascript(undef).$params{content}; } } return $params{content}; } sub include_javascript ($) { my $from=shift; return ''."\n". ''; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/html.pm0000644000000000000000000000133412211730001016214 0ustar #!/usr/bin/perl # Raw html as a wiki page type. package IkiWiki::Plugin::html; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "html", call => \&getsetup); hook(type => "htmlize", id => "html", call => \&htmlize); hook(type => "htmlize", id => "htm", call => \&htmlize); # ikiwiki defaults to skipping .html files as a security measure; # make it process them so this plugin can take effect $config{wiki_file_prune_regexps} = [ grep { !m/\\\.x\?html\?\$/ } @{$config{wiki_file_prune_regexps}} ]; } sub getsetup () { return plugin => { safe => 1, rebuild => 1, # format plugin section => "format", }, } sub htmlize (@) { my %params=@_; return $params{content}; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/mirrorlist.pm0000644000000000000000000000301312211730001017452 0ustar #!/usr/bin/perl package IkiWiki::Plugin::mirrorlist; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "mirrorlist", call => \&getsetup); hook(type => "pagetemplate", id => "mirrorlist", call => \&pagetemplate); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, section => "web", }, mirrorlist => { type => "string", example => {}, description => "list of mirrors", safe => 1, rebuild => 1, }, mirrorlist_use_cgi => { type => 'boolean', example => 1, description => "generate links that point to the mirrors' ikiwiki CGI", safe => 1, rebuild => 1, }, } sub checkconfig () { if (! 
defined $config{mirrorlist_use_cgi}) { $config{mirrorlist_use_cgi}=0; } } sub pagetemplate (@) { my %params=@_; my $template=$params{template}; if ($template->query(name => "extrafooter") && keys %{$config{mirrorlist}} > 0) { my $value=$template->param("extrafooter"); $value.=mirrorlist($params{page}); $template->param(extrafooter => $value); } } sub mirrorlist ($) { my $page=shift; return ($config{html5} ? '' : '
'); } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/openid.pm0000644000000000000000000001557112211730001016536 0ustar #!/usr/bin/perl # OpenID support. package IkiWiki::Plugin::openid; use warnings; use strict; use IkiWiki 3.00; sub import { add_underlay("openid-selector"); add_underlay("jquery"); hook(type => "checkconfig", id => "openid", call => \&checkconfig); hook(type => "getsetup", id => "openid", call => \&getsetup); hook(type => "auth", id => "openid", call => \&auth); hook(type => "formbuilder_setup", id => "openid", call => \&formbuilder_setup, last => 1); } sub checkconfig () { if ($config{cgi}) { # Intercept normal signin form, so the openid selector # can be displayed. # # When other auth hooks are registered, give the selector # a reference to the normal signin form. require IkiWiki::CGI; my $real_cgi_signin; if (keys %{$IkiWiki::hooks{auth}} > 1) { $real_cgi_signin=\&IkiWiki::cgi_signin; } inject(name => "IkiWiki::cgi_signin", call => sub ($$) { openid_selector($real_cgi_signin, @_); }); } } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "auth", }, openid_realm => { type => "string", description => "url pattern of openid realm (default is cgiurl)", safe => 0, rebuild => 0, }, openid_cgiurl => { type => "string", description => "url to ikiwiki cgi to use for openid authentication (default is cgiurl)", safe => 0, rebuild => 0, }, } sub openid_selector { my $real_cgi_signin=shift; my $q=shift; my $session=shift; my $openid_url=$q->param('openid_identifier'); my $openid_error; if (! load_openid_module()) { if ($real_cgi_signin) { $real_cgi_signin->($q, $session); exit; } error(sprintf(gettext("failed to load openid module: "), @_)); } elsif (defined $q->param("action") && $q->param("action") eq "verify") { validate($q, $session, $openid_url, sub { $openid_error=shift; }); } my $template=IkiWiki::template("openid-selector.tmpl"); $template->param( cgiurl => IkiWiki::cgiurl(), (defined $openid_error ? 
(openid_error => $openid_error) : ()), (defined $openid_url ? (openid_url => $openid_url) : ()), ($real_cgi_signin ? (nonopenidform => $real_cgi_signin->($q, $session, 1)) : ()), ); IkiWiki::printheader($session); print IkiWiki::cgitemplate($q, "signin", $template->output); exit; } sub formbuilder_setup (@) { my %params=@_; my $form=$params{form}; my $session=$params{session}; my $cgi=$params{cgi}; if ($form->title eq "preferences" && IkiWiki::openiduser($session->param("name"))) { $form->field(name => "openid_identifier", disabled => 1, label => htmllink("", "", "ikiwiki/OpenID", noimageinline => 1), value => "", size => 1, force => 1, fieldset => "login", comment => $session->param("name")); $form->field(name => "email", type => "hidden"); } } sub validate ($$$;$) { my $q=shift; my $session=shift; my $openid_url=shift; my $errhandler=shift; my $csr=getobj($q, $session); my $claimed_identity = $csr->claimed_identity($openid_url); if (! $claimed_identity) { if ($errhandler) { $errhandler->($csr->err); return 0; } else { error($csr->err); } } # Ask for client to provide a name and email, if possible. # Try sreg and ax if ($claimed_identity->can("set_extension_args")) { $claimed_identity->set_extension_args( 'http://openid.net/extensions/sreg/1.1', { optional => 'email,fullname,nickname', }, ); $claimed_identity->set_extension_args( 'http://openid.net/srv/ax/1.0', { mode => 'fetch_request', 'required' => 'email,fullname,nickname,firstname', 'type.email' => "http://schema.openid.net/contact/email", 'type.fullname' => "http://axschema.org/namePerson", 'type.nickname' => "http://axschema.org/namePerson/friendly", 'type.firstname' => "http://axschema.org/namePerson/first", }, ); } my $cgiurl=$config{openid_cgiurl}; $cgiurl=$q->url if ! defined $cgiurl; my $trust_root=$config{openid_realm}; $trust_root=$cgiurl if ! 
defined $trust_root; my $check_url = $claimed_identity->check_url( return_to => auto_upgrade_https($q, "$cgiurl?do=postsignin"), trust_root => auto_upgrade_https($q, $trust_root), delayed_return => 1, ); # Redirect the user to the OpenID server, which will # eventually bounce them back to auth() IkiWiki::redirect($q, $check_url); exit 0; } sub auth ($$) { my $q=shift; my $session=shift; if (defined $q->param('openid.mode')) { my $csr=getobj($q, $session); if (my $setup_url = $csr->user_setup_url) { IkiWiki::redirect($q, $setup_url); } elsif ($csr->user_cancel) { IkiWiki::redirect($q, IkiWiki::baseurl(undef)); } elsif (my $vident = $csr->verified_identity) { $session->param(name => $vident->url); my @extensions; if ($vident->can("signed_extension_fields")) { @extensions=grep { defined } ( $vident->signed_extension_fields('http://openid.net/extensions/sreg/1.1'), $vident->signed_extension_fields('http://openid.net/srv/ax/1.0'), ); } my $nickname; foreach my $ext (@extensions) { foreach my $field (qw{value.email email}) { if (exists $ext->{$field} && defined $ext->{$field} && length $ext->{$field}) { $session->param(email => $ext->{$field}); if (! defined $nickname && $ext->{$field}=~/(.+)@.+/) { $nickname = $1; } last; } } foreach my $field (qw{value.nickname nickname value.fullname fullname value.firstname}) { if (exists $ext->{$field} && defined $ext->{$field} && length $ext->{$field}) { $nickname=$ext->{$field}; last; } } } if (defined $nickname) { $session->param(nickname => Encode::decode_utf8($nickname)); } } else { error("OpenID failure: ".$csr->err); } } elsif (defined $q->param('openid_identifier')) { # myopenid.com affiliate support validate($q, $session, $q->param('openid_identifier')); } } sub getobj ($$) { my $q=shift; my $session=shift; eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available eval q{use Net::OpenID::Consumer}; error($@) if $@; my $ua; eval q{use LWPx::ParanoidAgent}; if (! 
$@) { $ua=LWPx::ParanoidAgent->new; } else { $ua=useragent(); } # Store the secret in the session. my $secret=$session->param("openid_secret"); if (! defined $secret) { $secret=rand; $session->param(openid_secret => $secret); } my $cgiurl=$config{openid_cgiurl}; $cgiurl=$q->url if ! defined $cgiurl; return Net::OpenID::Consumer->new( ua => $ua, args => $q, consumer_secret => sub { return shift()+$secret }, required_root => auto_upgrade_https($q, $cgiurl), ); } sub auto_upgrade_https { my $q=shift; my $url=shift; if ($q->https()) { $url=~s/^http:/https:/i; } return $url; } sub load_openid_module { # Give up if module is unavailable to avoid needing to depend on it. eval q{use Net::OpenID::Consumer}; if ($@) { debug("unable to load Net::OpenID::Consumer, not enabling OpenID login ($@)"); return; } return 1; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/filecheck.pm0000644000000000000000000001247212211730001017172 0ustar #!/usr/bin/perl package IkiWiki::Plugin::filecheck; use warnings; use strict; use IkiWiki 3.00; my %units=( # size in bytes B => 1, byte => 1, KB => 2 ** 10, kilobyte => 2 ** 10, K => 2 ** 10, KB => 2 ** 10, kilobyte => 2 ** 10, M => 2 ** 20, MB => 2 ** 20, megabyte => 2 ** 20, G => 2 ** 30, GB => 2 ** 30, gigabyte => 2 ** 30, T => 2 ** 40, TB => 2 ** 40, terabyte => 2 ** 40, P => 2 ** 50, PB => 2 ** 50, petabyte => 2 ** 50, E => 2 ** 60, EB => 2 ** 60, exabyte => 2 ** 60, Z => 2 ** 70, ZB => 2 ** 70, zettabyte => 2 ** 70, Y => 2 ** 80, YB => 2 ** 80, yottabyte => 2 ** 80, # ikiwiki, if you find you need larger data quantities, either modify # yourself to add them, or travel back in time to 2008 and kill me. 
# -- Joey ); sub import { hook(type => "getsetup", id => "filecheck", call => \&getsetup); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, }, } sub parsesize ($) { my $size=shift; no warnings; my $base=$size+0; # force to number use warnings; foreach my $unit (sort keys %units) { if ($size=~/[0-9\s]\Q$unit\E$/i) { return $base * $units{$unit}; } } return $base; } # This is provided for other plugins that want to convert back the other way. sub humansize ($) { my $size=shift; foreach my $unit (reverse sort { $units{$a} <=> $units{$b} || $b cmp $a } keys %units) { if ($size / $units{$unit} > 0.25) { return (int($size / $units{$unit} * 10)/10).$unit; } } return $size; # near zero, or negative } package IkiWiki::PageSpec; sub match_maxsize ($$;@) { my $page=shift; my $maxsize=eval{IkiWiki::Plugin::filecheck::parsesize(shift)}; if ($@) { return IkiWiki::ErrorReason->new("unable to parse maxsize (or number too large)"); } my %params=@_; my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page}); if (! defined $file) { return IkiWiki::ErrorReason->new("file does not exist"); } if (-s $file > $maxsize) { return IkiWiki::FailReason->new("file too large (".(-s $file)." > $maxsize)"); } else { return IkiWiki::SuccessReason->new("file not too large"); } } sub match_minsize ($$;@) { my $page=shift; my $minsize=eval{IkiWiki::Plugin::filecheck::parsesize(shift)}; if ($@) { return IkiWiki::ErrorReason->new("unable to parse minsize (or number too large)"); } my %params=@_; my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page}); if (! defined $file) { return IkiWiki::ErrorReason->new("file does not exist"); } if (-s $file < $minsize) { return IkiWiki::FailReason->new("file too small"); } else { return IkiWiki::SuccessReason->new("file not too small"); } } sub match_mimetype ($$;@) { my $page=shift; my $wanted=shift; my %params=@_; my $file=exists $params{file} ? 
$params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page}); if (! defined $file) { return IkiWiki::ErrorReason->new("file does not exist"); } # Get the mime type. # # First, try File::Mimeinfo. This is fast, but doesn't recognise # all files. eval q{use File::MimeInfo::Magic}; my $mimeinfo_ok=! $@; my $mimetype; if ($mimeinfo_ok) { $mimetype=File::MimeInfo::Magic::magic($file); } # Fall back to using file, which has a more complete # magic database. if (! defined $mimetype) { open(my $file_h, "-|", "file", "-bi", $file); $mimetype=<$file_h>; chomp $mimetype; close $file_h; } if (! defined $mimetype || $mimetype !~s /;.*//) { # Fall back to default value. $mimetype=File::MimeInfo::Magic::default($file) if $mimeinfo_ok; if (! defined $mimetype) { $mimetype="unknown"; } } my $regexp=IkiWiki::glob2re($wanted); if ($mimetype!~$regexp) { return IkiWiki::FailReason->new("file MIME type is $mimetype, not $wanted"); } else { return IkiWiki::SuccessReason->new("file MIME type is $mimetype"); } } sub match_virusfree ($$;@) { my $page=shift; my $wanted=shift; my %params=@_; my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page}); if (! defined $file) { return IkiWiki::ErrorReason->new("file does not exist"); } if (! exists $IkiWiki::config{virus_checker} || ! length $IkiWiki::config{virus_checker}) { return IkiWiki::ErrorReason->new("no virus_checker configured"); } # The file needs to be fed into the virus checker on stdin, # because the file is not world-readable, and if clamdscan is # used, clamd would fail to read it. eval q{use IPC::Open2}; error($@) if $@; open (IN, "<", $file) || return IkiWiki::ErrorReason->new("failed to read file"); binmode(IN); my $sigpipe=0; $SIG{PIPE} = sub { $sigpipe=1 }; my $pid=open2(\*CHECKER_OUT, "<&IN", $IkiWiki::config{virus_checker}); my $reason=; chomp $reason; 1 while (); close(CHECKER_OUT); waitpid $pid, 0; $SIG{PIPE}="DEFAULT"; if ($sigpipe || $?) { if (! 
length $reason) { $reason="virus checker $IkiWiki::config{virus_checker}; failed with no output"; } return IkiWiki::FailReason->new("file seems to contain a virus ($reason)"); } else { return IkiWiki::SuccessReason->new("file seems virusfree ($reason)"); } } sub match_ispage ($$;@) { my $filename=shift; if (defined IkiWiki::pagetype($filename)) { return IkiWiki::SuccessReason->new("file is a wiki page"); } else { return IkiWiki::FailReason->new("file is not a wiki page"); } } ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/htmltidy.pm0000644000000000000000000000271112211730000017105 0ustar #!/usr/bin/perl # HTML Tidy plugin # requires 'tidy' binary, found in Debian or http://tidy.sf.net/ # mostly a proof-of-concept on how to use external filters. # It is particularly useful when the html plugin is used. # # by Faidon Liambotis package IkiWiki::Plugin::htmltidy; use warnings; use strict; use IkiWiki 3.00; use IPC::Open2; sub import { hook(type => "getsetup", id => "tidy", call => \&getsetup); hook(type => "sanitize", id => "tidy", call => \&sanitize); hook(type => "checkconfig", id => "tidy", call => \&checkconfig); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, }, htmltidy => { type => "string", description => "tidy command line", safe => 0, # path rebuild => undef, }, } sub checkconfig () { if (! defined $config{htmltidy}) { $config{htmltidy}="tidy -quiet -asxhtml -utf8 --show-body-only yes --show-warnings no --tidy-mark no --markup yes"; } } sub sanitize (@) { my %params=@_; return $params{content} unless defined $config{htmltidy}; my $pid; my $sigpipe=0; $SIG{PIPE}=sub { $sigpipe=1 }; $pid=open2(*IN, *OUT, "$config{htmltidy} 2>/dev/null"); # open2 doesn't respect "use open ':utf8'" binmode (IN, ':utf8'); binmode (OUT, ':utf8'); print OUT $params{content}; close OUT; local $/ = undef; my $ret=; close IN; waitpid $pid, 0; $SIG{PIPE}="DEFAULT"; if ($sigpipe || ! 
defined $ret) { return gettext("htmltidy failed to parse this html"); } return $ret; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/trail.pm0000644000000000000000000002571512211730001016374 0ustar #!/usr/bin/perl # Copyright © 2008-2011 Joey Hess # Copyright © 2009-2012 Simon McVittie # Licensed under the GNU GPL, version 2, or any later version published by the # Free Software Foundation package IkiWiki::Plugin::trail; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "trail", call => \&getsetup); hook(type => "needsbuild", id => "trail", call => \&needsbuild); hook(type => "preprocess", id => "trailoptions", call => \&preprocess_trailoptions, scan => 1); hook(type => "preprocess", id => "trailitem", call => \&preprocess_trailitem, scan => 1); hook(type => "preprocess", id => "trailitems", call => \&preprocess_trailitems, scan => 1); hook(type => "preprocess", id => "traillink", call => \&preprocess_traillink, scan => 1); hook(type => "pagetemplate", id => "trail", call => \&pagetemplate); hook(type => "build_affected", id => "trail", call => \&build_affected); } # Page state # # If a page $T is a trail, then it can have # # * $pagestate{$T}{trail}{contents} # Reference to an array of lists each containing either: # - [pagenames => "page1", "page2"] # Those literal pages # - [link => "link"] # A link specification, pointing to the same page that [[link]] # would select # - [pagespec => "posts/*", "age", 0] # A match by pagespec; the third array element is the sort order # and the fourth is whether to reverse sorting # # * $pagestate{$T}{trail}{sort} # A sorting order; if absent or undef, the trail is in the order given # by the links that form it # # * $pagestate{$T}{trail}{circular} # True if this trail is circular (i.e. going "next" from the last item is # allowed, and takes you back to the first) # # * $pagestate{$T}{trail}{reverse} # True if C is to be reversed. 
# # If a page $M is a member of a trail $T, then it has # # * $pagestate{$M}{trail}{item}{$T}[0] # The page before this one in C<$T> at the last rebuild, or undef. # # * $pagestate{$M}{trail}{item}{$T}[1] # The page after this one in C<$T> at the last refresh, or undef. sub getsetup () { return plugin => { safe => 1, rebuild => undef, }, } # Cache of pages' old titles, so we can tell whether they changed my %old_trail_titles; sub needsbuild (@) { my $needsbuild=shift; foreach my $page (keys %pagestate) { if (exists $pagestate{$page}{trail}) { if (exists $pagesources{$page} && grep { $_ eq $pagesources{$page} } @$needsbuild) { # Remember its title, so we can know whether # it changed. $old_trail_titles{$page} = title_of($page); # Remove state, it will be re-added # if the preprocessor directive is still # there during the rebuild. {item} is the # only thing that's added for items, not # trails, and it's harmless to delete that - # the item is being rebuilt anyway. delete $pagestate{$page}{trail}; } } } return $needsbuild; } my $scanned = 0; sub preprocess_trailoptions (@) { my %params = @_; if (exists $params{circular}) { $pagestate{$params{page}}{trail}{circular} = IkiWiki::yesno($params{circular}); } if (exists $params{sort}) { $pagestate{$params{page}}{trail}{sort} = $params{sort}; } if (exists $params{reverse}) { $pagestate{$params{page}}{trail}{reverse} = $params{reverse}; } return ""; } sub preprocess_trailitem (@) { my $link = shift; shift; # avoid collecting everything in the preprocess stage if we already # did in the scan stage if (defined wantarray) { return "" if $scanned; } else { $scanned = 1; } my %params = @_; my $trail = $params{page}; $link = linkpage($link); add_link($params{page}, $link, 'trail'); push @{$pagestate{$params{page}}{trail}{contents}}, [link => $link]; return ""; } sub preprocess_trailitems (@) { my %params = @_; # avoid collecting everything in the preprocess stage if we already # did in the scan stage if (defined wantarray) { 
return "" if $scanned; } else { $scanned = 1; } # trail members from a pagespec ought to be in some sort of order, # and path is a nice obvious default $params{sort} = 'path' unless exists $params{sort}; $params{reverse} = 'no' unless exists $params{reverse}; if (exists $params{pages}) { push @{$pagestate{$params{page}}{trail}{contents}}, ["pagespec" => $params{pages}, $params{sort}, IkiWiki::yesno($params{reverse})]; } if (exists $params{pagenames}) { push @{$pagestate{$params{page}}{trail}{contents}}, [pagenames => (split ' ', $params{pagenames})]; } return ""; } sub preprocess_traillink (@) { my $link = shift; shift; my %params = @_; my $trail = $params{page}; $link =~ qr{ (?: ([^\|]+) # 1: link text \| # followed by | )? # optional (.+) # 2: page to link to }x; my $linktext = $1; $link = linkpage($2); add_link($params{page}, $link, 'trail'); # avoid collecting everything in the preprocess stage if we already # did in the scan stage my $already; if (defined wantarray) { $already = $scanned; } else { $scanned = 1; } push @{$pagestate{$params{page}}{trail}{contents}}, [link => $link] unless $already; if (defined $linktext) { $linktext = pagetitle($linktext); } if (exists $params{text}) { $linktext = $params{text}; } if (defined $linktext) { return htmllink($trail, $params{destpage}, $link, linktext => $linktext); } return htmllink($trail, $params{destpage}, $link); } # trail => [member1, member2] my %trail_to_members; # member => { trail => [prev, next] } # e.g. if %trail_to_members = ( # trail1 => ["member1", "member2"], # trail2 => ["member0", "member1"], # ) # # then $member_to_trails{member1} = { # trail1 => [undef, "member2"], # trail2 => ["member0", undef], # } my %member_to_trails; # member => 1 my %rebuild_trail_members; sub trails_differ { my ($old, $new) = @_; foreach my $trail (keys %$old) { if (! 
exists $new->{$trail}) { return 1; } if (exists $old_trail_titles{$trail} && title_of($trail) ne $old_trail_titles{$trail}) { return 1; } my ($old_p, $old_n) = @{$old->{$trail}}; my ($new_p, $new_n) = @{$new->{$trail}}; $old_p = "" unless defined $old_p; $old_n = "" unless defined $old_n; $new_p = "" unless defined $new_p; $new_n = "" unless defined $new_n; if ($old_p ne $new_p) { return 1; } if (exists $old_trail_titles{$old_p} && title_of($old_p) ne $old_trail_titles{$old_p}) { return 1; } if ($old_n ne $new_n) { return 1; } if (exists $old_trail_titles{$old_n} && title_of($old_n) ne $old_trail_titles{$old_n}) { return 1; } } foreach my $trail (keys %$new) { if (! exists $old->{$trail}) { return 1; } } return 0; } my $done_prerender = 0; sub prerender { return if $done_prerender; %trail_to_members = (); %member_to_trails = (); foreach my $trail (keys %pagestate) { next unless exists $pagestate{$trail}{trail}{contents}; my $members = []; my @contents = @{$pagestate{$trail}{trail}{contents}}; foreach my $c (@contents) { if ($c->[0] eq 'pagespec') { push @$members, pagespec_match_list($trail, $c->[1], sort => $c->[2], reverse => $c->[3]); } elsif ($c->[0] eq 'pagenames') { my @pagenames = @$c; shift @pagenames; foreach my $page (@pagenames) { if (exists $pagesources{$page}) { push @$members, $page; } else { # rebuild trail if it turns up add_depends($trail, $page, deptype("presence")); } } } elsif ($c->[0] eq 'link') { my $best = bestlink($trail, $c->[1]); push @$members, $best if length $best; } } if (defined $pagestate{$trail}{trail}{sort}) { # re-sort @$members = pagespec_match_list($trail, 'internal(*)', list => $members, sort => $pagestate{$trail}{trail}{sort}); } if (IkiWiki::yesno $pagestate{$trail}{trail}{reverse}) { @$members = reverse @$members; } # uniquify my %seen; my @tmp; foreach my $member (@$members) { push @tmp, $member unless $seen{$member}; $seen{$member} = 1; } $members = [@tmp]; for (my $i = 0; $i <= $#$members; $i++) { my $member = 
$members->[$i]; my $prev; $prev = $members->[$i - 1] if $i > 0; my $next = $members->[$i + 1]; $member_to_trails{$member}{$trail} = [$prev, $next]; } if ((scalar @$members) > 1 && $pagestate{$trail}{trail}{circular}) { $member_to_trails{$members->[0]}{$trail}[0] = $members->[$#$members]; $member_to_trails{$members->[$#$members]}{$trail}[1] = $members->[0]; } $trail_to_members{$trail} = $members; } foreach my $member (keys %pagestate) { if (exists $pagestate{$member}{trail}{item} && ! exists $member_to_trails{$member}) { $rebuild_trail_members{$member} = 1; delete $pagestate{$member}{trail}{item}; } } foreach my $member (keys %member_to_trails) { if (! exists $pagestate{$member}{trail}{item}) { $rebuild_trail_members{$member} = 1; } else { if (trails_differ($pagestate{$member}{trail}{item}, $member_to_trails{$member})) { $rebuild_trail_members{$member} = 1; } } $pagestate{$member}{trail}{item} = $member_to_trails{$member}; } $done_prerender = 1; } sub build_affected { my %affected; # In principle we might not have done this yet, although in practice # at least the trail itself has probably changed, and its template # almost certainly contains TRAILS or TRAILLOOP, triggering our # prerender as a side-effect. prerender(); foreach my $member (keys %rebuild_trail_members) { $affected{$member} = sprintf(gettext("building %s, its previous or next page has changed"), $member); } return %affected; } sub title_of ($) { my $page = shift; if (defined ($pagestate{$page}{meta}{title})) { return $pagestate{$page}{meta}{title}; } return pagetitle(IkiWiki::basename($page)); } my $recursive = 0; sub pagetemplate (@) { my %params = @_; my $page = $params{page}; my $template = $params{template}; return unless length $page; if ($template->query(name => 'trails') && ! 
$recursive) { prerender(); $recursive = 1; my $inner = template("trails.tmpl", blind_cache => 1); IkiWiki::run_hooks(pagetemplate => sub { shift->(%params, template => $inner) }); $template->param(trails => $inner->output); $recursive = 0; } if ($template->query(name => 'trailloop')) { prerender(); my @trails; # sort backlinks by page name to have a consistent order foreach my $trail (sort keys %{$member_to_trails{$page}}) { my $members = $trail_to_members{$trail}; my ($prev, $next) = @{$member_to_trails{$page}{$trail}}; my ($prevurl, $nexturl, $prevtitle, $nexttitle); if (defined $prev) { $prevurl = urlto($prev, $page); $prevtitle = title_of($prev); } if (defined $next) { $nexturl = urlto($next, $page); $nexttitle = title_of($next); } push @trails, { prevpage => $prev, prevtitle => $prevtitle, prevurl => $prevurl, nextpage => $next, nexttitle => $nexttitle, nexturl => $nexturl, trailpage => $trail, trailtitle => title_of($trail), trailurl => urlto($trail, $page), }; } $template->param(trailloop => \@trails); } } 1; ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/haiku.pm0000644000000000000000000000207212211730001016351 0ustar #!/usr/bin/perl # haiku generator plugin package IkiWiki::Plugin::haiku; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "haiku", call => \&getsetup); hook(type => "preprocess", id => "haiku", call => \&preprocess); } sub getsetup { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { my %params=@_; my $haiku; eval q{use Coy}; if ($@ || ! Coy->can("Coy::with_haiku")) { my @canned=( "The lack of a Coy: No darting, subtle haiku. Instead, canned tuna. ", "apt-get install Coy no, wait, that's not quite it instead: libcoy-perl ", "Coyly I'll do it, no code, count Five-Seven-Five to make a haiku. ", ); $haiku=$canned[rand @canned]; } else { $haiku=Coy::with_haiku($params{hint} ? 
$params{hint} : $params{page}); # trim off other text $haiku=~s/\s+-----\n//s; $haiku=~s/\s+-----.*//s; } $haiku=~s/^\s+//mg; $haiku=~s/\n/
\n/mg; return "\n\n

$haiku

\n\n"; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/otl.pm0000644000000000000000000000355012211730000016047 0ustar #!/usr/bin/perl # outline markup package IkiWiki::Plugin::otl; use warnings; use strict; use IkiWiki 3.00; use open qw{:utf8 :std}; sub import { hook(type => "getsetup", id => "otl", call => \&getsetup); hook(type => "htmlize", id => "otl", call => \&htmlize); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, # format plugin section => "format", }, } sub htmlize (@) { my %params=@_; # Munge up check boxes to look a little bit better. my $checked=htmllink($params{page}, $params{page}, "smileys/star_on.png", linktext => "[X]"); my $unchecked=htmllink($params{page}, $params{page}, "smileys/star_off.png", linktext => "[_]"); $params{content}=~s/^(\s*)\[X\]\s/${1}$checked /mg; $params{content}=~s/^(\s*)\[_\]\s/${1}$unchecked /mg; # Can't use open2 since otl2html doesn't play nice with buffering. # Instead, fork off a child process that will run otl2html and feed # it the content. Then read otl2html's response. my $tries=10; my $pid; do { $pid = open(KID_TO_READ, "-|"); unless (defined $pid) { $tries--; if ($tries < 1) { debug("failed to fork: $@"); return $params{content}; } } } until defined $pid; if (! $pid) { $tries=10; $pid=undef; do { $pid = open(KID_TO_WRITE, "|-"); unless (defined $pid) { $tries--; if ($tries < 1) { debug("failed to fork: $@"); print $params{content}; exit; } } } until defined $pid; if (! $pid) { if (! exec 'otl2html', '-S', '/dev/null', '-T', '/dev/stdin') { debug("failed to run otl2html: $@"); print $params{content}; exit; } } print KID_TO_WRITE $params{content}; close KID_TO_WRITE; waitpid $pid, 0; exit; } local $/ = undef; my $ret=; close KID_TO_READ; waitpid $pid, 0; $ret=~s/.*//s; $ret=~s/.*//s; $ret=~s/\n"; } return $res; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/listdirectives.pm0000644000000000000000000000460312211730001020307 0ustar #!/usr/bin/perl # Ikiwiki listdirectives plugin. 
package IkiWiki::Plugin::listdirectives; use warnings; use strict; use IkiWiki 3.00; sub import { add_underlay("directives"); hook(type => "getsetup", id => "listdirectives", call => \&getsetup); hook(type => "checkconfig", id => "listdirectives", call => \&checkconfig); hook(type => "needsbuild", id => "listdirectives", call => \&needsbuild); hook(type => "preprocess", id => "listdirectives", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, directive_description_dir => { type => "string", description => "directory in srcdir that contains directive descriptions", example => "ikiwiki/directive", safe => 1, rebuild => 1, }, } my @fulllist; my @shortlist; my $pluginstring; sub checkconfig () { if (! defined $config{directive_description_dir}) { $config{directive_description_dir} = "ikiwiki/directive"; } else { $config{directive_description_dir} =~ s/\/+$//; } } sub needsbuild (@) { my $needsbuild=shift; @fulllist = grep { ! /^_/ } sort keys %{$IkiWiki::hooks{preprocess}}; @shortlist = grep { ! $IkiWiki::hooks{preprocess}{$_}{shortcut} } @fulllist; $pluginstring = join(' ', @shortlist) . " : " . join(' ', @fulllist); foreach my $page (keys %pagestate) { if (exists $pagestate{$page}{listdirectives}{shown}) { if ($pagestate{$page}{listdirectives}{shown} ne $pluginstring) { push @$needsbuild, $pagesources{$page}; } if (exists $pagesources{$page} && grep { $_ eq $pagesources{$page} } @$needsbuild) { # remove state, will be re-added if # the [[!listdirectives]] is still there during the # rebuild delete $pagestate{$page}{listdirectives}{shown}; } } } return $needsbuild; } sub preprocess (@) { my %params=@_; $pagestate{$params{destpage}}{listdirectives}{shown}=$pluginstring; my @pluginlist; if (defined $params{generated}) { @pluginlist = @fulllist; } else { @pluginlist = @shortlist; } my $result = '
    '; foreach my $plugin (@pluginlist) { $result .= '
  • '; my $link=linkpage($config{directive_description_dir}."/".$plugin); add_depends($params{page}, $link, deptype("presence")); $result .= htmllink($params{page}, $params{destpage}, $link); $result .= '
  • '; } $result .= "
"; return $result; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/external.pm0000644000000000000000000001311612211730000017072 0ustar #!/usr/bin/perl # Support for external plugins written in other languages. # Communication via XML RPC to a pipe. # See externaldemo for an example of a plugin that uses this. package IkiWiki::Plugin::external; use warnings; use strict; use IkiWiki 3.00; use RPC::XML; use IPC::Open2; use IO::Handle; my %plugins; sub import { my $self=shift; my $plugin=shift; return unless defined $plugin; my ($plugin_read, $plugin_write); my $pid = open2($plugin_read, $plugin_write, IkiWiki::possibly_foolish_untaint($plugin)); # open2 doesn't respect "use open ':utf8'" binmode($plugin_read, ':utf8'); binmode($plugin_write, ':utf8'); $plugins{$plugin}={in => $plugin_read, out => $plugin_write, pid => $pid, accum => ""}; $RPC::XML::ENCODING="utf-8"; $RPC::XML::FORCE_STRING_ENCODING="true"; rpc_call($plugins{$plugin}, "import"); } sub rpc_write ($$) { my $fh=shift; my $string=shift; $fh->print($string."\n"); $fh->flush; } sub rpc_call ($$;@) { my $plugin=shift; my $command=shift; # send the command my $req=RPC::XML::request->new($command, @_); rpc_write($plugin->{out}, $req->as_string); # process incoming rpc until a result is available while ($_ = $plugin->{in}->getline) { $plugin->{accum}.=$_; while ($plugin->{accum} =~ /^\s*(<\?xml\s.*?<\/(?:methodCall|methodResponse)>)\n(.*)/s) { $plugin->{accum}=$2; my $parser; eval q{ use RPC::XML::ParserFactory; $parser = RPC::XML::ParserFactory->new; }; if ($@) { # old interface eval q{ use RPC::XML::Parser; $parser = RPC::XML::Parser->new; }; } my $r=$parser->parse($1); error("XML RPC parser failure: $r") unless ref $r; if ($r->isa('RPC::XML::response')) { my $value=$r->value; if ($r->is_fault($value)) { # throw the error as best we can print STDERR $value->string."\n"; return ""; } elsif ($value->isa('RPC::XML::array')) { return @{$value->value}; } elsif ($value->isa('RPC::XML::struct')) { my 
%hash=%{$value->value}; # XML-RPC v1 does not allow for # nil/null/None/undef values to be # transmitted. The extension # is the right fix, but for # back-compat, let external plugins send # a hash with one key "null" pointing # to an empty string. if (exists $hash{null} && $hash{null} eq "" && int(keys(%hash)) == 1) { return undef; } return %hash; } else { return $value->value; } } my $name=$r->name; my @args=map { $_->value } @{$r->args}; # When dispatching a function, first look in # IkiWiki::RPC::XML. This allows overriding # IkiWiki functions with RPC friendly versions. my $ret; if (exists $IkiWiki::RPC::XML::{$name}) { $ret=$IkiWiki::RPC::XML::{$name}($plugin, @args); } elsif (exists $IkiWiki::{$name}) { $ret=$IkiWiki::{$name}(@args); } else { error("XML RPC call error, unknown function: $name"); } # XML-RPC v1 does not allow for nil/null/None/undef # values to be transmitted, so until XML::RPC::Parser # honours v2 (), send a hash with one key "null" # pointing to an empty string. if (! defined $ret) { $ret={"null" => ""}; } my $string=eval { RPC::XML::response->new($ret)->as_string }; if ($@ && ref $ret) { # One common reason for serialisation to # fail is a complex return type that cannot # be represented as an XML RPC response. # Handle this case by just returning 1. 
$string=eval { RPC::XML::response->new(1)->as_string }; } if ($@) { error("XML response serialisation failed: $@"); } rpc_write($plugin->{out}, $string); } } return undef; } package IkiWiki::RPC::XML; use Memoize; sub getvar ($$$) { my $plugin=shift; my $varname="IkiWiki::".shift; my $key=shift; no strict 'refs'; my $ret=$varname->{$key}; use strict 'refs'; return $ret; } sub setvar ($$$;@) { my $plugin=shift; my $varname="IkiWiki::".shift; my $key=shift; my $value=shift; no strict 'refs'; my $ret=$varname->{$key}=$value; use strict 'refs'; return $ret; } sub getstate ($$$$) { my $plugin=shift; my $page=shift; my $id=shift; my $key=shift; return $IkiWiki::pagestate{$page}{$id}{$key}; } sub setstate ($$$$;@) { my $plugin=shift; my $page=shift; my $id=shift; my $key=shift; my $value=shift; return $IkiWiki::pagestate{$page}{$id}{$key}=$value; } sub getargv ($) { my $plugin=shift; return \@ARGV; } sub setargv ($@) { my $plugin=shift; my $array=shift; @ARGV=@$array; } sub inject ($@) { # Bind a given perl function name to a particular RPC request. my $plugin=shift; my %params=@_; if (! exists $params{name} || ! exists $params{call}) { die "inject needs name and call parameters"; } my $sub = sub { IkiWiki::Plugin::external::rpc_call($plugin, $params{call}, @_) }; $sub=memoize($sub) if $params{memoize}; # This will add it to the symbol table even if not present. no warnings; eval qq{*$params{name}=\$sub}; use warnings; # This will ensure that everywhere it was exported to sees # the injected version. 
IkiWiki::inject(name => $params{name}, call => $sub); return 1; } sub hook ($@) { # the call parameter is a function name to call, since XML RPC # cannot pass a function reference my $plugin=shift; my %params=@_; my $callback=$params{call}; delete $params{call}; IkiWiki::hook(%params, call => sub { IkiWiki::Plugin::external::rpc_call($plugin, $callback, @_); }); } sub pagespec_match ($@) { # convert return object into a XML RPC boolean my $plugin=shift; my $page=shift; my $spec=shift; return RPC::XML::boolean->new(0 + IkiWiki::pagespec_match( $page, $spec, @_)); } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/theme.pm0000644000000000000000000000336412211730001016357 0ustar #!/usr/bin/perl package IkiWiki::Plugin::theme; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "theme", call => \&getsetup); hook(type => "checkconfig", id => "theme", call => \&checkconfig); hook(type => "needsbuild", id => "theme", call => \&needsbuild); hook(type => "pagetemplate", id => "theme", call => \&pagetemplate); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "web", }, theme => { type => "string", example => "actiontabs", description => "name of theme to enable", safe => 1, rebuild => 0, }, } my $added=0; sub checkconfig () { if (! 
$added && exists $config{theme} && $config{theme} =~ /^\w+$/) { add_underlay("themes/".$config{theme}); $added=1; } } sub needsbuild ($) { my $needsbuild=shift; if (($config{theme} || '') ne ($wikistate{theme}{currenttheme} || '')) { # theme changed; ensure all files in the theme are built my %needsbuild=map { $_ => 1 } @$needsbuild; if ($config{theme}) { foreach my $file (glob("$config{underlaydirbase}/themes/$config{theme}/*")) { if (-f $file) { my $f=IkiWiki::basename($file); push @$needsbuild, $f unless $needsbuild{$f}; } } } elsif ($wikistate{theme}{currenttheme}) { foreach my $file (glob("$config{underlaydirbase}/themes/$wikistate{theme}{currenttheme}/*")) { my $f=IkiWiki::basename($file); if (-f $file && defined eval { srcfile($f) }) { push @$needsbuild, $f; } } } $wikistate{theme}{currenttheme}=$config{theme}; } return $needsbuild; } sub pagetemplate (@) { my %params=@_; my $template=$params{template}; if (exists $config{theme} && length $config{theme}) { $template->param("theme_$config{theme}" => 1); } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/testpagespec.pm0000644000000000000000000000136012211730001017736 0ustar #!/usr/bin/perl package IkiWiki::Plugin::testpagespec; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "testpagespec", call => \&getsetup); hook(type => "preprocess", id => "testpagespec", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, }, } sub preprocess (@) { my %params=@_; foreach my $param (qw{match pagespec}) { if (! 
exists $params{$param}) { error sprintf(gettext("%s parameter is required"), $param); } } add_depends($params{page}, $params{pagespec}); my $ret=pagespec_match($params{match}, $params{pagespec}, location => $params{page}); if ($ret) { return "match: $ret"; } else { return "no match: $ret"; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/graphviz.pm0000644000000000000000000000742112211730001017105 0ustar #!/usr/bin/perl # graphviz plugin for ikiwiki: render graphviz source as an image. # Josh Triplett package IkiWiki::Plugin::graphviz; use warnings; use strict; use IkiWiki 3.00; use IPC::Open2; sub import { hook(type => "getsetup", id => "graphviz", call => \&getsetup); hook(type => "needsbuild", id => "version", call => \&needsbuild); hook(type => "preprocess", id => "graph", call => \&graph, scan => 1); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } my %graphviz_programs = ( "dot" => 1, "neato" => 1, "fdp" => 1, "twopi" => 1, "circo" => 1 ); sub needsbuild { my $needsbuild=shift; foreach my $page (keys %pagestate) { if (exists $pagestate{$page}{graph} && exists $pagesources{$page} && grep { $_ eq $pagesources{$page} } @$needsbuild) { # remove state, will be re-added if # the graph is still there during the rebuild delete $pagestate{$page}{graph}; } } return $needsbuild; } sub render_graph (\%) { my %params = %{(shift)}; my $src = "charset=\"utf-8\";\n"; $src .= "ratio=compress;\nsize=\"".($params{width}+0).", ".($params{height}+0)."\";\n" if defined $params{width} and defined $params{height}; $src .= $params{src}; $src .= "}\n"; # Use the sha1 of the graphviz code as part of its filename, # and as a unique identifier for its imagemap. 
eval q{use Digest::SHA}; error($@) if $@; my $sha=IkiWiki::possibly_foolish_untaint(Digest::SHA::sha1_hex($params{type}.$src)); $src = "$params{type} graph$sha {\n".$src; my $dest=$params{page}."/graph-".$sha.".png"; will_render($params{page}, $dest); my $map=$pagestate{$params{destpage}}{graph}{$sha}; if (! -e "$config{destdir}/$dest" || ! defined $map) { # Use ikiwiki's function to create the image file, this makes # sure needed subdirs are there and does some sanity checking. writefile($dest, $config{destdir}, ""); my $pid; my $sigpipe=0; $SIG{PIPE}=sub { $sigpipe=1 }; $pid=open2(*IN, *OUT, "$params{prog} -Tpng -o '$config{destdir}/$dest' -Tcmapx"); # open2 doesn't respect "use open ':utf8'" binmode (IN, ':utf8'); binmode (OUT, ':utf8'); print OUT $src; close OUT; local $/ = undef; $map=$pagestate{$params{destpage}}{graph}{$sha}=; close IN; waitpid $pid, 0; $SIG{PIPE}="DEFAULT"; error gettext("failed to run graphviz") if ($sigpipe || $?); } return "\n". $map; } sub graph (@) { my %params=@_; # Support wikilinks in the graph source. my $src=$params{src}; $src="" unless defined $src; $src=IkiWiki::linkify($params{page}, $params{destpage}, $params{src}); return unless defined wantarray; # scan mode short-circuit if ($src ne $params{src}) { # linkify makes html links, but graphviz wants plain # urls. This is, frankly a hack: Process source as html, # throw out everything inside tags that is not a href. 
my $s; my $nested=0; use HTML::Parser; error $@ if $@; my $p=HTML::Parser->new(api_version => 3); $p->handler(start => sub { my %attrs=%{shift()}; if (exists $attrs{href}) { if ($s=~/href\s*=\s*"$/) { $s.=$attrs{href}; } elsif ($s=~/href\s*=\s*$/) { $s.="\"$attrs{href}\""; } else { $s.="href=\"$attrs{href}\""; } } $nested++; }, "attr"); $p->handler(end => sub { $nested--; }); $p->handler(default => sub { $s.=join("", @_) unless $nested; }, "text"); $p->parse($src); $p->eof; $s=~s/\[ href= \]//g; # handle self-links $params{src}=$s; } else { $params{src}=$src; } $params{type} = "digraph" unless defined $params{type}; $params{prog} = "dot" unless defined $params{prog}; error gettext("prog not a valid graphviz program") unless $graphviz_programs{$params{prog}}; return render_graph(%params); } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/pagecount.pm0000644000000000000000000000155712211730001017244 0ustar #!/usr/bin/perl package IkiWiki::Plugin::pagecount; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "pagecount", call => \&getsetup); hook(type => "preprocess", id => "pagecount", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { my %params=@_; my $pages=defined $params{pages} ? $params{pages} : "*"; # Just get a list of all the pages, and count the items in it. # Use a presence dependency to only update when pages are added # or removed. 
if ($pages eq '*') { # optimisation to avoid needing to try matching every page add_depends($params{page}, $pages, deptype("presence")); return scalar keys %pagesources; } return scalar pagespec_match_list($params{page}, $pages, deptype => deptype("presence")); } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/monotone.pm0000644000000000000000000005205512211730001017114 0ustar #!/usr/bin/perl package IkiWiki::Plugin::monotone; use warnings; use strict; use IkiWiki; use Monotone; use Date::Parse qw(str2time); use Date::Format qw(time2str); use URI::Escape q{uri_escape_utf8}; my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate sha1sums my $mtn_version = undef; sub import { hook(type => "checkconfig", id => "monotone", call => \&checkconfig); hook(type => "getsetup", id => "monotone", call => \&getsetup); hook(type => "rcs", id => "rcs_update", call => \&rcs_update); hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); hook(type => "rcs", id => "rcs_add", call => \&rcs_add); hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime); } sub checkconfig () { if (!defined($config{mtnrootdir})) { $config{mtnrootdir} = $config{srcdir}; } if (! -d "$config{mtnrootdir}/_MTN") { error("Ikiwiki srcdir does not seem to be a Monotone workspace (or set the mtnrootdir)!"); } my $child = open(MTN, "-|"); if (! $child) { open STDERR, ">/dev/null"; exec("mtn", "version") || error("mtn version failed to run"); } while () { if (/^monotone (\d+\.\d+)(?:(?:\.\d+){0,2}|dev)? 
/) { $mtn_version=$1; } } close MTN || debug("mtn version exited $?"); if (!defined($mtn_version)) { error("Cannot determine monotone version"); } if ($mtn_version < 0.38) { error("Monotone version too old, is $mtn_version but required 0.38"); } if (defined $config{mtn_wrapper} && length $config{mtn_wrapper}) { push @{$config{wrappers}}, { wrapper => $config{mtn_wrapper}, wrappermode => (defined $config{mtn_wrappermode} ? $config{mtn_wrappermode} : "06755"), }; } } sub getsetup () { return plugin => { safe => 0, # rcs plugin rebuild => undef, section => "rcs", }, mtn_wrapper => { type => "string", example => "/srv/mtn/wiki/_MTN/ikiwiki-netsync-hook", description => "monotone netsync hook to generate", safe => 0, # file rebuild => 0, }, mtn_wrappermode => { type => "string", example => '06755', description => "mode for mtn_wrapper (can safely be made suid)", safe => 0, rebuild => 0, }, mtnkey => { type => "string", example => 'web@example.com', description => "your monotone key", safe => 1, rebuild => 0, }, historyurl => { type => "string", example => "http://viewmtn.example.com/branch/head/filechanges/com.example.branch/[[file]]", description => "viewmtn url to show file history ([[file]] substituted)", safe => 1, rebuild => 1, }, diffurl => { type => "string", example => "http://viewmtn.example.com/revision/diff/[[r1]]/with/[[r2]]/[[file]]", description => "viewmtn url to show a diff ([[r1]], [[r2]], and [[file]] substituted)", safe => 1, rebuild => 1, }, mtnsync => { type => "boolean", example => 0, description => "sync on update and commit?", safe => 0, # paranoia rebuild => 0, }, mtnrootdir => { type => "string", description => "path to your workspace (defaults to the srcdir; specify if the srcdir is a subdirectory of the workspace)", safe => 0, # path rebuild => 0, }, } sub get_rev () { my $sha1 = `mtn --root=$config{mtnrootdir} automate get_base_revision_id`; ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now if (! 
$sha1) { debug("Unable to get base revision for '$config{srcdir}'.") } return $sha1; } sub get_rev_auto ($) { my $automator=shift; my @results = $automator->call("get_base_revision_id"); my $sha1 = $results[0]; ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now if (! $sha1) { debug("Unable to get base revision for '$config{srcdir}'.") } return $sha1; } sub mtn_merge ($$$$) { my $leftRev=shift; my $rightRev=shift; my $branch=shift; my $author=shift; my $mergeRev; my $child = open(MTNMERGE, "-|"); if (! $child) { open STDERR, ">&STDOUT"; exec("mtn", "--root=$config{mtnrootdir}", "explicit_merge", $leftRev, $rightRev, $branch, "--author", $author, "--key", $config{mtnkey}) || error("mtn merge failed to run"); } while () { if (/^mtn.\s.merged.\s($sha1_pattern)$/) { $mergeRev=$1; } } close MTNMERGE || return undef; debug("merged $leftRev, $rightRev to make $mergeRev"); return $mergeRev; } sub commit_file_to_new_rev ($$$$$$$$) { my $automator=shift; my $wsfilename=shift; my $oldFileID=shift; my $newFileContents=shift; my $oldrev=shift; my $branch=shift; my $author=shift; my $message=shift; #store the file my ($out, $err) = $automator->call("put_file", $oldFileID, $newFileContents); my ($newFileID) = ($out =~ m/^($sha1_pattern)$/); error("Failed to store file data for $wsfilename in repository") if (! defined $newFileID || length $newFileID != 40); # get the mtn filename rather than the workspace filename ($out, $err) = $automator->call("get_corresponding_path", $oldrev, $wsfilename, $oldrev); my ($filename) = ($out =~ m/^file "(.*)"$/); error("Couldn't find monotone repository path for file $wsfilename") if (! $filename); debug("Converted ws filename of $wsfilename to repos filename of $filename"); # then stick in a new revision for this file my $manifest = "format_version \"1\"\n\n". "new_manifest [0000000000000000000000000000000000000000]\n\n". "old_revision [$oldrev]\n\n". "patch \"$filename\"\n". " from [$oldFileID]\n". 
" to [$newFileID]\n"; ($out, $err) = $automator->call("put_revision", $manifest); my ($newRevID) = ($out =~ m/^($sha1_pattern)$/); error("Unable to make new monotone repository revision") if (! defined $newRevID || length $newRevID != 40); debug("put revision: $newRevID"); # now we need to add certs for this revision... # author, branch, changelog, date $automator->call("cert", $newRevID, "author", $author); $automator->call("cert", $newRevID, "branch", $branch); $automator->call("cert", $newRevID, "changelog", $message); $automator->call("cert", $newRevID, "date", time2str("%Y-%m-%dT%T", time, "UTC")); debug("Added certs for rev: $newRevID"); return $newRevID; } sub read_certs ($$) { my $automator=shift; my $rev=shift; my @results = $automator->call("certs", $rev); my @ret; my $line = $results[0]; while ($line =~ m/\s+key\s["\[](.*?)[\]"]\nsignature\s"(ok|bad|unknown)"\n\s+name\s"(.*?)"\n\s+value\s"(.*?)"\n\s+trust\s"(trusted|untrusted)"\n/sg) { push @ret, { key => $1, signature => $2, name => $3, value => $4, trust => $5, }; } return @ret; } sub get_changed_files ($$) { my $automator=shift; my $rev=shift; my @results = $automator->call("get_revision", $rev); my $changes=$results[0]; my @ret; my %seen = (); # we need to strip off the relative path to the source dir # because monotone outputs all file paths absolute according # to the workspace root my $rel_src_dir = $config{'srcdir'}; $rel_src_dir =~ s/^\Q$config{'mtnrootdir'}\E\/?//; $rel_src_dir .= "/" if length $rel_src_dir; while ($changes =~ m/\s*(add_file|patch|delete|rename)\s"(.*?)(?param("name")) { return "Web user: " . $params{session}->param("name"); } elsif (defined $params{session}->remote_addr()) { return "Web IP: " . $params{session}->remote_addr(); } } return "Web: Anonymous"; } sub rcs_commit (@) { # Tries to commit the page; returns undef on _success_ and # a version of the page with the rcs's conflict markers on failure. # The file is relative to the srcdir. 
my %params=@_; my $author=IkiWiki::possibly_foolish_untaint(commitauthor(%params)), chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); my ($oldrev) = $params{token} =~ m/^($sha1_pattern)$/; # untaint my $rev = get_rev(); if (defined $rev && defined $oldrev && $rev ne $oldrev) { my $automator = Monotone->new(); $automator->open_args("--root", $config{mtnrootdir}, "--key", $config{mtnkey}); # Something has been committed, has this file changed? my ($out, $err); $automator->setOpts("r", $oldrev, "r", $rev); ($out, $err) = $automator->call("content_diff", $params{file}); debug("Problem committing $params{file}") if ($err ne ""); my $diff = $out; if ($diff) { # Commit a revision with just this file changed off # the old revision. # # first get the contents debug("File changed: forming branch"); my $newfile=readfile("$config{srcdir}/$params{file}"); # then get the old content ID from the diff if ($diff !~ m/^---\s$params{file}\s+($sha1_pattern)$/m) { error("Unable to find previous file ID for $params{file}"); } my $oldFileID = $1; # get the branch we're working in ($out, $err) = $automator->call("get_option", "branch"); chomp $out; error("Illegal branch name in monotone workspace") if ($out !~ m/^([-\@\w\.]+)$/); my $branch = $1; # then put the new content into the DB (and record the new content ID) my $newRevID = commit_file_to_new_rev($automator, $params{file}, $oldFileID, $newfile, $oldrev, $branch, $author, $params{message}); $automator->close(); # if we made it to here then the file has been committed... revert the local copy if (system("mtn", "--root=$config{mtnrootdir}", "revert", $params{file}) != 0) { debug("Unable to revert $params{file} after merge on conflicted commit!"); } debug("Divergence created! 
Attempting auto-merge."); # see if it will merge cleanly $ENV{MTN_MERGE}="fail"; my $mergeResult = mtn_merge($newRevID, $rev, $branch, $author); $ENV{MTN_MERGE}=""; # push any changes so far if (defined($config{mtnsync}) && $config{mtnsync}) { if (system("mtn", "--root=$config{mtnrootdir}", "push", "--quiet", "--ticker=none", "--key", $config{mtnkey}) != 0) { debug("monotone push failed"); } } if (defined($mergeResult)) { # everything is merged - bring outselves up to date if (system("mtn", "--root=$config{mtnrootdir}", "update", "-r", $mergeResult) != 0) { debug("Unable to update to rev $mergeResult after merge on conflicted commit!"); } } else { debug("Auto-merge failed. Using diff-merge to add conflict markers."); $ENV{MTN_MERGE}="diffutils"; $ENV{MTN_MERGE_DIFFUTILS}="partial=true"; $mergeResult = mtn_merge($newRevID, $rev, $branch, $author); $ENV{MTN_MERGE}=""; $ENV{MTN_MERGE_DIFFUTILS}=""; if (!defined($mergeResult)) { debug("Unable to insert conflict markers!"); error("Your commit succeeded. Unfortunately, someone else committed something to the same ". "part of the wiki at the same time. Both versions are stored in the monotone repository, ". "but at present the different versions cannot be reconciled through the web interface. ". "Please use the non-web interface to resolve the conflicts."); } if (system("mtn", "--root=$config{mtnrootdir}", "update", "-r", $mergeResult) != 0) { debug("Unable to update to rev $mergeResult after conflict-enhanced merge on conflicted commit!"); } # return "conflict enhanced" file to the user # for cleanup note, this relies on the fact # that ikiwiki seems to call rcs_prepedit() # again after we return return readfile("$config{srcdir}/$params{file}"); } return undef; } $automator->close(); } # If we reached here then the file we're looking at hasn't changed # since $oldrev. Commit it. 
if (system("mtn", "--root=$config{mtnrootdir}", "commit", "--quiet", "--author", $author, "--key", $config{mtnkey}, "-m", IkiWiki::possibly_foolish_untaint($params{message}), $params{file}) != 0) { debug("Traditional commit failed! Returning data as conflict."); my $conflict=readfile("$config{srcdir}/$params{file}"); if (system("mtn", "--root=$config{mtnrootdir}", "revert", "--quiet", $params{file}) != 0) { debug("monotone revert failed"); } return $conflict; } if (defined($config{mtnsync}) && $config{mtnsync}) { if (system("mtn", "--root=$config{mtnrootdir}", "push", "--quiet", "--ticker=none", "--key", $config{mtnkey}) != 0) { debug("monotone push failed"); } } return undef # success } sub rcs_commit_staged (@) { # Commits all staged changes. Changes can be staged using rcs_add, # rcs_remove, and rcs_rename. my %params=@_; # Note - this will also commit any spurious changes that happen to be # lying around in the working copy. There shouldn't be any, but... chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); if (system("mtn", "--root=$config{mtnrootdir}", "commit", "--quiet", "--author", IkiWiki::possibly_foolish_untaint(commitauthor(%params)), "--key", $config{mtnkey}, "-m", IkiWiki::possibly_foolish_untaint($params{message})) != 0) { error("Monotone commit failed"); } } sub rcs_add ($) { my $file=shift; chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); if (system("mtn", "--root=$config{mtnrootdir}", "add", "--quiet", $file) != 0) { error("Monotone add failed"); } } sub rcs_remove ($) { my $file = shift; chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); # Note: it is difficult to undo a remove in Monotone at the moment. # Until this is fixed, it might be better to make 'rm' move things # into an attic, rather than actually remove them. # To resurrect a file, you currently add a new file with the contents # you want it to have. 
This loses all connectivity and automated # merging with the 'pre-delete' versions of the file. if (system("mtn", "--root=$config{mtnrootdir}", "rm", "--quiet", $file) != 0) { error("Monotone remove failed"); } } sub rcs_rename ($$) { my ($src, $dest) = @_; chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); if (system("mtn", "--root=$config{mtnrootdir}", "rename", "--quiet", $src, $dest) != 0) { error("Monotone rename failed"); } } sub rcs_recentchanges ($) { my $num=shift; my @ret; chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); # use log --brief to get a list of revs, as this # gives the results in a nice order # (otherwise we'd have to do our own date sorting) my @revs; my $child = open(MTNLOG, "-|"); if (! $child) { exec("mtn", "log", "--root=$config{mtnrootdir}", "--no-graph", "--brief", "--last=$num") || error("mtn log failed to run"); } while (my $line = ) { if ($line =~ m/^($sha1_pattern)/) { push @revs, $1; } } close MTNLOG || debug("mtn log exited $?"); my $automator = Monotone->new(); $automator->open(undef, $config{mtnrootdir}); while (@revs != 0) { my $rev = shift @revs; # first go through and figure out the messages, etc my $certs = [read_certs($automator, $rev)]; my $user; my $when; my $committype; my (@pages, @message); foreach my $cert (@$certs) { if ($cert->{signature} eq "ok" && $cert->{trust} eq "trusted") { if ($cert->{name} eq "author") { $user = $cert->{value}; # detect the source of the commit # from the changelog if ($cert->{key} eq $config{mtnkey}) { $committype = "web"; } else { $committype = "mtn"; } } elsif ($cert->{name} eq "date") { $when = str2time($cert->{value}, 'UTC'); } elsif ($cert->{name} eq "changelog") { my $messageText = $cert->{value}; # split the changelog into multiple # lines foreach my $msgline (split(/\n/, $messageText)) { push @message, { line => $msgline }; } } } } my @changed_files = get_changed_files($automator, $rev); my ($out, $err) = $automator->call("parents", $rev); 
my @parents = ($out =~ m/^($sha1_pattern)$/); my $parent = $parents[0]; foreach my $file (@changed_files) { next unless length $file; if (defined $config{diffurl} and (@parents == 1)) { my $diffurl=$config{diffurl}; $diffurl=~s/\[\[r1\]\]/$parent/g; $diffurl=~s/\[\[r2\]\]/$rev/g; my $efile = uri_escape_utf8($file); $diffurl=~s/\[\[file\]\]/$efile/g; push @pages, { page => pagename($file), diffurl => $diffurl, }; } else { push @pages, { page => pagename($file), } } } push @ret, { rev => $rev, user => $user, committype => $committype, when => $when, message => [@message], pages => [@pages], } if @pages; } $automator->close(); return @ret; } sub rcs_diff ($;$) { my $rev=shift; my $maxlines=shift; my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); my $child = open(MTNDIFF, "-|"); if (! $child) { exec("mtn", "diff", "--root=$config{mtnrootdir}", "-r", "p:".$sha1, "-r", $sha1) || error("mtn diff $sha1 failed to run"); } my @lines; while (my $line=) { last if defined $maxlines && @lines == $maxlines; push @lines, $line; } close MTNDIFF || debug("mtn diff $sha1 exited $?"); if (wantarray) { return @lines; } else { return join("", @lines); } } sub rcs_getctime ($) { my $file=shift; chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); my $child = open(MTNLOG, "-|"); if (! $child) { exec("mtn", "log", "--root=$config{mtnrootdir}", "--no-graph", "--brief", $file) || error("mtn log $file failed to run"); } my $prevRev; my $firstRev; while () { if (/^($sha1_pattern)/) { $prevRev=$firstRev; $firstRev=$1; } } close MTNLOG || debug("mtn log $file exited $?"); if (! defined $firstRev) { debug "failed to parse mtn log for $file"; return 0; } my $automator = Monotone->new(); $automator->open(undef, $config{mtnrootdir}); # mtn 0.48 has a bug that makes it list the creation of parent # directories as last (first) log entry... 
So when we're dealing # with that version, let's check that the file we're looking for # is actually part of the last (first) revision. Otherwise, pick # the one before (after) that one. if ($mtn_version == 0.48) { my $changes = [get_changed_files($automator, $firstRev)]; if (! exists {map { $_ => 1 } @$changes}->{$file}) { $firstRev = $prevRev; } } my $certs = [read_certs($automator, $firstRev)]; $automator->close(); my $date; foreach my $cert (@$certs) { if ($cert->{signature} eq "ok" && $cert->{trust} eq "trusted") { if ($cert->{name} eq "date") { $date = $cert->{value}; } } } if (! defined $date) { debug "failed to find date cert for revision $firstRev when looking for creation time of $file"; return 0; } $date=str2time($date, 'UTC'); debug("found ctime ".localtime($date)." for $file"); return $date; } sub rcs_getmtime ($) { my $file=shift; chdir $config{srcdir} or error("Cannot chdir to $config{srcdir}: $!"); my $child = open(MTNLOG, "-|"); if (! $child) { exec("mtn", "log", "--root=$config{mtnrootdir}", "--no-graph", "--brief", $file) || error("mtn log $file failed to run"); } my $lastRev = ""; while () { if (/^($sha1_pattern)/ && $lastRev eq "") { $lastRev=$1; } } close MTNLOG || debug("mtn log $file exited $?"); if (! defined $lastRev) { debug "failed to parse mtn log for $file"; return 0; } my $automator = Monotone->new(); $automator->open(undef, $config{mtnrootdir}); my $certs = [read_certs($automator, $lastRev)]; $automator->close(); my $date; foreach my $cert (@$certs) { if ($cert->{signature} eq "ok" && $cert->{trust} eq "trusted") { if ($cert->{name} eq "date") { $date = $cert->{value}; } } } if (! defined $date) { debug "failed to find date cert for revision $lastRev when looking for creation time of $file"; return 0; } $date=str2time($date, 'UTC'); debug("found mtime ".localtime($date)." 
for $file"); return $date; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/norcs.pm0000644000000000000000000000260412211730001016375 0ustar #!/usr/bin/perl # Stubs for no revision control. package IkiWiki::Plugin::norcs; use warnings; use strict; use IkiWiki; sub import { hook(type => "getsetup", id => "norcs", call => \&getsetup); hook(type => "rcs", id => "rcs_update", call => \&rcs_update); hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); hook(type => "rcs", id => "rcs_add", call => \&rcs_add); hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime); } sub getsetup () { return plugin => { safe => 0, # rcs plugin rebuild => 0, section => "rcs", }, } sub rcs_update () { } sub rcs_prepedit ($) { return "" } sub rcs_commit (@) { return undef # success } sub rcs_commit_staged (@) { return undef # success } sub rcs_add ($) { } sub rcs_remove ($) { } sub rcs_rename ($$) { } sub rcs_recentchanges ($) { } sub rcs_diff ($;$) { } sub rcs_getctime ($) { return 0; } sub rcs_getmtime ($) { return 0; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/git.pm0000644000000000000000000005662212211730001016045 0ustar #!/usr/bin/perl package IkiWiki::Plugin::git; use warnings; use strict; use IkiWiki; use Encode; use URI::Escape q{uri_escape_utf8}; use open qw{:utf8 :std}; my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate Git sha1sums my $dummy_commit_msg = 'dummy commit'; # message to skip in recent changes sub import { hook(type => "checkconfig", id => "git", call => \&checkconfig); 
hook(type => "getsetup", id => "git", call => \&getsetup); hook(type => "genwrapper", id => "git", call => \&genwrapper); hook(type => "rcs", id => "rcs_update", call => \&rcs_update); hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); hook(type => "rcs", id => "rcs_add", call => \&rcs_add); hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime); hook(type => "rcs", id => "rcs_receive", call => \&rcs_receive); hook(type => "rcs", id => "rcs_preprevert", call => \&rcs_preprevert); hook(type => "rcs", id => "rcs_revert", call => \&rcs_revert); } sub checkconfig () { if (! defined $config{gitorigin_branch}) { $config{gitorigin_branch}="origin"; } if (! defined $config{gitmaster_branch}) { $config{gitmaster_branch}="master"; } if (defined $config{git_wrapper} && length $config{git_wrapper}) { push @{$config{wrappers}}, { wrapper => $config{git_wrapper}, wrappermode => (defined $config{git_wrappermode} ? $config{git_wrappermode} : "06755"), wrapper_background_command => $config{git_wrapper_background_command}, }; } if (defined $config{git_test_receive_wrapper} && length $config{git_test_receive_wrapper} && defined $config{untrusted_committers} && @{$config{untrusted_committers}}) { push @{$config{wrappers}}, { test_receive => 1, wrapper => $config{git_test_receive_wrapper}, wrappermode => (defined $config{git_wrappermode} ? $config{git_wrappermode} : "06755"), }; } # Avoid notes, parser does not handle and they only slow things down. 
$ENV{GIT_NOTES_REF}=""; # Run receive test only if being called by the wrapper, and not # when generating same. if ($config{test_receive} && ! exists $config{wrapper}) { require IkiWiki::Receive; IkiWiki::Receive::test(); } } sub getsetup () { return plugin => { safe => 0, # rcs plugin rebuild => undef, section => "rcs", }, git_wrapper => { type => "string", example => "/git/wiki.git/hooks/post-update", description => "git hook to generate", safe => 0, # file rebuild => 0, }, git_wrapper_background_command => { type => "string", example => "git push github", description => "shell command for git_wrapper to run, in the background", safe => 0, # command rebuild => 0, }, git_wrappermode => { type => "string", example => '06755', description => "mode for git_wrapper (can safely be made suid)", safe => 0, rebuild => 0, }, git_test_receive_wrapper => { type => "string", example => "/git/wiki.git/hooks/pre-receive", description => "git pre-receive hook to generate", safe => 0, # file rebuild => 0, }, untrusted_committers => { type => "string", example => [], description => "unix users whose commits should be checked by the pre-receive hook", safe => 0, rebuild => 0, }, historyurl => { type => "string", example => "http://git.example.com/gitweb.cgi?p=wiki.git;a=history;f=[[file]];hb=HEAD", description => "gitweb url to show file history ([[file]] substituted)", safe => 1, rebuild => 1, }, diffurl => { type => "string", example => "http://git.example.com/gitweb.cgi?p=wiki.git;a=blobdiff;f=[[file]];h=[[sha1_to]];hp=[[sha1_from]];hb=[[sha1_commit]];hpb=[[sha1_parent]]", description => "gitweb url to show a diff ([[file]], [[sha1_to]], [[sha1_from]], [[sha1_commit]], and [[sha1_parent]] substituted)", safe => 1, rebuild => 1, }, gitorigin_branch => { type => "string", example => "origin", description => "where to pull and push changes (set to empty string to disable)", safe => 0, # paranoia rebuild => 0, }, gitmaster_branch => { type => "string", example => "master", 
description => "branch that the wiki is stored in", safe => 0, # paranoia rebuild => 0, }, } sub genwrapper { if ($config{test_receive}) { require IkiWiki::Receive; return IkiWiki::Receive::genwrapper(); } else { return ""; } } my $git_dir=undef; my $prefix=undef; sub in_git_dir ($$) { $git_dir=shift; my @ret=shift->(); $git_dir=undef; $prefix=undef; return @ret; } sub safe_git (&@) { # Start a child process safely without resorting to /bin/sh. # Returns command output (in list content) or success state # (in scalar context), or runs the specified data handler. my ($error_handler, $data_handler, @cmdline) = @_; my $pid = open my $OUT, "-|"; error("Cannot fork: $!") if !defined $pid; if (!$pid) { # In child. # Git commands want to be in wc. if (! defined $git_dir) { chdir $config{srcdir} or error("cannot chdir to $config{srcdir}: $!"); } else { chdir $git_dir or error("cannot chdir to $git_dir: $!"); } exec @cmdline or error("Cannot exec '@cmdline': $!"); } # In parent. # git output is probably utf-8 encoded, but may contain # other encodings or invalidly encoded stuff. So do not rely # on the normal utf-8 IO layer, decode it by hand. binmode($OUT); my @lines; while (<$OUT>) { $_=decode_utf8($_, 0); chomp; if (! defined $data_handler) { push @lines, $_; } else { last unless $data_handler->($_); } } close $OUT; $error_handler->("'@cmdline' failed: $!") if $? && $error_handler; return wantarray ? @lines : ($? == 0); } # Convenient wrappers. sub run_or_die ($@) { safe_git(\&error, undef, @_) } sub run_or_cry ($@) { safe_git(sub { warn @_ }, undef, @_) } sub run_or_non ($@) { safe_git(undef, undef, @_) } sub merge_past ($$$) { # Unlike with Subversion, Git cannot make a 'svn merge -rN:M file'. # Git merge commands work with the committed changes, except in the # implicit case of '-m' of git checkout(1). So we should invent a # kludge here. In principle, we need to create a throw-away branch # in preparing for the merge itself. 
Since branches are cheap (and # branching is fast), this shouldn't cost high. # # The main problem is the presence of _uncommitted_ local changes. One # possible approach to get rid of this situation could be that we first # make a temporary commit in the master branch and later restore the # initial state (this is possible since Git has the ability to undo a # commit, i.e. 'git reset --soft HEAD^'). The method can be summarized # as follows: # # - create a diff of HEAD:current-sha1 # - dummy commit # - create a dummy branch and switch to it # - rewind to past (reset --hard to the current-sha1) # - apply the diff and commit # - switch to master and do the merge with the dummy branch # - make a soft reset (undo the last commit of master) # # The above method has some drawbacks: (1) it needs a redundant commit # just to get rid of local changes, (2) somewhat slow because of the # required system forks. Until someone points a more straight method # (which I would be grateful) I have implemented an alternative method. # In this approach, we hide all the modified files from Git by renaming # them (using the 'rename' builtin) and later restore those files in # the throw-away branch (that is, we put the files themselves instead # of applying a patch). my ($sha1, $file, $message) = @_; my @undo; # undo stack for cleanup in case of an error my $conflict; # file content with conflict markers eval { # Hide local changes from Git by renaming the modified file. # Relative paths must be converted to absolute for renaming. my ($target, $hidden) = ( "$config{srcdir}/${file}", "$config{srcdir}/${file}.${sha1}" ); rename($target, $hidden) or error("rename '$target' to '$hidden' failed: $!"); # Ensure to restore the renamed file on error. push @undo, sub { return if ! 
-e "$hidden"; # already renamed rename($hidden, $target) or warn "rename '$hidden' to '$target' failed: $!"; }; my $branch = "throw_away_${sha1}"; # supposed to be unique # Create a throw-away branch and rewind backward. push @undo, sub { run_or_cry('git', 'branch', '-D', $branch) }; run_or_die('git', 'branch', $branch, $sha1); # Switch to throw-away branch for the merge operation. push @undo, sub { if (!run_or_cry('git', 'checkout', $config{gitmaster_branch})) { run_or_cry('git', 'checkout','-f',$config{gitmaster_branch}); } }; run_or_die('git', 'checkout', $branch); # Put the modified file in _this_ branch. rename($hidden, $target) or error("rename '$hidden' to '$target' failed: $!"); # _Silently_ commit all modifications in the current branch. run_or_non('git', 'commit', '-m', $message, '-a'); # ... and re-switch to master. run_or_die('git', 'checkout', $config{gitmaster_branch}); # Attempt to merge without complaining. if (!run_or_non('git', 'pull', '--no-commit', '.', $branch)) { $conflict = readfile($target); run_or_die('git', 'reset', '--hard'); } }; my $failure = $@; # Process undo stack (in reverse order). By policy cleanup # actions should normally print a warning on failure. while (my $handle = pop @undo) { $handle->(); } error("Git merge failed!\n$failure\n") if $failure; return $conflict; } sub decode_git_file ($) { my $file=shift; # git does not output utf-8 filenames, but instead # double-quotes them with the utf-8 characters # escaped as \nnn\nnn. if ($file =~ m/^"(.*)"$/) { ($file=$1) =~ s/\\([0-7]{1,3})/chr(oct($1))/eg; } # strip prefix if in a subdir if (! defined $prefix) { ($prefix) = run_or_die('git', 'rev-parse', '--show-prefix'); if (! defined $prefix) { $prefix=""; } } $file =~ s/^\Q$prefix\E//; return decode("utf8", $file); } sub parse_diff_tree ($) { # Parse the raw diff tree chunk and return the info hash. # See git-diff-tree(1) for the syntax. my $dt_ref = shift; # End of stream? return if ! 
@{ $dt_ref } || !defined $dt_ref->[0] || !length $dt_ref->[0]; my %ci; # Header line. while (my $line = shift @{ $dt_ref }) { return if $line !~ m/^(.+) ($sha1_pattern)/; my $sha1 = $2; $ci{'sha1'} = $sha1; last; } # Identification lines for the commit. while (my $line = shift @{ $dt_ref }) { # Regexps are semi-stolen from gitweb.cgi. if ($line =~ m/^tree ([0-9a-fA-F]{40})$/) { $ci{'tree'} = $1; } elsif ($line =~ m/^parent ([0-9a-fA-F]{40})$/) { # XXX: collecting in reverse order push @{ $ci{'parents'} }, $1; } elsif ($line =~ m/^(author|committer) (.*) ([0-9]+) (.*)$/) { my ($who, $name, $epoch, $tz) = ($1, $2, $3, $4 ); $ci{ $who } = $name; $ci{ "${who}_epoch" } = $epoch; $ci{ "${who}_tz" } = $tz; if ($name =~ m/^([^<]+)\s+<([^@>]+)/) { $ci{"${who}_name"} = $1; $ci{"${who}_username"} = $2; } elsif ($name =~ m/^([^<]+)\s+<>$/) { $ci{"${who}_username"} = $1; } else { $ci{"${who}_username"} = $name; } } elsif ($line =~ m/^$/) { # Trailing empty line signals next section. last; } } debug("No 'tree' seen in diff-tree output") if !defined $ci{'tree'}; if (defined $ci{'parents'}) { $ci{'parent'} = @{ $ci{'parents'} }[0]; } else { $ci{'parent'} = 0 x 40; } # Commit message (optional). while ($dt_ref->[0] =~ /^ /) { my $line = shift @{ $dt_ref }; $line =~ s/^ //; push @{ $ci{'comment'} }, $line; } shift @{ $dt_ref } if $dt_ref->[0] =~ /^$/; # Modified files. 
while (my $line = shift @{ $dt_ref }) { if ($line =~ m{^ (:+) # number of parents ([^\t]+)\t # modes, sha1, status (.*) # file names $}xo) { my $num_parents = length $1; my @tmp = split(" ", $2); my ($file, $file_to) = split("\t", $3); my @mode_from = splice(@tmp, 0, $num_parents); my $mode_to = shift(@tmp); my @sha1_from = splice(@tmp, 0, $num_parents); my $sha1_to = shift(@tmp); my $status = shift(@tmp); if (length $file) { push @{ $ci{'details'} }, { 'file' => decode_git_file($file), 'sha1_from' => $sha1_from[0], 'sha1_to' => $sha1_to, 'mode_from' => $mode_from[0], 'mode_to' => $mode_to, 'status' => $status, }; } next; }; last; } return \%ci; } sub git_commit_info ($;$) { # Return an array of commit info hashes of num commits # starting from the given sha1sum. my ($sha1, $num) = @_; my @opts; push @opts, "--max-count=$num" if defined $num; my @raw_lines = run_or_die('git', 'log', @opts, '--pretty=raw', '--raw', '--abbrev=40', '--always', '-c', '-r', $sha1, '--', '.'); my @ci; while (my $parsed = parse_diff_tree(\@raw_lines)) { push @ci, $parsed; } warn "Cannot parse commit info for '$sha1' commit" if !@ci; return wantarray ? @ci : $ci[0]; } sub git_sha1 (;$) { # Return head sha1sum (of given file). my $file = shift || q{--}; # Ignore error since a non-existing file might be given. my ($sha1) = run_or_non('git', 'rev-list', '--max-count=1', 'HEAD', '--', $file); if (defined $sha1) { ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now } return defined $sha1 ? $sha1 : ''; } sub rcs_update () { # Update working directory. if (length $config{gitorigin_branch}) { run_or_cry('git', 'pull', '--prune', $config{gitorigin_branch}); } } sub rcs_prepedit ($) { # Return the commit sha1sum of the file when editing begins. # This will be later used in rcs_commit if a merge is required. 
my ($file) = @_; return git_sha1($file); } sub rcs_commit (@) { # Try to commit the page; returns undef on _success_ and # a version of the page with the rcs's conflict markers on # failure. my %params=@_; # Check to see if the page has been changed by someone else since # rcs_prepedit was called. my $cur = git_sha1($params{file}); my ($prev) = $params{token} =~ /^($sha1_pattern)$/; # untaint if (defined $cur && defined $prev && $cur ne $prev) { my $conflict = merge_past($prev, $params{file}, $dummy_commit_msg); return $conflict if defined $conflict; } return rcs_commit_helper(@_); } sub rcs_commit_staged (@) { # Commits all staged changes. Changes can be staged using rcs_add, # rcs_remove, and rcs_rename. return rcs_commit_helper(@_); } sub rcs_commit_helper (@) { my %params=@_; my %env=%ENV; if (defined $params{session}) { # Set the commit author and email based on web session info. my $u; if (defined $params{session}->param("name")) { $u=$params{session}->param("name"); } elsif (defined $params{session}->remote_addr()) { $u=$params{session}->remote_addr(); } if (defined $u) { $u=encode_utf8($u); $ENV{GIT_AUTHOR_NAME}=$u; } if (defined $params{session}->param("nickname")) { $u=encode_utf8($params{session}->param("nickname")); $u=~s/\s+/_/g; $u=~s/[^-_0-9[:alnum:]]+//g; } if (defined $u) { $ENV{GIT_AUTHOR_EMAIL}="$u\@web"; } } $params{message} = IkiWiki::possibly_foolish_untaint($params{message}); my @opts; if ($params{message} !~ /\S/) { # Force git to allow empty commit messages. # (If this version of git supports it.) my ($version)=`git --version` =~ /git version (.*)/; if ($version ge "1.7.8") { push @opts, "--allow-empty-message", "--no-edit"; } if ($version ge "1.7.2") { push @opts, "--allow-empty-message"; } elsif ($version ge "1.5.4") { push @opts, '--cleanup=verbatim'; } else { $params{message}.="."; } } if (exists $params{file}) { push @opts, '--', $params{file}; } # git commit returns non-zero if nothing really changed. 
# So we should ignore its exit status (hence run_or_non). if (run_or_non('git', 'commit', '-m', $params{message}, '-q', @opts)) { if (length $config{gitorigin_branch}) { run_or_cry('git', 'push', $config{gitorigin_branch}); } } %ENV=%env; return undef; # success } sub rcs_add ($) { # Add file to archive. my ($file) = @_; run_or_cry('git', 'add', $file); } sub rcs_remove ($) { # Remove file from archive. my ($file) = @_; run_or_cry('git', 'rm', '-f', $file); } sub rcs_rename ($$) { my ($src, $dest) = @_; run_or_cry('git', 'mv', '-f', $src, $dest); } sub rcs_recentchanges ($) { # List of recent changes. my ($num) = @_; eval q{use Date::Parse}; error($@) if $@; my @rets; foreach my $ci (git_commit_info('HEAD', $num || 1)) { # Skip redundant commits. next if ($ci->{'comment'} && @{$ci->{'comment'}}[0] eq $dummy_commit_msg); my ($sha1, $when) = ( $ci->{'sha1'}, $ci->{'author_epoch'} ); my @pages; foreach my $detail (@{ $ci->{'details'} }) { my $file = $detail->{'file'}; my $efile = uri_escape_utf8($file); my $diffurl = defined $config{'diffurl'} ? $config{'diffurl'} : ""; $diffurl =~ s/\[\[file\]\]/$efile/go; $diffurl =~ s/\[\[sha1_parent\]\]/$ci->{'parent'}/go; $diffurl =~ s/\[\[sha1_from\]\]/$detail->{'sha1_from'}/go; $diffurl =~ s/\[\[sha1_to\]\]/$detail->{'sha1_to'}/go; $diffurl =~ s/\[\[sha1_commit\]\]/$sha1/go; push @pages, { page => pagename($file), diffurl => $diffurl, }; } my @messages; my $pastblank=0; foreach my $line (@{$ci->{'comment'}}) { $pastblank=1 if $line eq ''; next if $pastblank && $line=~m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i; push @messages, { line => $line }; } my $user=$ci->{'author_username'}; my $web_commit = ($ci->{'author'} =~ /\@web>/); my $nickname; # Set nickname only if a non-url author_username is available, # and author_name is an url. 
if ($user !~ /:\/\// && defined $ci->{'author_name'} && $ci->{'author_name'} =~ /:\/\//) { $nickname=$user; $user=$ci->{'author_name'}; } # compatability code for old web commit messages if (! $web_commit && defined $messages[0] && $messages[0]->{line} =~ m/$config{web_commit_regexp}/) { $user = defined $2 ? "$2" : "$3"; $messages[0]->{line} = $4; $web_commit=1; } push @rets, { rev => $sha1, user => $user, nickname => $nickname, committype => $web_commit ? "web" : "git", when => $when, message => [@messages], pages => [@pages], } if @pages; last if @rets >= $num; } return @rets; } sub rcs_diff ($;$) { my $rev=shift; my $maxlines=shift; my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint my @lines; my $addlines=sub { my $line=shift; return if defined $maxlines && @lines == $maxlines; push @lines, $line."\n" if (@lines || $line=~/^diff --git/); return 1; }; safe_git(undef, $addlines, "git", "show", $sha1); if (wantarray) { return @lines; } else { return join("", @lines); } } { my %time_cache; sub findtimes ($$) { my $file=shift; my $id=shift; # 0 = mtime ; 1 = ctime if (! keys %time_cache) { my $date; foreach my $line (run_or_die('git', 'log', '--pretty=format:%at', '--name-only', '--relative')) { if (! defined $date && $line =~ /^(\d+)$/) { $date=$line; } elsif (! length $line) { $date=undef; } else { my $f=decode_git_file($line); if (! $time_cache{$f}) { $time_cache{$f}[0]=$date; # mtime } $time_cache{$f}[1]=$date; # ctime } } } return exists $time_cache{$file} ? $time_cache{$file}[$id] : 0; } } sub rcs_getctime ($) { my $file=shift; return findtimes($file, 1); } sub rcs_getmtime ($) { my $file=shift; return findtimes($file, 0); } { my $ret; sub git_find_root { # The wiki may not be the only thing in the git repo. # Determine if it is in a subdirectory by examining the srcdir, # and its parents, looking for the .git directory. return @$ret if defined $ret; my $subdir=""; my $dir=$config{srcdir}; while (! 
-d "$dir/.git") { $subdir=IkiWiki::basename($dir)."/".$subdir; $dir=IkiWiki::dirname($dir); if (! length $dir) { error("cannot determine root of git repo"); } } $ret=[$subdir, $dir]; return @$ret; } } sub git_parse_changes { my $reverted = shift; my @changes = @_; my ($subdir, $rootdir) = git_find_root(); my @rets; foreach my $ci (@changes) { foreach my $detail (@{ $ci->{'details'} }) { my $file = $detail->{'file'}; # check that all changed files are in the subdir if (length $subdir && ! ($file =~ s/^\Q$subdir\E//)) { error sprintf(gettext("you are not allowed to change %s"), $file); } my ($action, $mode, $path); if ($detail->{'status'} =~ /^[M]+\d*$/) { $action="change"; $mode=$detail->{'mode_to'}; } elsif ($detail->{'status'} =~ /^[AM]+\d*$/) { $action= $reverted ? "remove" : "add"; $mode=$detail->{'mode_to'}; } elsif ($detail->{'status'} =~ /^[DAM]+\d*/) { $action= $reverted ? "add" : "remove"; $mode=$detail->{'mode_from'}; } else { error "unknown status ".$detail->{'status'}; } # test that the file mode is ok if ($mode !~ /^100[64][64][64]$/) { error sprintf(gettext("you cannot act on a file with mode %s"), $mode); } if ($action eq "change") { if ($detail->{'mode_from'} ne $detail->{'mode_to'}) { error gettext("you are not allowed to change file modes"); } } # extract attachment to temp file if (($action eq 'add' || $action eq 'change') && ! pagetype($file)) { eval q{use File::Temp}; die $@ if $@; my $fh; ($fh, $path)=File::Temp::tempfile(undef, UNLINK => 1); my $cmd = "cd $git_dir && ". 
"git show $detail->{sha1_to} > '$path'"; if (system($cmd) != 0) { error("failed writing temp file '$path'."); } } push @rets, { file => $file, action => $action, path => $path, }; } } return @rets; } sub rcs_receive () { my @rets; while (<>) { chomp; my ($oldrev, $newrev, $refname) = split(' ', $_, 3); # only allow changes to gitmaster_branch if ($refname !~ /^refs\/heads\/\Q$config{gitmaster_branch}\E$/) { error sprintf(gettext("you are not allowed to change %s"), $refname); } # Avoid chdir when running git here, because the changes # are in the master git repo, not the srcdir repo. # (Also, if a subdir is involved, we don't want to chdir to # it and only see changes in it.) # The pre-receive hook already puts us in the right place. in_git_dir(".", sub { push @rets, git_parse_changes(0, git_commit_info($oldrev."..".$newrev)); }); } return reverse @rets; } sub rcs_preprevert ($) { my $rev=shift; my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint # Examine changes from root of git repo, not from any subdir, # in order to see all changes. my ($subdir, $rootdir) = git_find_root(); in_git_dir($rootdir, sub { my @commits=git_commit_info($sha1, 1); if (! @commits) { error "unknown commit"; # just in case } # git revert will fail on merge commits. Add a nice message. if (exists $commits[0]->{parents} && @{$commits[0]->{parents}} > 1) { error gettext("you are not allowed to revert a merge"); } git_parse_changes(1, @commits); }); } sub rcs_revert ($) { # Try to revert the given rev; returns undef on _success_. 
my $rev = shift; my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint if (run_or_non('git', 'revert', '--no-commit', $sha1)) { return undef; } else { run_or_die('git', 'reset', '--hard'); return sprintf(gettext("Failed to revert commit %s"), $sha1); } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/format.pm0000644000000000000000000000224412211730001016541 0ustar #!/usr/bin/perl package IkiWiki::Plugin::format; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "preprocess", id => "format", call => \&preprocess); hook(type => "getsetup", id => "format", call => \&getsetup); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, } sub preprocess (@) { my %params=@_; my $format=shift; shift; my $text=IkiWiki::preprocess($params{page}, $params{destpage}, shift); shift; if (! defined $format || ! defined $text) { error(gettext("must specify format and text")); } # Other plugins can register htmlizeformat hooks to add support # for page types not suitable for htmlize, or that need special # processing when included via format. Try them until one succeeds. 
my $ret; IkiWiki::run_hooks(htmlizeformat => sub { $ret=shift->($format, $text) unless defined $ret; }); if (defined $ret) { return $ret; } elsif (exists $IkiWiki::hooks{htmlize}{$format}) { return IkiWiki::htmlize($params{page}, $params{destpage}, $format, $text); } else { error(sprintf(gettext("unsupported page format %s"), $format)); } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/attachment.pm0000644000000000000000000002544112211730001017405 0ustar #!/usr/bin/perl package IkiWiki::Plugin::attachment; use warnings; use strict; use IkiWiki 3.00; sub import { add_underlay("attachment"); add_underlay("javascript"); add_underlay("jquery"); hook(type => "getsetup", id => "attachment", call => \&getsetup); hook(type => "checkconfig", id => "attachment", call => \&checkconfig); hook(type => "formbuilder_setup", id => "attachment", call => \&formbuilder_setup); hook(type => "formbuilder", id => "attachment", call => \&formbuilder, last => 1); IkiWiki::loadplugin("filecheck"); } sub getsetup () { return plugin => { safe => 1, rebuild => 0, section => "web", }, allowed_attachments => { type => "pagespec", example => "virusfree() and mimetype(image/*) and maxsize(50kb)", description => "enhanced PageSpec specifying what attachments are allowed", link => "ikiwiki/PageSpec/attachment", safe => 1, rebuild => 0, }, virus_checker => { type => "string", example => "clamdscan -", description => "virus checker program (reads STDIN, returns nonzero if virus found)", safe => 0, # executed rebuild => 0, }, } sub check_canattach ($$;$) { my $session=shift; my $dest=shift; # where it's going to be put, under the srcdir my $file=shift; # the path to the attachment currently # Don't allow an attachment to be uploaded with the same name as an # existing page. if (exists $IkiWiki::pagesources{$dest} && $IkiWiki::pagesources{$dest} ne $dest) { error(sprintf(gettext("there is already a page named %s"), $dest)); } # Use a special pagespec to test that the attachment is valid. 
my $allowed=1; if (defined $config{allowed_attachments} && length $config{allowed_attachments}) { $allowed=pagespec_match($dest, $config{allowed_attachments}, file => $file, user => $session->param("name"), ip => $session->remote_addr(), ); } if (! $allowed) { error(gettext("prohibited by allowed_attachments")." ($allowed)"); } else { return 1; } } sub checkconfig () { $config{cgi_disable_uploads}=0; } sub formbuilder_setup (@) { my %params=@_; my $form=$params{form}; my $q=$params{cgi}; if (defined $form->field("do") && ($form->field("do") eq "edit" || $form->field("do") eq "create")) { # Add attachment field, set type to multipart. $form->enctype(&CGI::MULTIPART); $form->field(name => 'attachment', type => 'file'); # These buttons are not put in the usual place, so # are not added to the normal formbuilder button list. $form->tmpl_param("field-upload" => ''); $form->tmpl_param("field-link" => ''); # Add all the javascript used by the attachments interface. require IkiWiki::Plugin::toggle; my $js=IkiWiki::Plugin::toggle::include_javascript($params{page}); $js.=''."\n"; my @jsfiles=qw{jquery.min jquery-ui.min jquery.tmpl.min jquery.iframe-transport jquery.fileupload jquery.fileupload-ui }; foreach my $file (@jsfiles) { $js.=''."\n"; } $form->tmpl_param("javascript" => $js); # Start with the attachments interface toggled invisible, # but if it was used, keep it open. if ($form->submitted ne "Upload Attachment" && (! defined $q->param("attachment_select") || ! length $q->param("attachment_select"))) { $form->tmpl_param("attachments-class" => "toggleable"); } else { $form->tmpl_param("attachments-class" => "toggleable-open"); } # Save attachments in holding area before previewing and # saving. if ($form->submitted eq "Preview" || $form->submitted eq "Save Page") { attachments_save($form, $params{session}); } } } sub formbuilder (@) { my %params=@_; my $form=$params{form}; my $q=$params{cgi}; return if ! 
defined $form->field("do") || ($form->field("do") ne "edit" && $form->field("do") ne "create") ; my $filename=Encode::decode_utf8($q->param('attachment')); if (defined $filename && length $filename) { attachment_store($filename, $form, $q, $params{session}); } if ($form->submitted eq "Save Page") { attachments_save($form, $params{session}); } if ($form->submitted eq "Insert Links") { my $page=quotemeta(Encode::decode_utf8($q->param("page"))); my $add=""; foreach my $f ($q->param("attachment_select")) { $f=Encode::decode_utf8($f); $f=~s/^$page\///; if (IkiWiki::isinlinableimage($f) && IkiWiki::Plugin::img->can("import")) { $add.='[[!img '.$f.' align="right" size="" alt=""]]'; } else { $add.="[[$f]]"; } $add.="\n"; } $form->field(name => 'editcontent', value => $form->field('editcontent')."\n\n".$add, force => 1) if length $add; } # Generate the attachment list only after having added any new # attachments. $form->tmpl_param("attachment_list" => [attachment_list($form->field('page'))]); } sub attachment_holding_location { my $page=attachment_location(shift); my $dir=$config{wikistatedir}."/attachments/". IkiWiki::possibly_foolish_untaint(linkpage($page)); $dir=~s/\/$//; return $dir; } sub is_held_attachment { my $attachment=shift; my $f=attachment_holding_location($attachment); if (-f $f) { return $f } else { return undef; } } # Stores the attachment in a holding area, not yet in the wiki proper. sub attachment_store { my $filename=shift; my $form=shift; my $q=shift; my $session=shift; # This is an (apparently undocumented) way to get the name # of the temp file that CGI writes the upload to. my $tempfile=$q->tmpFileName($filename); if (! defined $tempfile || ! length $tempfile) { # perl 5.8 needs an alternative, awful method if ($q =~ /HASH/ && exists $q->{'.tmpfiles'}) { foreach my $key (keys(%{$q->{'.tmpfiles'}})) { $tempfile=$q->tmpFileName(\$key); last if defined $tempfile && length $tempfile; } } if (! defined $tempfile || ! 
length $tempfile) { error("CGI::tmpFileName failed to return the uploaded file name"); } } $filename=IkiWiki::basename($filename); $filename=~s/.*\\+(.+)/$1/; # hello, windows $filename=IkiWiki::possibly_foolish_untaint(linkpage($filename)); my $dest=attachment_holding_location($form->field('page')); # Check that the user is allowed to edit the attachment. my $final_filename= linkpage(IkiWiki::possibly_foolish_untaint( attachment_location($form->field('page')))). $filename; eval { if (IkiWiki::file_pruned($final_filename)) { error(gettext("bad attachment filename")); } IkiWiki::check_canedit($final_filename, $q, $session); # And that the attachment itself is acceptable. check_canattach($session, $final_filename, $tempfile); }; if ($@) { json_response($q, $form, $dest."/".$filename, $@); error $@; } # Move the attachment into holding directory. # Try to use a fast rename; fall back to copying. IkiWiki::prep_writefile($filename, $dest); unlink($dest."/".$filename); if (rename($tempfile, $dest."/".$filename)) { # The temp file has tight permissions; loosen up. chmod(0666 & ~umask, $dest."/".$filename); } else { my $fh=$q->upload('attachment'); if (! defined $fh || ! ref $fh) { # needed by old CGI versions $fh=$q->param('attachment'); if (! defined $fh || ! ref $fh) { # even that doesn't always work, # fall back to opening the tempfile $fh=undef; open($fh, "<", $tempfile) || error("failed to open \"$tempfile\": $!"); } } binmode($fh); require IkiWiki::Render; writefile($filename, $dest, undef, 1, sub { IkiWiki::fast_file_copy($tempfile, $filename, $fh, @_); }); } json_response($q, $form, $dest."/".$filename, stored_msg()); } # Save all stored attachments for a page. sub attachments_save { my $form=shift; my $session=shift; # Move attachments out of holding directory. 
my @attachments; my $dir=attachment_holding_location($form->field('page')); foreach my $filename (glob("$dir/*")) { $filename=Encode::decode_utf8($filename); next unless -f $filename; my $destdir=linkpage(IkiWiki::possibly_foolish_untaint( attachment_location($form->field('page')))); my $absdestdir=$config{srcdir}."/".$destdir; my $destfile=IkiWiki::basename($filename); my $dest=$absdestdir.$destfile; unlink($dest); IkiWiki::prep_writefile($destfile, $absdestdir); rename($filename, $dest); push @attachments, $destdir.$destfile; } return unless @attachments; require IkiWiki::Render; IkiWiki::prune($dir, $config{wikistatedir}."/attachments"); # Check the attachments in and trigger a wiki refresh. if ($config{rcs}) { IkiWiki::rcs_add($_) foreach @attachments; IkiWiki::disable_commit_hook(); IkiWiki::rcs_commit_staged( message => gettext("attachment upload"), session => $session, ); IkiWiki::enable_commit_hook(); IkiWiki::rcs_update(); } IkiWiki::refresh(); IkiWiki::saveindex(); } sub attachment_location ($) { my $page=shift; # Put the attachment in a subdir of the page it's attached # to, unless that page is the "index" page. return "" if $page eq 'index'; $page.="/" if length $page; return $page; } sub attachment_list ($) { my $page=shift; my $loc=attachment_location($page); my $std=sub { my $file=shift; my $mtime=shift; my $date=shift; my $size=shift; name => $file, size => IkiWiki::Plugin::filecheck::humansize($size), mtime => $date, mtime_raw => $mtime, }; # attachments already in the wiki my %attachments; foreach my $f (values %pagesources) { if (! 
defined pagetype($f) && $f=~m/^\Q$loc\E[^\/]+$/) { $attachments{$f}={ $std->($f, $IkiWiki::pagemtime{$f}, displaytime($IkiWiki::pagemtime{$f}), (stat($f))[7]), link => htmllink($page, $page, $f, noimageinline => 1), }; } } # attachments in holding directory my $dir=attachment_holding_location($page); my $heldmsg=gettext("this attachment is not yet saved"); foreach my $file (glob("$dir/*")) { $file=Encode::decode_utf8($file); next unless -f $file; my $base=IkiWiki::basename($file); my $f=$loc.$base; $attachments{$f}={ $std->($f, (stat($file))[9]*2, stored_msg(), (stat(_))[7]), link => $base, } } # Sort newer attachments to the end of the list. return sort { $a->{mtime_raw} <=> $b->{mtime_raw} || $a->{link} cmp $b->{link} } values %attachments; } sub stored_msg { gettext("just uploaded"); } sub json_response ($$$$) { my $q=shift; my $form=shift; my $filename=shift; my $stored_msg=shift; if (! defined $form->submitted || $form->submitted ne "Upload Attachment") { eval q{use JSON}; error $@ if $@; print "Content-type: text/html\n\n"; my $size=-s $filename; print to_json([ { name => IkiWiki::basename($filename), size => $size, humansize => IkiWiki::Plugin::filecheck::humansize($size), stored_msg => $stored_msg, } ]); exit 0; } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/transient.pm0000644000000000000000000000257712211730000017270 0ustar #!/usr/bin/perl package IkiWiki::Plugin::transient; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "transient", call => \&getsetup); hook(type => "checkconfig", id => "transient", call => \&checkconfig); hook(type => "rendered", id => "transient", call => \&rendered); } sub getsetup () { return plugin => { # this plugin is safe but only makes sense as a # dependency; similarly, it needs a rebuild but # only if something else does safe => 0, rebuild => 0, }, } our $transientdir; sub checkconfig () { if (defined $config{wikistatedir}) { $transientdir = $config{wikistatedir}."/transient"; # 
add_underlay treats relative underlays as relative to the installed # location, not the cwd. That's not what we want here. IkiWiki::add_literal_underlay($transientdir); } } sub rendered (@) { foreach my $file (@_) { # If the corresponding file exists in the transient underlay # and isn't actually being used, we can get rid of it. # Assume that the file that just changed has the same extension # as the obsolete transient version: this'll be true for web # edits, and avoids invoking File::Find. my $casualty = "$transientdir/$file"; if (srcfile($file) ne $casualty && -e $casualty) { debug(sprintf(gettext("removing transient version of %s"), $file)); IkiWiki::prune($casualty, $transientdir); } } } 1; ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/favicon.pm0000644000000000000000000000100712211730001016672 0ustar #!/usr/bin/perl # favicon plugin. package IkiWiki::Plugin::favicon; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "favicon", call => \&getsetup); hook(type => "pagetemplate", id => "favicon", call => \&pagetemplate); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, }, } sub pagetemplate (@) { my %params=@_; my $template=$params{template}; if ($template->query(name => "favicon")) { $template->param(favicon => "favicon.ico"); } } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/sortnaturally.pm0000644000000000000000000000133512211730001020174 0ustar #!/usr/bin/perl # Sort::Naturally-powered title_natural sort order for IkiWiki package IkiWiki::Plugin::sortnaturally; use IkiWiki 3.00; no warnings; sub import { hook(type => "getsetup", id => "sortnaturally", call => \&getsetup); hook(type => "checkconfig", id => "sortnaturally", call => \&checkconfig); } sub getsetup { return plugin => { safe => 1, rebuild => undef, }, } sub checkconfig () { eval q{use Sort::Naturally}; error $@ if $@; } package IkiWiki::SortSpec; sub cmp_title_natural { Sort::Naturally::ncmp(IkiWiki::pagetitle(IkiWiki::basename($a)), 
IkiWiki::pagetitle(IkiWiki::basename($b))) } sub cmp_path_natural { Sort::Naturally::ncmp(IkiWiki::pagetitle($a), IkiWiki::pagetitle($b)) } 1; ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/teximg.pm0000644000000000000000000001151712211730001016551 0ustar #!/usr/bin/perl # Licensed under GPL v2 or greater # (c) 2007 Patrick Winnertz package IkiWiki::Plugin::teximg; use warnings; use strict; use Digest::MD5 qw(md5_hex); use File::Temp qw(tempdir); use HTML::Entities; use Encode; use IkiWiki 3.00; my $default_prefix = < "getsetup", id => "teximg", call => \&getsetup); hook(type => "preprocess", id => "teximg", call => \&preprocess); } sub getsetup () { return plugin => { safe => 1, rebuild => undef, section => "widget", }, teximg_dvipng => { type => "boolean", description => "Should teximg use dvipng to render, or dvips and convert?", safe => 0, rebuild => undef, }, teximg_prefix => { type => "string", example => $default_prefix, description => "LaTeX prefix for teximg plugin", safe => 0, # Not sure how secure LaTeX is... rebuild => 1, }, teximg_postfix => { type => "string", example => $default_postfix, description => "LaTeX postfix for teximg plugin", safe => 0, # Not sure how secure LaTeX is... rebuild => 1, }, } sub preprocess (@) { my %params = @_; my $height = $params{height}; if (! defined $height || ! length $height) { $height = 12; } else { $height =~ s#(\d+)#$1#; } my $code = $params{code}; if (! defined $code && ! length $code) { error gettext("missing tex code"); } return create($code, check_height($height), \%params); } sub check_height ($) { # Since latex doesn't support unlimited scaling this function # returns the closest supported size. my $height =shift; my @allowed=(8,9,10,11,12,14,17,20); my $ret; my $fit; foreach my $val (@allowed) { my $f = abs($val - $height); if (! defined($fit) || $f < $fit ) { $ret=$val; $fit=$f; } } return $ret; } sub create ($$$) { # This function calls the image generating function and returns # the for the generated image. 
my $code = shift; my $height = shift; my $params = shift; if (! defined($height) and not length($height) ) { $height = 12; } my $digest = md5_hex(Encode::encode_utf8($code), $height); my $imglink= $params->{page} . "/$digest.png"; my $imglog = $params->{page} . "/$digest.log"; will_render($params->{page}, $imglink); will_render($params->{page}, $imglog); my $imgurl=urlto($imglink, $params->{destpage}); my $logurl=urlto($imglog, $params->{destpage}); if (-e "$config{destdir}/$imglink" || gen_image($code, $height, $digest, $params->{page})) { return qq{}
			.(exists $params->{alt} ? $params->{alt} : encode_entities($code))
			.qq{}; } else { error qq{}.gettext("failed to generate image from code").""; } } sub gen_image ($$$$) { # Actually creates the image. my $code = shift; my $height = shift; my $digest = shift; my $imagedir = shift; if (!defined $config{teximg_prefix}) { $config{teximg_prefix} = $default_prefix; } if (!defined $config{teximg_postfix}) { $config{teximg_postfix} = $default_postfix; } if (!defined $config{teximg_dvipng}) { $config{teximg_dvipng} = length `which dvipng 2>/dev/null`; } my $tex = $config{teximg_prefix}; $tex .= '\['.$code.'\]'; $tex .= $config{teximg_postfix}; $tex =~ s!\\documentclass{article}!\\documentclass[${height}pt]{article}!g; $tex =~ s!\\documentclass{scrartcl}!\\documentclass[${height}pt]{scrartcl}!g; my $tmp = eval { create_tmp_dir($digest) }; if (! $@ && writefile("$digest.tex", $tmp, $tex) && system("cd $tmp; shell_escape=f openout_any=p openin_any=p latex --interaction=nonstopmode $digest.tex < /dev/null > /dev/null") == 0 && # ensure destination directory exists writefile("$imagedir/$digest.png", $config{destdir}, "") && (($config{teximg_dvipng} && system("dvipng -D 120 -bg Transparent -T tight -o $config{destdir}/$imagedir/$digest.png $tmp/$digest.dvi > $tmp/$digest.log") == 0 ) || (!$config{teximg_dvipng} && system("dvips -E $tmp/$digest.dvi -o $tmp/$digest.ps 2> $tmp/$digest.log") == 0 && system("convert -density 120 -trim -transparent \"#FFFFFF\" $tmp/$digest.ps $config{destdir}/$imagedir/$digest.png > $tmp/$digest.log") == 0 ))) { return 1; } else { # store failure log my $log=""; { if (open(my $f, '<', "$tmp/$digest.log")) { local $/=undef; $log = <$f>; close($f); } } writefile("$digest.log", "$config{destdir}/$imagedir", $log); return 0; } } sub create_tmp_dir ($) { # Create a temp directory, it will be removed when ikiwiki exits. 
my $base = shift; my $template = $base.".XXXXXXXXXX"; my $tmpdir = tempdir($template, TMPDIR => 1, CLEANUP => 1); return $tmpdir; } 1 ikiwiki-3.20130904.1ubuntu1/IkiWiki/Plugin/relativedate.pm0000644000000000000000000000342012211730001017717 0ustar #!/usr/bin/perl package IkiWiki::Plugin::relativedate; use warnings; no warnings 'redefine'; use strict; use IkiWiki 3.00; use POSIX (); use Encode; sub import { add_underlay("javascript"); hook(type => "getsetup", id => "relativedate", call => \&getsetup); hook(type => "format", id => "relativedate", call => \&format); inject(name => "IkiWiki::displaytime", call => \&mydisplaytime); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, }, } sub format (@) { my %params=@_; if (! ($params{content}=~s!^(]*>)!$1.include_javascript($params{page})!em)) { # no tag, probably in preview mode $params{content}=include_javascript(undef).$params{content}; } return $params{content}; } sub include_javascript ($) { my $from=shift; return ''."\n". ''; } sub mydisplaytime ($;$$) { my $time=shift; my $format=shift; my $pubdate=shift; # This needs to be in a form that can be parsed by javascript. # (Being fairly human readable is also nice, as it will be exposed # as the title if javascript is not available.) my $lc_time=POSIX::setlocale(&POSIX::LC_TIME); POSIX::setlocale(&POSIX::LC_TIME, "C"); my $gmtime=decode_utf8(POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time))); POSIX::setlocale(&POSIX::LC_TIME, $lc_time); my $mid=' class="relativedate" title="'.$gmtime.'">'. IkiWiki::formattime($time, $format); if ($config{html5}) { return '