ikiwiki-3.20160121/0000755000000000000000000000000012650125230010433 5ustar ikiwiki-3.20160121/wikilist0000644000000000000000000000110312650125230012210 0ustar # This file is used by ikiwiki-mass-rebuild script to rebuild the listed # wikis. Run this script when upgrading ikiwiki to an incompatible new # version that requires rebuilding everything. # # ikiwiki-mass-rebuild su's to the listed user and then runs ikiwiki --setup # on the specified ikiwiki setup file. # # It's also possible to let a user list setup files in ~user/.ikiwiki/wikilist # in their home directory. To do so, list only the user's name, without a # setup file. The format of ~/.ikiwiki/wikilist is the same as this file. #joey /home/joey/.ikiwiki/ikiwiki.setup ikiwiki-3.20160121/underlays/0000755000000000000000000000000012650125230012441 5ustar ikiwiki-3.20160121/underlays/smiley/0000755000000000000000000000000012650125230013743 5ustar ikiwiki-3.20160121/underlays/smiley/smileys0000777000000000000000000000000012650125230020153 2../../doc/smileysustar ikiwiki-3.20160121/underlays/smiley/smileys.mdwn0000777000000000000000000000000012650125230022063 2../../doc/smileys.mdwnustar ikiwiki-3.20160121/underlays/osm/0000755000000000000000000000000012650125230013237 5ustar ikiwiki-3.20160121/underlays/osm/ikiwiki/0000755000000000000000000000000012650125230014677 5ustar ikiwiki-3.20160121/underlays/osm/ikiwiki/osm.js0000644000000000000000000001260612650125230016040 0ustar // taken from http://stackoverflow.com/questions/901115/get-query-string-values-in-javascript var urlParams = {}; (function () { var e, a = /\\+/g, // Regex for replacing addition symbol with a space r = /([^&=]+)=?([^&]*)/g, d = function (s) { return decodeURIComponent(s.replace(a, " ")); }, q = window.location.search.substring(1); while (e = r.exec(q)) urlParams[d(e[1])] = d(e[2]); })(); function mapsetup(divname, options) { div = document.getElementById(divname); if (options.fullscreen) { permalink = 'permalink'; div.style.top = 0; 
div.style.left = 0; div.style.position = 'absolute'; div.style.width = '100%'; div.style.height = '100%'; } else { div.style.height = options.height; div.style.width = options.width; div.style.float = options.float; permalink = {base: options.href, title: "View larger map"}; } map = new OpenLayers.Map(divname, { controls: [ new OpenLayers.Control.Navigation(), new OpenLayers.Control.ScaleLine(), new OpenLayers.Control.Permalink(permalink) ], displayProjection: new OpenLayers.Projection("EPSG:4326"), maxExtent: new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34), projection: "EPSG:900913", units: "m", maxResolution: 156543.0339, numZoomLevels: 19 }); for (x in options.layers) { layer = options.layers[x]; console.log("setting up layer: " + layer); if (layer.indexOf("Google") >= 0) { if (options.google_apikey && options.google_apikey != 'null') { var gtype = G_NORMAL_MAP; if (layer.indexOf("Satellite") >= 0) { gtype = G_SATELLITE_MAP; } else if (layer.indexOf("Hybrid") >= 0) { gtype = G_HYBRID_MAP // the normal map overlaying the satellite photographs } else if (layer.indexOf("Physical") >= 0) { gtype = G_PHYSICAL_MAP // terrain information } // this nightmare is possible through http://docs.openlayers.org/library/spherical_mercator.html googleLayer = new OpenLayers.Layer.Google( layer, {type: gtype, 'sphericalMercator': true, 'maxExtent': new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34), projection: new OpenLayers.Projection("EPSG:3857")} ); map.addLayer(googleLayer); } else { console.log("no API key defined for Google layer, skipping"); } } else if (layer == 'OSM') { // OSM default layer map.addLayer(new OpenLayers.Layer.OSM("OSM (Mapnik)")); } else { // assumed to be a URL text = layer.match(/([^.\/]*\.[^.\/]*(\/[^\$]*)?)\/.*$/i) // take the first two parts of the FQDN and everything before the first $ map.addLayer(new OpenLayers.Layer.OSM("OSM (" + text[1] + ")", layer)); } } if (options.format == 'CSV') { pois = new 
OpenLayers.Layer.Text( "CSV", { location: options.csvurl, projection: new OpenLayers.Projection("EPSG:4326") }); } else if (options.format == 'GeoJSON') { pois = new OpenLayers.Layer.Vector("GeoJSON", { protocol: new OpenLayers.Protocol.HTTP({ url: options.jsonurl, format: new OpenLayers.Format.GeoJSON() }), strategies: [new OpenLayers.Strategy.Fixed()], projection: new OpenLayers.Projection("EPSG:4326") }); } else { pois = new OpenLayers.Layer.Vector("KML", { protocol: new OpenLayers.Protocol.HTTP({ url: options.kmlurl, format: new OpenLayers.Format.KML({ extractStyles: true, extractAttributes: true }) }), strategies: [new OpenLayers.Strategy.Fixed()], projection: new OpenLayers.Projection("EPSG:4326") }); } map.addLayer(pois); select = new OpenLayers.Control.SelectFeature(pois); map.addControl(select); select.activate(); pois.events.on({ "featureselected": function (event) { var feature = event.feature; var content = '

' +feature.attributes.name + "

" + feature.attributes.description; popup = new OpenLayers.Popup.FramedCloud("chicken", feature.geometry.getBounds().getCenterLonLat(), new OpenLayers.Size(100,100), content, null, true, function () {select.unselectAll()}); feature.popup = popup; map.addPopup(popup); }, "featureunselected": function (event) { var feature = event.feature; if (feature.popup) { map.removePopup(feature.popup); feature.popup.destroy(); delete feature.popup; } } }); if (options.editable) { vlayer = new OpenLayers.Layer.Vector( "Editable" ); map.addControl(new OpenLayers.Control.EditingToolbar(vlayer)); map.addLayer(vlayer); } if (options.fullscreen) { map.addControl(new OpenLayers.Control.PanZoomBar()); map.addControl(new OpenLayers.Control.LayerSwitcher()); map.addControl(new OpenLayers.Control.MousePosition()); map.addControl(new OpenLayers.Control.KeyboardDefaults()); } else { map.addControl(new OpenLayers.Control.ZoomPanel()); } //Set start centrepoint and zoom if (!options.lat || !options.lon) { options.lat = urlParams['lat']; options.lon = urlParams['lon']; } if (!options.zoom) { options.zoom = urlParams['zoom']; } if (options.lat && options.lon) { var lat = options.lat; var lon = options.lon; var zoom= options.zoom || 10; center = new OpenLayers.LonLat( lon, lat ).transform( new OpenLayers.Projection("EPSG:4326"), // transform from WGS 1984 map.getProjectionObject() // to Spherical Mercator Projection ); map.setCenter (center, zoom); } else { pois.events.register("loadend", this, function () { map.zoomToExtent(pois.getDataExtent()); }); } } ikiwiki-3.20160121/underlays/osm/ikiwiki/images/0000755000000000000000000000000012650125230016144 5ustar ikiwiki-3.20160121/underlays/osm/ikiwiki/images/osm.png0000644000000000000000000000564612650125230017463 0ustar PNG  IHDR;0bKGDC pHYsHHFk> vpAgO= IDATHǵY\wV]]]TZx60Q&0 Ei@Q'y`4B5a$%3™m{ߪk}ֽHGtwQ/3X~KBwC-{*8@ `YVݱ/.<>;]7x7P,,n?$P-5n /i쭙 oDz?~wV9g'=3tuu @XUV+\4di qнKz_}M*ڸݹS˅Zw,<5<68765t+~b&~`OTeJPbBH$ك=ݝRCԱqB o}W.^:m=KxԺlAc4|OO_0s&E+D 8m(fj- 
R՜wc[~28_uSG/8w$nfuz&H .,r.y"l_5_x~Ծ:U&qrvԱ;[_ e{;˝[щ>q2Q2p:au NubAʭ=^WHGI)ۅ_qcS vwwEH3믿L&cO4v7s7s{ÍF"yxGD&="aicO)Y0xAW EQxSn~m7nPꀫ<;X<^aFU xCR46NjLdz/.Q0 c#%f1 >T_{5CT'B9Y-A\Ӣ S )$N}*l[%zhKrtFanl9>iڔ\[[㟾S#hExK~vEQ{\2JJ;niےhJZ@ +ēdh(8H!CZE*P]QQVi6:=2PUs1&{<ؼzU4!TdêH:A5>p=A<6z=CNhĢxHTrZ 6Y;}}в,k_{3cI]J92)-n6f#c \8{= hsɗ4Y\ELGx6=LǑxY}P{w|Rm4.{]__Gneyk8#U_56`<)&˫KA/ Z0mO zi54c1y`ܑaQ.]đ\;8vPݧR^ ,[ Pk u3ɭoa)~^4׏HEx^ȁw޾RW \wezfhg^)x>"6-qM桫ˬ/ēқj$~R.zld@ЏAw_Ge\.-o# Dh方m}~ $U-TCO8uz-H!_>=Xc}VbK4'=qn{N6 ${'#>A?B4]^o+^]vc;,,,=uU49++וo.6׋esѪyHu3ɞѾӣhOaR\M/7stzi v6_~2W\9w\IGNu\oΤ{ŭ&@s T<:FW?d2ʁS1sMP !ԑݟT*iNǞSVLTɋE9_뇳ٮJP(7+rir6GSvwj]|[tE25f-o;? }6r%tEXtdate:create2010-11-06T21:30:27-07:00{K%tEXtdate:modify2010-10-30T13:39:49-07:00"OtEXtSoftwarewww.inkscape.org<IENDB`ikiwiki-3.20160121/underlays/login-selector/0000755000000000000000000000000012650125230015367 5ustar ikiwiki-3.20160121/underlays/login-selector/ikiwiki/0000755000000000000000000000000012650125230017027 5ustar ikiwiki-3.20160121/underlays/login-selector/ikiwiki/login-selector/0000755000000000000000000000000012650125230021755 5ustar ikiwiki-3.20160121/underlays/login-selector/ikiwiki/login-selector/wordpress.png0000644000000000000000000000156612650125230024523 0ustar PNG  IHDRa=IDAT8u]L[u?=2Rj) .fI$1K53M\p5EBH\AmLLJlQV`|e d>/yijv4o(" f sMQ"a@8p`)0I/dj6$I?[D‰ _|Ya<+;G^#NXhj8X}*j-j:GV/uPZN7V'u M0M΅.4gux!Ipe.I!NwhBRL.|v{q3jc]lvqg.OsO7(ss.eE+-BbۗSXSBz X-2p3o;EH"_]M/m Z& ?Of&k,r$ȳ|}%]5GA@1ٜFI af 9=Yq0U2ob ȳRt~ 'fc;Š3|uyCSd3*UB 5^oHfUާZd|N:ΝU-a9ǦȔyQ,6(Z&zI"j:ߌxgw9 Ȣ7*w.5+`*FStY ̮fŗ8~"o$Veήu9ϿT//JKvfdN^KrO3zto:yv_fEr"L29=tV}P"%IENDB`ikiwiki-3.20160121/underlays/login-selector/ikiwiki/login-selector/verisign.png0000644000000000000000000000131212650125230024306 0ustar PNG  IHDRasBIT|d pHYs B(xtEXtSoftwarewww.inkscape.org<%tEXtTitleapproximately the Verisign logo OtEXtAuthorSimon McVittie4tEXtCreation Time2014AIDAT8OHTQ2EQJ`ISdA5[F !7m[.RBVQ" BB}oD6myOP;fl#p.=5I'rk-0`+šLن\(at 3`'V,ambq b.Xz%1Ieݠ"vƤ /76w17pZr#(i(Mu-h,dF9t8Mx-d/ 
.`'P]BQȽ&LOz;8 <>47z%p ߀~^dß#y!kKy[ \޷.|t X\"তWOM$!~/oF#6h;ǭ8A7P51/YC`͚pL!zhIENDB`ikiwiki-3.20160121/underlays/login-selector/ikiwiki/login-selector/login-selector.js0000644000000000000000000001612012650125230025241 0ustar /* Based on the Simple OpenID Plugin http://code.google.com/p/openid-selector/ This code is licenced under the New BSD License. */ var selections_email_large = { email: { name: 'Email', icon: 'wikiicons/email.png', label: 'Enter your email address:', url: null } }; var selections_openid_large = { openid: { name: 'OpenID', icon: 'wikiicons/openidlogin-bg.gif', label: 'Enter your OpenID:', url: null } }; var selections_openid_small = { verisign: { name: 'Verisign', icon: 'ikiwiki/login-selector/verisign.png', label: 'Enter your Verisign username:', url: 'http://{username}.pip.verisignlabs.com/' }, yahoo: { name: 'Yahoo', icon: 'ikiwiki/login-selector/goa-account-yahoo.png', url: 'http://me.yahoo.com/' }, flickr: { name: 'Flickr', icon: 'ikiwiki/login-selector/goa-account-flickr.png', label: 'Enter your Flickr username:', url: 'http://flickr.com/photos/{username}/' }, wordpress: { name: 'Wordpress', icon: 'ikiwiki/login-selector/wordpress.png', label: 'Enter your Wordpress.com username:', url: 'http://{username}.wordpress.com/' }, aol: { name: 'AOL', icon: 'ikiwiki/login-selector/aol.png', label: 'Enter your AOL username:', url: 'http://openid.aol.com/{username}' } }; var selections = $.extend({}, selections_email_large, selections_openid_large, selections_openid_small); var selector = { ajaxHandler: null, cookie_expires: 6*30, // 6 months. 
cookie_name: 'openid_selection', // historical name cookie_path: '/', img_path: 'images/', input_id: null, selection_url: null, selection_id: null, othersignin_id: null, init: function(input_id, login_methods, othersignin_id, othersignin_label) { var selector_btns = $('#login_btns'); this.input_id = input_id; $('#login_choice').show(); $('#login_input_area').empty(); // add box for each selection if (login_methods['openid']) { for (id in selections_openid_large) { selector_btns.append(this.getBoxHTML(selections_openid_large[id], 'large')); } } if (login_methods['email']) { for (id in selections_email_large) { selector_btns.prepend(this.getBoxHTML(selections_email_large[id], 'large')); } } if (othersignin_label != "") { this.othersignin_label=othersignin_label; } else { this.othersignin_label="other"; } if (othersignin_id != "") { this.othersignin_id=othersignin_id; selector_btns.prepend( '' + '' + ' ' + this.othersignin_label + '' ); $('#'+this.othersignin_id).hide(); } if (login_methods['openid'] && selections_openid_small) { selector_btns.append('
'); for (id in selections_openid_small) { selector_btns.append(this.getBoxHTML(selections_openid_small[id], 'small')); } } $('#login_selector_form').submit(this.submit); var box_id = this.readCookie(); if (box_id) { this.signin(box_id, true); } }, getBoxHTML: function(selection, box_size) { var label=""; var title="" if (box_size == 'large') { label=' ' + selection["name"]; } else { title=' title="'+selection["name"]+'"'; } var box_id = selection["name"].toLowerCase(); return '' + '' + label + ''; }, /* selection image click */ signin: function(box_id, onload) { if (box_id == 'othersignin') { this.highlight(box_id); $('#login_input_area').empty(); $('#'+this.othersignin_id).show(); this.setCookie(box_id); return; } else { if (this.othersignin_id) { $('#'+this.othersignin_id).hide(); } } var selection = selections[box_id]; if (! selection) { return; } this.highlight(box_id); this.selection_id = box_id; this.selection_url = selection['url']; // prompt user for input? if (selection['label']) { this.setCookie(box_id); this.useInputBox(selection); } else { this.setCookie(''); $('#login_input_area').empty(); if (! onload) { $('#login_selector_form').submit(); } } }, /* Sign-in button click */ submit: function() { var url = selector.selection_url; if (url) { url = url.replace('{username}', $('#entry').val()); selector.setOpenIdUrl(url); } else { selector.setOpenIdUrl(""); } if (selector.ajaxHandler) { selector.ajaxHandler(selector.selection_id, document.getElementById(selector.input_id).value); return false; } return true; }, setOpenIdUrl: function (url) { var hidden = $('#'+this.input_id); if (hidden.length > 0) { hidden.value = url; } else { $('#login_selector_form').append(''); } }, highlight: function (box_id) { // remove previous highlight. var highlight = $('#login_highlight'); if (highlight) { highlight.replaceWith($('#login_highlight a')[0]); } // add new highlight. $('.'+box_id).wrap('
'); }, setCookie: function (value) { var date = new Date(); date.setTime(date.getTime()+(this.cookie_expires*24*60*60*1000)); var expires = "; expires="+date.toGMTString(); document.cookie = this.cookie_name+"="+value+expires+"; path=" + this.cookie_path; }, readCookie: function () { var nameEQ = this.cookie_name + "="; var ca = document.cookie.split(';'); for(var i=0;i < ca.length;i++) { var c = ca[i]; while (c.charAt(0)==' ') c = c.substring(1,c.length); if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length,c.length); } return null; }, useInputBox: function (selection) { var input_area = $('#login_input_area'); var html = ''; var id = selection['name']+'_entry'; var value = ''; var label = selection['label']; var style = ''; if (selection['name'] == 'OpenID') { id = this.input_id; value = ''; style = 'background:#FFF url(wikiicons/openidlogin-bg.gif) no-repeat scroll 0 50%; padding-left:18px;'; } if (label) { html = ''; } html += '' + ''; input_area.empty(); input_area.append(html); $('#'+id).focus(); }, setAjaxHandler: function (ajaxFunction) { this.ajaxHandler = ajaxFunction; } }; ikiwiki-3.20160121/underlays/login-selector/ikiwiki/login-selector/goa-account-yahoo.png0000644000000000000000000000134512650125230026003 0ustar PNG  IHDRasBIT|d pHYs B(xtEXtSoftwarewww.inkscape.org<tEXtTitleOptical Drive>g CIDAT8OTQ{Y0<p|G|` @&X@mcabebD-[hBJ(X]s{,0(DdNqNff'=5 q> h$xp@9Rx*$jm57TB$?xH4UfV8Q~OTW0- 'J\Cݭ=ͩqgH'iJ>]-Y-cldqw /FAZߙ¯4%dܧuRu[4uQ5E>?Z8gmN0Z奈 wϣ2>a1dvdTZo>rr+b 8AG,YJ+1AUCN>b8 ciPvLN*-N,1 Βl`8o/&?[@GOd}12+ӫ-6*BdL;sD$wʶpA/m}-X'8AU&-v3,++ѻ=hjt:Xgm]Љ>WUHs%hR%Ѩ*Ky0F۵~pZgly.`v7IENDB`ikiwiki-3.20160121/underlays/login-selector/ikiwiki/login-selector/goa-account-flickr.png0000644000000000000000000000112012650125230026125 0ustar PNG  IHDRasBIT|d pHYs B(xtEXtSoftwarewww.inkscape.org<tEXtTitleOptical Drive>g IDAT8jSA;7!,Z{(TP2>C})}E B"BBv /h2wqZ\;gsΨhTJyC)|EyoB'fh4ֺlv$!/h*O 'hmpڋ9#1ԫ9}G3×za6MAg~7YtvXtv~#"xc'[kMǬVBr7"2HӴjtV+2 sx2_}gc 
p[!żkIENDB`ikiwiki-3.20160121/underlays/login-selector/ikiwiki/login-selector/aol.png0000644000000000000000000000071412650125230023240 0ustar PNG  IHDRasBIT|d pHYs B(xtEXtSoftwarewww.inkscape.org< tEXtTitleapproximately the AOL logo0tEXtAuthorSimon McVittie4tEXtCreation Time2014AIDAT8ӯJAYvÂaW6D͚,`/a56Y7 bLnXf凊e;{aRJ1jto&Y4J) I`<ݷ*u (kt[JrddGy%C7@W'c1B;_z.6)[^>]*$|#([\w\-]*)$)/,bK=/\S/,bG=/^\s+/,bB=/\s+$/,bF=/\d/,bx=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,bL=/^[\],:{}\s]*$/,bU=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,bN=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,bH=/(?:^|:|,)(?:\s*\[)+/g,bv=/(webkit)[ \/]([\w.]+)/,bP=/(opera)(?:.*version)?[ \/]([\w.]+)/,bO=/(msie) ([\w.]+)/,bQ=/(mozilla)(?:.*? rv:([\w.]+))?/,by=/-([a-z])/ig,bR=function(bX,bY){return bY.toUpperCase()},bV=br.userAgent,bT,bz,e,bJ=Object.prototype.toString,bD=Object.prototype.hasOwnProperty,bw=Array.prototype.push,bI=Array.prototype.slice,bM=String.prototype.trim,bs=Array.prototype.indexOf,bu={};bC.fn=bC.prototype={constructor:bC,init:function(bX,b1,b0){var bZ,b2,bY,b3;if(!bX){return this}if(bX.nodeType){this.context=this[0]=bX;this.length=1;return this}if(bX==="body"&&!b1&&aq.body){this.context=aq;this[0]=aq.body;this.selector=bX;this.length=1;return this}if(typeof bX==="string"){if(bX.charAt(0)==="<"&&bX.charAt(bX.length-1)===">"&&bX.length>=3){bZ=[null,bX,null]}else{bZ=bW.exec(bX)}if(bZ&&(bZ[1]||!b1)){if(bZ[1]){b1=b1 instanceof bC?b1[0]:b1;b3=(b1?b1.ownerDocument||b1:aq);bY=bx.exec(bX);if(bY){if(bC.isPlainObject(b1)){bX=[aq.createElement(bY[1])];bC.fn.attr.call(bX,b1,true)}else{bX=[b3.createElement(bY[1])]}}else{bY=bC.buildFragment([bZ[1]],[b3]);bX=(bY.cacheable?bC.clone(bY.fragment):bY.fragment).childNodes}return bC.merge(this,bX)}else{b2=aq.getElementById(bZ[2]);if(b2&&b2.parentNode){if(b2.id!==bZ[2]){return b0.find(bX)}this.length=1;this[0]=b2}this.context=aq;this.selector=bX;return this}}else{if(!b1||b1.jquery){return(b1||b0).find(bX)}else{return 
this.constructor(b1).find(bX)}}}else{if(bC.isFunction(bX)){return b0.ready(bX)}}if(bX.selector!==K){this.selector=bX.selector;this.context=bX.context}return bC.makeArray(bX,this)},selector:"",jquery:"1.6.2",length:0,size:function(){return this.length},toArray:function(){return bI.call(this,0)},get:function(bX){return bX==null?this.toArray():(bX<0?this[this.length+bX]:this[bX])},pushStack:function(bY,b0,bX){var bZ=this.constructor();if(bC.isArray(bY)){bw.apply(bZ,bY)}else{bC.merge(bZ,bY)}bZ.prevObject=this;bZ.context=this.context;if(b0==="find"){bZ.selector=this.selector+(this.selector?" ":"")+bX}else{if(b0){bZ.selector=this.selector+"."+b0+"("+bX+")"}}return bZ},each:function(bY,bX){return bC.each(this,bY,bX)},ready:function(bX){bC.bindReady();bz.done(bX);return this},eq:function(bX){return bX===-1?this.slice(bX):this.slice(bX,+bX+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(bI.apply(this,arguments),"slice",bI.call(arguments).join(","))},map:function(bX){return this.pushStack(bC.map(this,function(bZ,bY){return bX.call(bZ,bY,bZ)}))},end:function(){return this.prevObject||this.constructor(null)},push:bw,sort:[].sort,splice:[].splice};bC.fn.init.prototype=bC.fn;bC.extend=bC.fn.extend=function(){var b6,bZ,bX,bY,b3,b4,b2=arguments[0]||{},b1=1,b0=arguments.length,b5=false;if(typeof b2==="boolean"){b5=b2;b2=arguments[1]||{};b1=2}if(typeof b2!=="object"&&!bC.isFunction(b2)){b2={}}if(b0===b1){b2=this;--b1}for(;b10){return}bz.resolveWith(aq,[bC]);if(bC.fn.trigger){bC(aq).trigger("ready").unbind("ready")}}},bindReady:function(){if(bz){return}bz=bC._Deferred();if(aq.readyState==="complete"){return setTimeout(bC.ready,1)}if(aq.addEventListener){aq.addEventListener("DOMContentLoaded",e,false);a9.addEventListener("load",bC.ready,false)}else{if(aq.attachEvent){aq.attachEvent("onreadystatechange",e);a9.attachEvent("onload",bC.ready);var 
bX=false;try{bX=a9.frameElement==null}catch(bY){}if(aq.documentElement.doScroll&&bX){bt()}}}},isFunction:function(bX){return bC.type(bX)==="function"},isArray:Array.isArray||function(bX){return bC.type(bX)==="array"},isWindow:function(bX){return bX&&typeof bX==="object"&&"setInterval" in bX},isNaN:function(bX){return bX==null||!bF.test(bX)||isNaN(bX)},type:function(bX){return bX==null?String(bX):bu[bJ.call(bX)]||"object"},isPlainObject:function(bY){if(!bY||bC.type(bY)!=="object"||bY.nodeType||bC.isWindow(bY)){return false}if(bY.constructor&&!bD.call(bY,"constructor")&&!bD.call(bY.constructor.prototype,"isPrototypeOf")){return false}var bX;for(bX in bY){}return bX===K||bD.call(bY,bX)},isEmptyObject:function(bY){for(var bX in bY){return false}return true},error:function(bX){throw bX},parseJSON:function(bX){if(typeof bX!=="string"||!bX){return null}bX=bC.trim(bX);if(a9.JSON&&a9.JSON.parse){return a9.JSON.parse(bX)}if(bL.test(bX.replace(bU,"@").replace(bN,"]").replace(bH,""))){return(new Function("return "+bX))()}bC.error("Invalid JSON: "+bX)},parseXML:function(bZ,bX,bY){if(a9.DOMParser){bY=new DOMParser();bX=bY.parseFromString(bZ,"text/xml")}else{bX=new ActiveXObject("Microsoft.XMLDOM");bX.async="false";bX.loadXML(bZ)}bY=bX.documentElement;if(!bY||!bY.nodeName||bY.nodeName==="parsererror"){bC.error("Invalid XML: "+bZ)}return bX},noop:function(){},globalEval:function(bX){if(bX&&bK.test(bX)){(a9.execScript||function(bY){a9["eval"].call(a9,bY)})(bX)}},camelCase:function(bX){return bX.replace(by,bR)},nodeName:function(bY,bX){return bY.nodeName&&bY.nodeName.toUpperCase()===bX.toUpperCase()},each:function(b0,b3,bZ){var bY,b1=0,b2=b0.length,bX=b2===K||bC.isFunction(b0);if(bZ){if(bX){for(bY in b0){if(b3.apply(b0[bY],bZ)===false){break}}}else{for(;b10&&bX[0]&&bX[bY-1])||bY===0||bC.isArray(bX));if(b0){for(;bZ1?aF.call(arguments,0):bz;if(!(--bv)){e.resolveWith(e,aF.call(bs,0))}}}if(bw>1){for(;bt
a";bv=bC.getElementsByTagName("*");bK=bC.getElementsByTagName("a")[0];if(!bv||!bv.length||!bK){return{}}bD=aq.createElement("select");bt=bD.appendChild(aq.createElement("option"));bB=bC.getElementsByTagName("input")[0];bz={leadingWhitespace:(bC.firstChild.nodeType===3),tbody:!bC.getElementsByTagName("tbody").length,htmlSerialize:!!bC.getElementsByTagName("link").length,style:/top/.test(bK.getAttribute("style")),hrefNormalized:(bK.getAttribute("href")==="/a"),opacity:/^0.55$/.test(bK.style.opacity),cssFloat:!!bK.style.cssFloat,checkOn:(bB.value==="on"),optSelected:bt.selected,getSetAttribute:bC.className!=="t",submitBubbles:true,changeBubbles:true,focusinBubbles:false,deleteExpando:true,noCloneEvent:true,inlineBlockNeedsLayout:false,shrinkWrapBlocks:false,reliableMarginRight:true};bB.checked=true;bz.noCloneChecked=bB.cloneNode(true).checked;bD.disabled=true;bz.optDisabled=!bt.disabled;try{delete bC.test}catch(bH){bz.deleteExpando=false}if(!bC.addEventListener&&bC.attachEvent&&bC.fireEvent){bC.attachEvent("onclick",function(){bz.noCloneEvent=false});bC.cloneNode(true).fireEvent("onclick")}bB=aq.createElement("input");bB.value="t";bB.setAttribute("type","radio");bz.radioValue=bB.value==="t";bB.setAttribute("checked","checked");bC.appendChild(bB);bs=aq.createDocumentFragment();bs.appendChild(bC.firstChild);bz.checkClone=bs.cloneNode(true).cloneNode(true).lastChild.checked;bC.innerHTML="";bC.style.width=bC.style.paddingLeft="1px";bA=aq.getElementsByTagName("body")[0];by=aq.createElement(bA?"div":"body");bI={visibility:"hidden",width:0,height:0,border:0,margin:0};if(bA){b.extend(bI,{position:"absolute",left:-1000,top:-1000})}for(bF in bI){by.style[bF]=bI[bF]}by.appendChild(bC);bE=bA||bJ;bE.insertBefore(by,bE.firstChild);bz.appendChecked=bB.checked;bz.boxModel=bC.offsetWidth===2;if("zoom" in bC.style){bC.style.display="inline";bC.style.zoom=1;bz.inlineBlockNeedsLayout=(bC.offsetWidth===2);bC.style.display="";bC.innerHTML="
";bz.shrinkWrapBlocks=(bC.offsetWidth!==2)}bC.innerHTML="
t
";bG=bC.getElementsByTagName("td");bL=(bG[0].offsetHeight===0);bG[0].style.display="";bG[1].style.display="none";bz.reliableHiddenOffsets=bL&&(bG[0].offsetHeight===0);bC.innerHTML="";if(aq.defaultView&&aq.defaultView.getComputedStyle){bw=aq.createElement("div");bw.style.width="0";bw.style.marginRight="0";bC.appendChild(bw);bz.reliableMarginRight=(parseInt((aq.defaultView.getComputedStyle(bw,null)||{marginRight:0}).marginRight,10)||0)===0}by.innerHTML="";bE.removeChild(by);if(bC.attachEvent){for(bF in {submit:1,change:1,focusin:1}){bx="on"+bF;bL=(bx in bC);if(!bL){bC.setAttribute(bx,"return;");bL=(typeof bC[bx]==="function")}bz[bF+"Bubbles"]=bL}}by=bs=bD=bt=bA=bw=bC=bB=null;return bz})();b.boxModel=b.support.boxModel;var aM=/^(?:\{.*\}|\[.*\])$/,aw=/([a-z])([A-Z])/g;b.extend({cache:{},uuid:0,expando:"jQuery"+(b.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:true,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:true},hasData:function(e){e=e.nodeType?b.cache[e[b.expando]]:e[b.expando];return !!e&&!T(e)},data:function(bu,bs,bw,bv){if(!b.acceptData(bu)){return}var bz=b.expando,by=typeof bs==="string",bx,bA=bu.nodeType,e=bA?b.cache:bu,bt=bA?bu[b.expando]:bu[b.expando]&&b.expando;if((!bt||(bv&&bt&&!e[bt][bz]))&&by&&bw===K){return}if(!bt){if(bA){bu[b.expando]=bt=++b.uuid}else{bt=b.expando}}if(!e[bt]){e[bt]={};if(!bA){e[bt].toJSON=b.noop}}if(typeof bs==="object"||typeof bs==="function"){if(bv){e[bt][bz]=b.extend(e[bt][bz],bs)}else{e[bt]=b.extend(e[bt],bs)}}bx=e[bt];if(bv){if(!bx[bz]){bx[bz]={}}bx=bx[bz]}if(bw!==K){bx[b.camelCase(bs)]=bw}if(bs==="events"&&!bx[bs]){return bx[bz]&&bx[bz].events}return by?bx[b.camelCase(bs)]||bx[bs]:bx},removeData:function(bv,bt,bw){if(!b.acceptData(bv)){return}var by=b.expando,bz=bv.nodeType,bs=bz?b.cache:bv,bu=bz?bv[b.expando]:b.expando;if(!bs[bu]){return}if(bt){var bx=bw?bs[bu][by]:bs[bu];if(bx){delete bx[bt];if(!T(bx)){return}}}if(bw){delete bs[bu][by];if(!T(bs[bu])){return}}var 
e=bs[bu][by];if(b.support.deleteExpando||bs!=a9){delete bs[bu]}else{bs[bu]=null}if(e){bs[bu]={};if(!bz){bs[bu].toJSON=b.noop}bs[bu][by]=e}else{if(bz){if(b.support.deleteExpando){delete bv[b.expando]}else{if(bv.removeAttribute){bv.removeAttribute(b.expando)}else{bv[b.expando]=null}}}}},_data:function(bs,e,bt){return b.data(bs,e,bt,true)},acceptData:function(bs){if(bs.nodeName){var e=b.noData[bs.nodeName.toLowerCase()];if(e){return !(e===true||bs.getAttribute("classid")!==e)}}return true}});b.fn.extend({data:function(bv,bx){var bw=null;if(typeof bv==="undefined"){if(this.length){bw=b.data(this[0]);if(this[0].nodeType===1){var e=this[0].attributes,bt;for(var bu=0,bs=e.length;bu-1){return true}}return false},val:function(bu){var e,bs,bt=this[0];if(!arguments.length){if(bt){e=b.valHooks[bt.nodeName.toLowerCase()]||b.valHooks[bt.type];if(e&&"get" in e&&(bs=e.get(bt,"value"))!==K){return bs}bs=bt.value;return typeof bs==="string"?bs.replace(aO,""):bs==null?"":bs}return K}var bv=b.isFunction(bu);return this.each(function(bx){var bw=b(this),by;if(this.nodeType!==1){return}if(bv){by=bu.call(this,bx,bw.val())}else{by=bu}if(by==null){by=""}else{if(typeof by==="number"){by+=""}else{if(b.isArray(by)){by=b.map(by,function(bz){return bz==null?"":bz+""})}}}e=b.valHooks[this.nodeName.toLowerCase()]||b.valHooks[this.type];if(!e||!("set" in e)||e.set(this,by,"value")===K){this.value=by}})}});b.extend({valHooks:{option:{get:function(e){var bs=e.attributes.value;return !bs||bs.specified?e.value:e.text}},select:{get:function(e){var bx,bv=e.selectedIndex,by=[],bz=e.options,bu=e.type==="select-one";if(bv<0){return null}for(var bs=bu?bv:0,bw=bu?bv+1:bz.length;bs=0});if(!e.length){bs.selectedIndex=-1}return e}}},attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attrFix:{tabindex:"tabIndex"},attr:function(bx,bu,by,bw){var bs=bx.nodeType;if(!bx||bs===3||bs===8||bs===2){return K}if(bw&&bu in b.attrFn){return b(bx)[bu](by)}if(!("getAttribute" in 
bx)){return b.prop(bx,bu,by)}var bt,e,bv=bs!==1||!b.isXMLDoc(bx);if(bv){bu=b.attrFix[bu]||bu;e=b.attrHooks[bu];if(!e){if(ak.test(bu)){e=aU}else{if(a0&&bu!=="className"&&(b.nodeName(bx,"form")||a5.test(bu))){e=a0}}}}if(by!==K){if(by===null){b.removeAttr(bx,bu);return K}else{if(e&&"set" in e&&bv&&(bt=e.set(bx,by,bu))!==K){return bt}else{bx.setAttribute(bu,""+by);return by}}}else{if(e&&"get" in e&&bv&&(bt=e.get(bx,bu))!==null){return bt}else{bt=bx.getAttribute(bu);return bt===null?K:bt}}},removeAttr:function(bs,e){var bt;if(bs.nodeType===1){e=b.attrFix[e]||e;if(b.support.getSetAttribute){bs.removeAttribute(e)}else{b.attr(bs,e,"");bs.removeAttributeNode(bs.getAttributeNode(e))}if(ak.test(e)&&(bt=b.propFix[e]||e) in bs){bs[bt]=false}}},attrHooks:{type:{set:function(e,bs){if(g.test(e.nodeName)&&e.parentNode){b.error("type property can't be changed")}else{if(!b.support.radioValue&&bs==="radio"&&b.nodeName(e,"input")){var bt=e.value;e.setAttribute("type",bs);if(bt){e.value=bt}return bs}}}},tabIndex:{get:function(bs){var e=bs.getAttributeNode("tabIndex");return e&&e.specified?parseInt(e.value,10):D.test(bs.nodeName)||l.test(bs.nodeName)&&bs.href?0:K}},value:{get:function(bs,e){if(a0&&b.nodeName(bs,"button")){return a0.get(bs,e)}return e in bs?bs.value:null},set:function(bs,bt,e){if(a0&&b.nodeName(bs,"button")){return a0.set(bs,bt,e)}bs.value=bt}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(bw,bu,bx){var bs=bw.nodeType;if(!bw||bs===3||bs===8||bs===2){return K}var bt,e,bv=bs!==1||!b.isXMLDoc(bw);if(bv){bu=b.propFix[bu]||bu;e=b.propHooks[bu]}if(bx!==K){if(e&&"set" in e&&(bt=e.set(bw,bx,bu))!==K){return bt}else{return(bw[bu]=bx)}}else{if(e&&"get" in e&&(bt=e.get(bw,bu))!==K){return bt}else{return 
bw[bu]}}},propHooks:{}});aU={get:function(bs,e){return b.prop(bs,e)?e.toLowerCase():K},set:function(bs,bu,e){var bt;if(bu===false){b.removeAttr(bs,e)}else{bt=b.propFix[e]||e;if(bt in bs){bs[bt]=true}bs.setAttribute(e,e.toLowerCase())}return e}};if(!b.support.getSetAttribute){b.attrFix=b.propFix;a0=b.attrHooks.name=b.attrHooks.title=b.valHooks.button={get:function(bt,bs){var e;e=bt.getAttributeNode(bs);return e&&e.nodeValue!==""?e.nodeValue:K},set:function(bt,bu,bs){var e=bt.getAttributeNode(bs);if(e){e.nodeValue=bu;return bu}}};b.each(["width","height"],function(bs,e){b.attrHooks[e]=b.extend(b.attrHooks[e],{set:function(bt,bu){if(bu===""){bt.setAttribute(e,"auto");return bu}}})})}if(!b.support.hrefNormalized){b.each(["href","src","width","height"],function(bs,e){b.attrHooks[e]=b.extend(b.attrHooks[e],{get:function(bu){var bt=bu.getAttribute(e,2);return bt===null?K:bt}})})}if(!b.support.style){b.attrHooks.style={get:function(e){return e.style.cssText.toLowerCase()||K},set:function(e,bs){return(e.style.cssText=""+bs)}}}if(!b.support.optSelected){b.propHooks.selected=b.extend(b.propHooks.selected,{get:function(bs){var e=bs.parentNode;if(e){e.selectedIndex;if(e.parentNode){e.parentNode.selectedIndex}}}})}if(!b.support.checkOn){b.each(["radio","checkbox"],function(){b.valHooks[this]={get:function(e){return e.getAttribute("value")===null?"on":e.value}}})}b.each(["radio","checkbox"],function(){b.valHooks[this]=b.extend(b.valHooks[this],{set:function(e,bs){if(b.isArray(bs)){return(e.checked=b.inArray(b(e).val(),bs)>=0)}}})});var aW=/\.(.*)$/,bb=/^(?:textarea|input|select)$/i,N=/\./g,bf=/ /g,aC=/[^\w\s.|`]/g,G=function(e){return e.replace(aC,"\\$&")};b.event={add:function(bu,by,bD,bw){if(bu.nodeType===3||bu.nodeType===8){return}if(bD===false){bD=bh}else{if(!bD){return}}var bs,bC;if(bD.handler){bs=bD;bD=bs.handler}if(!bD.guid){bD.guid=b.guid++}var bz=b._data(bu);if(!bz){return}var bE=bz.events,bx=bz.handle;if(!bE){bz.events=bE={}}if(!bx){bz.handle=bx=function(bF){return 
typeof b!=="undefined"&&(!bF||b.event.triggered!==bF.type)?b.event.handle.apply(bx.elem,arguments):K}}bx.elem=bu;by=by.split(" ");var bB,bv=0,e;while((bB=by[bv++])){bC=bs?b.extend({},bs):{handler:bD,data:bw};if(bB.indexOf(".")>-1){e=bB.split(".");bB=e.shift();bC.namespace=e.slice(0).sort().join(".")}else{e=[];bC.namespace=""}bC.type=bB;if(!bC.guid){bC.guid=bD.guid}var bt=bE[bB],bA=b.event.special[bB]||{};if(!bt){bt=bE[bB]=[];if(!bA.setup||bA.setup.call(bu,bw,e,bx)===false){if(bu.addEventListener){bu.addEventListener(bB,bx,false)}else{if(bu.attachEvent){bu.attachEvent("on"+bB,bx)}}}}if(bA.add){bA.add.call(bu,bC);if(!bC.handler.guid){bC.handler.guid=bD.guid}}bt.push(bC);b.event.global[bB]=true}bu=null},global:{},remove:function(bG,bB,bt,bx){if(bG.nodeType===3||bG.nodeType===8){return}if(bt===false){bt=bh}var bJ,bw,by,bD,bE=0,bu,bz,bC,bv,bA,e,bI,bF=b.hasData(bG)&&b._data(bG),bs=bF&&bF.events;if(!bF||!bs){return}if(bB&&bB.type){bt=bB.handler;bB=bB.type}if(!bB||typeof bB==="string"&&bB.charAt(0)==="."){bB=bB||"";for(bw in bs){b.event.remove(bG,bw+bB)}return}bB=bB.split(" ");while((bw=bB[bE++])){bI=bw;e=null;bu=bw.indexOf(".")<0;bz=[];if(!bu){bz=bw.split(".");bw=bz.shift();bC=new RegExp("(^|\\.)"+b.map(bz.slice(0).sort(),G).join("\\.(?:.*\\.)?")+"(\\.|$)")}bA=bs[bw];if(!bA){continue}if(!bt){for(bD=0;bD=0){bB=bB.slice(0,-1);bs=true}if(bB.indexOf(".")>=0){bt=bB.split(".");bB=bt.shift();bt.sort()}if((!bw||b.event.customEvent[bB])&&!b.event.global[bB]){return}e=typeof e==="object"?e[b.expando]?e:new b.Event(bB,e):new b.Event(bB);e.type=bB;e.exclusive=bs;e.namespace=bt.join(".");e.namespace_re=new RegExp("(^|\\.)"+bt.join("\\.(?:.*\\.)?")+"(\\.|$)");if(bD||!bw){e.preventDefault();e.stopPropagation()}if(!bw){b.each(b.cache,function(){var bF=b.expando,bE=this[bF];if(bE&&bE.events&&bE.events[bB]){b.event.trigger(e,by,bE.handle.elem)}});return}if(bw.nodeType===3||bw.nodeType===8){return}e.result=K;e.target=bw;by=by!=null?b.makeArray(by):[];by.unshift(e);var 
bC=bw,bu=bB.indexOf(":")<0?"on"+bB:"";do{var bz=b._data(bC,"handle");e.currentTarget=bC;if(bz){bz.apply(bC,by)}if(bu&&b.acceptData(bC)&&bC[bu]&&bC[bu].apply(bC,by)===false){e.result=false;e.preventDefault()}bC=bC.parentNode||bC.ownerDocument||bC===e.target.ownerDocument&&a9}while(bC&&!e.isPropagationStopped());if(!e.isDefaultPrevented()){var bv,bA=b.event.special[bB]||{};if((!bA._default||bA._default.call(bw.ownerDocument,e)===false)&&!(bB==="click"&&b.nodeName(bw,"a"))&&b.acceptData(bw)){try{if(bu&&bw[bB]){bv=bw[bu];if(bv){bw[bu]=null}b.event.triggered=bB;bw[bB]()}}catch(bx){}if(bv){bw[bu]=bv}b.event.triggered=K}}return e.result},handle:function(by){by=b.event.fix(by||a9.event);var bs=((b._data(this,"events")||{})[by.type]||[]).slice(0),bx=!by.exclusive&&!by.namespace,bv=Array.prototype.slice.call(arguments,0);bv[0]=by;by.currentTarget=this;for(var bu=0,e=bs.length;bu-1?b.map(bs.options,function(bu){return bu.selected}).join("-"):""}else{if(b.nodeName(bs,"select")){bt=bs.selectedIndex}}}return bt},Z=function Z(bu){var bs=bu.target,bt,bv;if(!bb.test(bs.nodeName)||bs.readOnly){return}bt=b._data(bs,"_change_data");bv=k(bs);if(bu.type!=="focusout"||bs.type!=="radio"){b._data(bs,"_change_data",bv)}if(bt===K||bv===bt){return}if(bt!=null||bv){bu.type="change";bu.liveFired=K;b.event.trigger(bu,arguments[1],bs)}};b.event.special.change={filters:{focusout:Z,beforedeactivate:Z,click:function(bu){var bt=bu.target,bs=b.nodeName(bt,"input")?bt.type:"";if(bs==="radio"||bs==="checkbox"||b.nodeName(bt,"select")){Z.call(this,bu)}},keydown:function(bu){var bt=bu.target,bs=b.nodeName(bt,"input")?bt.type:"";if((bu.keyCode===13&&!b.nodeName(bt,"textarea"))||(bu.keyCode===32&&(bs==="checkbox"||bs==="radio"))||bs==="select-multiple"){Z.call(this,bu)}},beforeactivate:function(bt){var bs=bt.target;b._data(bs,"_change_data",k(bs))}},setup:function(bt,bs){if(this.type==="file"){return false}for(var e in bk){b.event.add(this,e+".specialChange",bk[e])}return 
bb.test(this.nodeName)},teardown:function(e){b.event.remove(this,".specialChange");return bb.test(this.nodeName)}};bk=b.event.special.change.filters;bk.focus=bk.beforeactivate}function aT(bs,bu,e){var bt=b.extend({},e[0]);bt.type=bs;bt.originalEvent={};bt.liveFired=K;b.event.handle.call(bu,bt);if(bt.isDefaultPrevented()){e[0].preventDefault()}}if(!b.support.focusinBubbles){b.each({focus:"focusin",blur:"focusout"},function(bu,e){var bs=0;b.event.special[e]={setup:function(){if(bs++===0){aq.addEventListener(bu,bt,true)}},teardown:function(){if(--bs===0){aq.removeEventListener(bu,bt,true)}}};function bt(bv){var bw=b.event.fix(bv);bw.type=e;bw.originalEvent={};b.event.trigger(bw,null,bw.target);if(bw.isDefaultPrevented()){bv.preventDefault()}}})}b.each(["bind","one"],function(bs,e){b.fn[e]=function(by,bz,bx){var bw;if(typeof by==="object"){for(var bv in by){this[e](bv,bz,by[bv],bx)}return this}if(arguments.length===2||bz===false){bx=bz;bz=K}if(e==="one"){bw=function(bA){b(this).unbind(bA,bw);return bx.apply(this,arguments)};bw.guid=bx.guid||b.guid++}else{bw=bx}if(by==="unload"&&e!=="one"){this.one(by,bz,bx)}else{for(var bu=0,bt=this.length;bubu){break}bC.currentTarget=bw.elem;bC.data=bw.handleObj.data;bC.handleObj=bw.handleObj;bG=bw.handleObj.origHandler.apply(bw.elem,arguments);if(bG===false||bC.isPropagationStopped()){bu=bw.level;if(bG===false){bz=false}if(bC.isImmediatePropagationStopped()){break}}}return bz}function p(bs,e){return(bs&&bs!=="*"?bs+".":"")+e.replace(N,"`").replace(bf,"&")}b.each(("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error").split(" "),function(bs,e){b.fn[e]=function(bu,bt){if(bt==null){bt=bu;bu=null}return arguments.length>0?this.bind(e,bu,bt):this.trigger(e)};if(b.attrFn){b.attrFn[e]=true}}); /* * Sizzle CSS Selector Engine * Copyright 2011, The Dojo Foundation * Released under the MIT, BSD, and GPL 
Licenses. * More information: http://sizzlejs.com/ */ (function(){var bC=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,bD=0,bG=Object.prototype.toString,bx=false,bw=true,bE=/\\/g,bK=/\W/;[0,0].sort(function(){bw=false;return 0});var bu=function(bP,e,bS,bT){bS=bS||[];e=e||aq;var bV=e;if(e.nodeType!==1&&e.nodeType!==9){return[]}if(!bP||typeof bP!=="string"){return bS}var bM,bX,b0,bL,bW,bZ,bY,bR,bO=true,bN=bu.isXML(e),bQ=[],bU=bP;do{bC.exec("");bM=bC.exec(bU);if(bM){bU=bM[3];bQ.push(bM[1]);if(bM[2]){bL=bM[3];break}}}while(bM);if(bQ.length>1&&by.exec(bP)){if(bQ.length===2&&bz.relative[bQ[0]]){bX=bH(bQ[0]+bQ[1],e)}else{bX=bz.relative[bQ[0]]?[e]:bu(bQ.shift(),e);while(bQ.length){bP=bQ.shift();if(bz.relative[bP]){bP+=bQ.shift()}bX=bH(bP,bX)}}}else{if(!bT&&bQ.length>1&&e.nodeType===9&&!bN&&bz.match.ID.test(bQ[0])&&!bz.match.ID.test(bQ[bQ.length-1])){bW=bu.find(bQ.shift(),e,bN);e=bW.expr?bu.filter(bW.expr,bW.set)[0]:bW.set[0]}if(e){bW=bT?{expr:bQ.pop(),set:bA(bT)}:bu.find(bQ.pop(),bQ.length===1&&(bQ[0]==="~"||bQ[0]==="+")&&e.parentNode?e.parentNode:e,bN);bX=bW.expr?bu.filter(bW.expr,bW.set):bW.set;if(bQ.length>0){b0=bA(bX)}else{bO=false}while(bQ.length){bZ=bQ.pop();bY=bZ;if(!bz.relative[bZ]){bZ=""}else{bY=bQ.pop()}if(bY==null){bY=e}bz.relative[bZ](b0,bY,bN)}}else{b0=bQ=[]}}if(!b0){b0=bX}if(!b0){bu.error(bZ||bP)}if(bG.call(b0)==="[object Array]"){if(!bO){bS.push.apply(bS,b0)}else{if(e&&e.nodeType===1){for(bR=0;b0[bR]!=null;bR++){if(b0[bR]&&(b0[bR]===true||b0[bR].nodeType===1&&bu.contains(e,b0[bR]))){bS.push(bX[bR])}}}else{for(bR=0;b0[bR]!=null;bR++){if(b0[bR]&&b0[bR].nodeType===1){bS.push(bX[bR])}}}}}else{bA(b0,bS)}if(bL){bu(bL,bV,bS,bT);bu.uniqueSort(bS)}return bS};bu.uniqueSort=function(bL){if(bF){bx=bw;bL.sort(bF);if(bx){for(var e=1;e0};bu.find=function(bR,e,bS){var bQ;if(!bR){return[]}for(var bN=0,bM=bz.order.length;bN":function(bQ,bL){var bP,bO=typeof 
bL==="string",bM=0,e=bQ.length;if(bO&&!bK.test(bL)){bL=bL.toLowerCase();for(;bM=0)){if(!bM){e.push(bP)}}else{if(bM){bL[bO]=false}}}}return false},ID:function(e){return e[1].replace(bE,"")},TAG:function(bL,e){return bL[1].replace(bE,"").toLowerCase()},CHILD:function(e){if(e[1]==="nth"){if(!e[2]){bu.error(e[0])}e[2]=e[2].replace(/^\+|\s*/g,"");var bL=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(e[2]==="even"&&"2n"||e[2]==="odd"&&"2n+1"||!/\D/.test(e[2])&&"0n+"+e[2]||e[2]);e[2]=(bL[1]+(bL[2]||1))-0;e[3]=bL[3]-0}else{if(e[2]){bu.error(e[0])}}e[0]=bD++;return e},ATTR:function(bO,bL,bM,e,bP,bQ){var bN=bO[1]=bO[1].replace(bE,"");if(!bQ&&bz.attrMap[bN]){bO[1]=bz.attrMap[bN]}bO[4]=(bO[4]||bO[5]||"").replace(bE,"");if(bO[2]==="~="){bO[4]=" "+bO[4]+" "}return bO},PSEUDO:function(bO,bL,bM,e,bP){if(bO[1]==="not"){if((bC.exec(bO[3])||"").length>1||/^\w/.test(bO[3])){bO[3]=bu(bO[3],null,null,bL)}else{var bN=bu.filter(bO[3],bL,bM,true^bP);if(!bM){e.push.apply(e,bN)}return false}}else{if(bz.match.POS.test(bO[0])||bz.match.CHILD.test(bO[0])){return true}}return bO},POS:function(e){e.unshift(true);return e}},filters:{enabled:function(e){return e.disabled===false&&e.type!=="hidden"},disabled:function(e){return e.disabled===true},checked:function(e){return e.checked===true},selected:function(e){if(e.parentNode){e.parentNode.selectedIndex}return e.selected===true},parent:function(e){return !!e.firstChild},empty:function(e){return !e.firstChild},has:function(bM,bL,e){return !!bu(e[3],bM).length},header:function(e){return(/h\d/i).test(e.nodeName)},text:function(bM){var e=bM.getAttribute("type"),bL=bM.type;return bM.nodeName.toLowerCase()==="input"&&"text"===bL&&(e===bL||e===null)},radio:function(e){return e.nodeName.toLowerCase()==="input"&&"radio"===e.type},checkbox:function(e){return e.nodeName.toLowerCase()==="input"&&"checkbox"===e.type},file:function(e){return e.nodeName.toLowerCase()==="input"&&"file"===e.type},password:function(e){return 
e.nodeName.toLowerCase()==="input"&&"password"===e.type},submit:function(bL){var e=bL.nodeName.toLowerCase();return(e==="input"||e==="button")&&"submit"===bL.type},image:function(e){return e.nodeName.toLowerCase()==="input"&&"image"===e.type},reset:function(bL){var e=bL.nodeName.toLowerCase();return(e==="input"||e==="button")&&"reset"===bL.type},button:function(bL){var e=bL.nodeName.toLowerCase();return e==="input"&&"button"===bL.type||e==="button"},input:function(e){return(/input|select|textarea|button/i).test(e.nodeName)},focus:function(e){return e===e.ownerDocument.activeElement}},setFilters:{first:function(bL,e){return e===0},last:function(bM,bL,e,bN){return bL===bN.length-1},even:function(bL,e){return e%2===0},odd:function(bL,e){return e%2===1},lt:function(bM,bL,e){return bLe[3]-0},nth:function(bM,bL,e){return e[3]-0===bL},eq:function(bM,bL,e){return e[3]-0===bL}},filter:{PSEUDO:function(bM,bR,bQ,bS){var e=bR[1],bL=bz.filters[e];if(bL){return bL(bM,bQ,bR,bS)}else{if(e==="contains"){return(bM.textContent||bM.innerText||bu.getText([bM])||"").indexOf(bR[3])>=0}else{if(e==="not"){var bN=bR[3];for(var bP=0,bO=bN.length;bP=0)}}},ID:function(bL,e){return bL.nodeType===1&&bL.getAttribute("id")===e},TAG:function(bL,e){return(e==="*"&&bL.nodeType===1)||bL.nodeName.toLowerCase()===e},CLASS:function(bL,e){return(" "+(bL.className||bL.getAttribute("class"))+" ").indexOf(e)>-1},ATTR:function(bP,bN){var bM=bN[1],e=bz.attrHandle[bM]?bz.attrHandle[bM](bP):bP[bM]!=null?bP[bM]:bP.getAttribute(bM),bQ=e+"",bO=bN[2],bL=bN[4];return e==null?bO==="!=":bO==="="?bQ===bL:bO==="*="?bQ.indexOf(bL)>=0:bO==="~="?(" "+bQ+" ").indexOf(bL)>=0:!bL?bQ&&e!==false:bO==="!="?bQ!==bL:bO==="^="?bQ.indexOf(bL)===0:bO==="$="?bQ.substr(bQ.length-bL.length)===bL:bO==="|="?bQ===bL||bQ.substr(0,bL.length+1)===bL+"-":false},POS:function(bO,bL,bM,bP){var e=bL[2],bN=bz.setFilters[e];if(bN){return bN(bO,bM,bL,bP)}}}};var by=bz.match.POS,bt=function(bL,e){return"\\"+(e-0+1)};for(var bv in 
bz.match){bz.match[bv]=new RegExp(bz.match[bv].source+(/(?![^\[]*\])(?![^\(]*\))/.source));bz.leftMatch[bv]=new RegExp(/(^(?:.|\r|\n)*?)/.source+bz.match[bv].source.replace(/\\(\d+)/g,bt))}var bA=function(bL,e){bL=Array.prototype.slice.call(bL,0);if(e){e.push.apply(e,bL);return e}return bL};try{Array.prototype.slice.call(aq.documentElement.childNodes,0)[0].nodeType}catch(bJ){bA=function(bO,bN){var bM=0,bL=bN||[];if(bG.call(bO)==="[object Array]"){Array.prototype.push.apply(bL,bO)}else{if(typeof bO.length==="number"){for(var e=bO.length;bM";e.insertBefore(bL,e.firstChild);if(aq.getElementById(bM)){bz.find.ID=function(bO,bP,bQ){if(typeof bP.getElementById!=="undefined"&&!bQ){var bN=bP.getElementById(bO[1]);return bN?bN.id===bO[1]||typeof bN.getAttributeNode!=="undefined"&&bN.getAttributeNode("id").nodeValue===bO[1]?[bN]:K:[]}};bz.filter.ID=function(bP,bN){var bO=typeof bP.getAttributeNode!=="undefined"&&bP.getAttributeNode("id");return bP.nodeType===1&&bO&&bO.nodeValue===bN}}e.removeChild(bL);e=bL=null})();(function(){var e=aq.createElement("div");e.appendChild(aq.createComment(""));if(e.getElementsByTagName("*").length>0){bz.find.TAG=function(bL,bP){var bO=bP.getElementsByTagName(bL[1]);if(bL[1]==="*"){var bN=[];for(var bM=0;bO[bM];bM++){if(bO[bM].nodeType===1){bN.push(bO[bM])}}bO=bN}return bO}}e.innerHTML="";if(e.firstChild&&typeof e.firstChild.getAttribute!=="undefined"&&e.firstChild.getAttribute("href")!=="#"){bz.attrHandle.href=function(bL){return bL.getAttribute("href",2)}}e=null})();if(aq.querySelectorAll){(function(){var e=bu,bN=aq.createElement("div"),bM="__sizzle__";bN.innerHTML="

";if(bN.querySelectorAll&&bN.querySelectorAll(".TEST").length===0){return}bu=function(bY,bP,bT,bX){bP=bP||aq;if(!bX&&!bu.isXML(bP)){var bW=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(bY);if(bW&&(bP.nodeType===1||bP.nodeType===9)){if(bW[1]){return bA(bP.getElementsByTagName(bY),bT)}else{if(bW[2]&&bz.find.CLASS&&bP.getElementsByClassName){return bA(bP.getElementsByClassName(bW[2]),bT)}}}if(bP.nodeType===9){if(bY==="body"&&bP.body){return bA([bP.body],bT)}else{if(bW&&bW[3]){var bS=bP.getElementById(bW[3]);if(bS&&bS.parentNode){if(bS.id===bW[3]){return bA([bS],bT)}}else{return bA([],bT)}}}try{return bA(bP.querySelectorAll(bY),bT)}catch(bU){}}else{if(bP.nodeType===1&&bP.nodeName.toLowerCase()!=="object"){var bQ=bP,bR=bP.getAttribute("id"),bO=bR||bM,b0=bP.parentNode,bZ=/^\s*[+~]/.test(bY);if(!bR){bP.setAttribute("id",bO)}else{bO=bO.replace(/'/g,"\\$&")}if(bZ&&b0){bP=bP.parentNode}try{if(!bZ||b0){return bA(bP.querySelectorAll("[id='"+bO+"'] "+bY),bT)}}catch(bV){}finally{if(!bR){bQ.removeAttribute("id")}}}}}return e(bY,bP,bT,bX)};for(var bL in e){bu[bL]=e[bL]}bN=null})()}(function(){var e=aq.documentElement,bM=e.matchesSelector||e.mozMatchesSelector||e.webkitMatchesSelector||e.msMatchesSelector;if(bM){var bO=!bM.call(aq.createElement("div"),"div"),bL=false;try{bM.call(aq.documentElement,"[test!='']:sizzle")}catch(bN){bL=true}bu.matchesSelector=function(bQ,bS){bS=bS.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!bu.isXML(bQ)){try{if(bL||!bz.match.PSEUDO.test(bS)&&!/!=/.test(bS)){var bP=bM.call(bQ,bS);if(bP||!bO||bQ.document&&bQ.document.nodeType!==11){return bP}}}catch(bR){}}return bu(bS,null,null,[bQ]).length>0}}})();(function(){var e=aq.createElement("div");e.innerHTML="
";if(!e.getElementsByClassName||e.getElementsByClassName("e").length===0){return}e.lastChild.className="e";if(e.getElementsByClassName("e").length===1){return}bz.order.splice(1,0,"CLASS");bz.find.CLASS=function(bL,bM,bN){if(typeof bM.getElementsByClassName!=="undefined"&&!bN){return bM.getElementsByClassName(bL[1])}};e=null})();function bs(bL,bQ,bP,bT,bR,bS){for(var bN=0,bM=bT.length;bN0){bO=e;break}}}e=e[bL]}bT[bN]=bO}}}if(aq.documentElement.contains){bu.contains=function(bL,e){return bL!==e&&(bL.contains?bL.contains(e):true)}}else{if(aq.documentElement.compareDocumentPosition){bu.contains=function(bL,e){return !!(bL.compareDocumentPosition(e)&16)}}else{bu.contains=function(){return false}}}bu.isXML=function(e){var bL=(e?e.ownerDocument||e:0).documentElement;return bL?bL.nodeName!=="HTML":false};var bH=function(e,bR){var bP,bN=[],bO="",bM=bR.nodeType?[bR]:bR;while((bP=bz.match.PSEUDO.exec(e))){bO+=bP[0];e=e.replace(bz.match.PSEUDO,"")}e=bz.relative[e]?e+"*":e;for(var bQ=0,bL=bM.length;bQ0){for(by=bx;by0:this.filter(e).length>0)},closest:function(bB,bs){var by=[],bv,bt,bA=this[0];if(b.isArray(bB)){var bx,bu,bw={},e=1;if(bA&&bB.length){for(bv=0,bt=bB.length;bv-1:b(bA).is(bx)){by.push({selector:bu,elem:bA,level:e})}}bA=bA.parentNode;e++}}return by}var bz=H.test(bB)||typeof bB!=="string"?b(bB,bs||this.context):0;for(bv=0,bt=this.length;bv-1:b.find.matchesSelector(bA,bB)){by.push(bA);break}else{bA=bA.parentNode;if(!bA||!bA.ownerDocument||bA===bs||bA.nodeType===11){break}}}}by=by.length>1?b.unique(by):by;return this.pushStack(by,"closest",bB)},index:function(e){if(!e||typeof e==="string"){return b.inArray(this[0],e?b(e):this.parent().children())}return b.inArray(e.jquery?e[0]:e,this)},add:function(e,bs){var bu=typeof e==="string"?b(e,bs):b.makeArray(e&&e.nodeType?[e]:e),bt=b.merge(this.get(),bu);return this.pushStack(C(bu[0])||C(bt[0])?bt:b.unique(bt))},andSelf:function(){return this.add(this.prevObject)}});function C(e){return 
!e||!e.parentNode||e.parentNode.nodeType===11}b.each({parent:function(bs){var e=bs.parentNode;return e&&e.nodeType!==11?e:null},parents:function(e){return b.dir(e,"parentNode")},parentsUntil:function(bs,e,bt){return b.dir(bs,"parentNode",bt)},next:function(e){return b.nth(e,2,"nextSibling")},prev:function(e){return b.nth(e,2,"previousSibling")},nextAll:function(e){return b.dir(e,"nextSibling")},prevAll:function(e){return b.dir(e,"previousSibling")},nextUntil:function(bs,e,bt){return b.dir(bs,"nextSibling",bt)},prevUntil:function(bs,e,bt){return b.dir(bs,"previousSibling",bt)},siblings:function(e){return b.sibling(e.parentNode.firstChild,e)},children:function(e){return b.sibling(e.firstChild)},contents:function(e){return b.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:b.makeArray(e.childNodes)}},function(e,bs){b.fn[e]=function(bw,bt){var bv=b.map(this,bs,bw),bu=P.call(arguments);if(!Y.test(e)){bt=bw}if(bt&&typeof bt==="string"){bv=b.filter(bt,bv)}bv=this.length>1&&!au[e]?b.unique(bv):bv;if((this.length>1||a7.test(bt))&&am.test(e)){bv=bv.reverse()}return this.pushStack(bv,e,bu.join(","))}});b.extend({filter:function(bt,e,bs){if(bs){bt=":not("+bt+")"}return e.length===1?b.find.matchesSelector(e[0],bt)?[e[0]]:[]:b.find.matches(bt,e)},dir:function(bt,bs,bv){var e=[],bu=bt[bs];while(bu&&bu.nodeType!==9&&(bv===K||bu.nodeType!==1||!b(bu).is(bv))){if(bu.nodeType===1){e.push(bu)}bu=bu[bs]}return e},nth:function(bv,e,bt,bu){e=e||1;var bs=0;for(;bv;bv=bv[bt]){if(bv.nodeType===1&&++bs===e){break}}return bv},sibling:function(bt,bs){var e=[];for(;bt;bt=bt.nextSibling){if(bt.nodeType===1&&bt!==bs){e.push(bt)}}return e}});function aB(bu,bt,e){bt=bt||0;if(b.isFunction(bt)){return b.grep(bu,function(bw,bv){var bx=!!bt.call(bw,bv,bw);return bx===e})}else{if(bt.nodeType){return b.grep(bu,function(bw,bv){return(bw===bt)===e})}else{if(typeof bt==="string"){var bs=b.grep(bu,function(bv){return bv.nodeType===1});if(bn.test(bt)){return 
b.filter(bt,bs,!e)}else{bt=b.filter(bt,bs)}}}}return b.grep(bu,function(bw,bv){return(b.inArray(bw,bt)>=0)===e})}var ad=/ jQuery\d+="(?:\d+|null)"/g,an=/^\s+/,S=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,d=/<([\w:]+)/,w=/",""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]};at.optgroup=at.option;at.tbody=at.tfoot=at.colgroup=at.caption=at.thead;at.th=at.td;if(!b.support.htmlSerialize){at._default=[1,"div
","
"]}b.fn.extend({text:function(e){if(b.isFunction(e)){return this.each(function(bt){var bs=b(this);bs.text(e.call(this,bt,bs.text()))})}if(typeof e!=="object"&&e!==K){return this.empty().append((this[0]&&this[0].ownerDocument||aq).createTextNode(e))}return b.text(this)},wrapAll:function(e){if(b.isFunction(e)){return this.each(function(bt){b(this).wrapAll(e.call(this,bt))})}if(this[0]){var bs=b(e,this[0].ownerDocument).eq(0).clone(true);if(this[0].parentNode){bs.insertBefore(this[0])}bs.map(function(){var bt=this;while(bt.firstChild&&bt.firstChild.nodeType===1){bt=bt.firstChild}return bt}).append(this)}return this},wrapInner:function(e){if(b.isFunction(e)){return this.each(function(bs){b(this).wrapInner(e.call(this,bs))})}return this.each(function(){var bs=b(this),bt=bs.contents();if(bt.length){bt.wrapAll(e)}else{bs.append(e)}})},wrap:function(e){return this.each(function(){b(this).wrapAll(e)})},unwrap:function(){return this.parent().each(function(){if(!b.nodeName(this,"body")){b(this).replaceWith(this.childNodes)}}).end()},append:function(){return this.domManip(arguments,true,function(e){if(this.nodeType===1){this.appendChild(e)}})},prepend:function(){return this.domManip(arguments,true,function(e){if(this.nodeType===1){this.insertBefore(e,this.firstChild)}})},before:function(){if(this[0]&&this[0].parentNode){return this.domManip(arguments,false,function(bs){this.parentNode.insertBefore(bs,this)})}else{if(arguments.length){var e=b(arguments[0]);e.push.apply(e,this.toArray());return this.pushStack(e,"before",arguments)}}},after:function(){if(this[0]&&this[0].parentNode){return this.domManip(arguments,false,function(bs){this.parentNode.insertBefore(bs,this.nextSibling)})}else{if(arguments.length){var e=this.pushStack(this,"after",arguments);e.push.apply(e,b(arguments[0]).toArray());return e}}},remove:function(e,bu){for(var 
bs=0,bt;(bt=this[bs])!=null;bs++){if(!e||b.filter(e,[bt]).length){if(!bu&&bt.nodeType===1){b.cleanData(bt.getElementsByTagName("*"));b.cleanData([bt])}if(bt.parentNode){bt.parentNode.removeChild(bt)}}}return this},empty:function(){for(var e=0,bs;(bs=this[e])!=null;e++){if(bs.nodeType===1){b.cleanData(bs.getElementsByTagName("*"))}while(bs.firstChild){bs.removeChild(bs.firstChild)}}return this},clone:function(bs,e){bs=bs==null?false:bs;e=e==null?bs:e;return this.map(function(){return b.clone(this,bs,e)})},html:function(bu){if(bu===K){return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(ad,""):null}else{if(typeof bu==="string"&&!O.test(bu)&&(b.support.leadingWhitespace||!an.test(bu))&&!at[(d.exec(bu)||["",""])[1].toLowerCase()]){bu=bu.replace(S,"<$1>");try{for(var bt=0,bs=this.length;bt1&&bt0?this.clone(true):this).get();b(bz[bx])[bs](bv);bw=bw.concat(bv)}return this.pushStack(bw,e,bz.selector)}}});function bc(e){if("getElementsByTagName" in e){return e.getElementsByTagName("*")}else{if("querySelectorAll" in e){return e.querySelectorAll("*")}else{return[]}}}function av(e){if(e.type==="checkbox"||e.type==="radio"){e.defaultChecked=e.checked}}function E(e){if(b.nodeName(e,"input")){av(e)}else{if("getElementsByTagName" in e){b.grep(e.getElementsByTagName("input"),av)}}}b.extend({clone:function(bv,bx,bt){var bw=bv.cloneNode(true),e,bs,bu;if((!b.support.noCloneEvent||!b.support.noCloneChecked)&&(bv.nodeType===1||bv.nodeType===11)&&!b.isXMLDoc(bv)){ae(bv,bw);e=bc(bv);bs=bc(bw);for(bu=0;e[bu];++bu){ae(e[bu],bs[bu])}}if(bx){t(bv,bw);if(bt){e=bc(bv);bs=bc(bw);for(bu=0;e[bu];++bu){t(e[bu],bs[bu])}}}e=bs=null;return bw},clean:function(bt,bv,bE,bx){var bC;bv=bv||aq;if(typeof bv.createElement==="undefined"){bv=bv.ownerDocument||bv[0]&&bv[0].ownerDocument||aq}var bF=[],by;for(var bB=0,bw;(bw=bt[bB])!=null;bB++){if(typeof bw==="number"){bw+=""}if(!bw){continue}if(typeof bw==="string"){if(!V.test(bw)){bw=bv.createTextNode(bw)}else{bw=bw.replace(S,"<$1>");var 
bH=(d.exec(bw)||["",""])[1].toLowerCase(),bu=at[bH]||at._default,bA=bu[0],bs=bv.createElement("div");bs.innerHTML=bu[1]+bw+bu[2];while(bA--){bs=bs.lastChild}if(!b.support.tbody){var e=w.test(bw),bz=bH==="table"&&!e?bs.firstChild&&bs.firstChild.childNodes:bu[1]===""&&!e?bs.childNodes:[];for(by=bz.length-1;by>=0;--by){if(b.nodeName(bz[by],"tbody")&&!bz[by].childNodes.length){bz[by].parentNode.removeChild(bz[by])}}}if(!b.support.leadingWhitespace&&an.test(bw)){bs.insertBefore(bv.createTextNode(an.exec(bw)[0]),bs.firstChild)}bw=bs.childNodes}}var bD;if(!b.support.appendChecked){if(bw[0]&&typeof(bD=bw.length)==="number"){for(by=0;by=0){return bu+"px"}}else{return bu}}}});if(!b.support.opacity){b.cssHooks.opacity={get:function(bs,e){return ap.test((e&&bs.currentStyle?bs.currentStyle.filter:bs.style.filter)||"")?(parseFloat(RegExp.$1)/100)+"":e?"1":""},set:function(bv,bw){var bu=bv.style,bs=bv.currentStyle;bu.zoom=1;var e=b.isNaN(bw)?"":"alpha(opacity="+bw*100+")",bt=bs&&bs.filter||bu.filter||"";bu.filter=ah.test(bt)?bt.replace(ah,e):bt+" "+e}}}b(function(){if(!b.support.reliableMarginRight){b.cssHooks.marginRight={get:function(bt,bs){var e;b.swap(bt,{display:"inline-block"},function(){if(bs){e=W(bt,"margin-right","marginRight")}else{e=bt.style.marginRight}});return e}}}});if(aq.defaultView&&aq.defaultView.getComputedStyle){aE=function(bv,bt){var bs,bu,e;bt=bt.replace(z,"-$1").toLowerCase();if(!(bu=bv.ownerDocument.defaultView)){return K}if((e=bu.getComputedStyle(bv,null))){bs=e.getPropertyValue(bt);if(bs===""&&!b.contains(bv.ownerDocument.documentElement,bv)){bs=b.style(bv,bt)}}return bs}}if(aq.documentElement.currentStyle){aS=function(bv,bt){var bw,bs=bv.currentStyle&&bv.currentStyle[bt],e=bv.runtimeStyle&&bv.runtimeStyle[bt],bu=bv.style;if(!ba.test(bs)&&bl.test(bs)){bw=bu.left;if(e){bv.runtimeStyle.left=bv.currentStyle.left}bu.left=bt==="fontSize"?"1em":(bs||0);bs=bu.pixelLeft+"px";bu.left=bw;if(e){bv.runtimeStyle.left=e}}return bs===""?"auto":bs}}W=aE||aS;function 
o(bt,bs,e){var bv=bs==="width"?bt.offsetWidth:bt.offsetHeight,bu=bs==="width"?aj:aY;if(bv>0){if(e!=="border"){b.each(bu,function(){if(!e){bv-=parseFloat(b.css(bt,"padding"+this))||0}if(e==="margin"){bv+=parseFloat(b.css(bt,e+this))||0}else{bv-=parseFloat(b.css(bt,"border"+this+"Width"))||0}})}return bv+"px"}bv=W(bt,bs,bs);if(bv<0||bv==null){bv=bt.style[bs]||0}bv=parseFloat(bv)||0;if(e){b.each(bu,function(){bv+=parseFloat(b.css(bt,"padding"+this))||0;if(e!=="padding"){bv+=parseFloat(b.css(bt,"border"+this+"Width"))||0}if(e==="margin"){bv+=parseFloat(b.css(bt,e+this))||0}})}return bv+"px"}if(b.expr&&b.expr.filters){b.expr.filters.hidden=function(bt){var bs=bt.offsetWidth,e=bt.offsetHeight;return(bs===0&&e===0)||(!b.support.reliableHiddenOffsets&&(bt.style.display||b.css(bt,"display"))==="none")};b.expr.filters.visible=function(e){return !b.expr.filters.hidden(e)}}var j=/%20/g,al=/\[\]$/,bq=/\r?\n/g,bo=/#.*$/,az=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,aV=/^(?:color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,aI=/^(?:about|app|app\-storage|.+\-extension|file|widget):$/,aL=/^(?:GET|HEAD)$/,c=/^\/\//,L=/\?/,a3=/)<[^<]*)*<\/script>/gi,q=/^(?:select|textarea)/i,h=/\s+/,bp=/([?&])_=[^&]*/,J=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,A=b.fn.load,X={},r={},aA,s;try{aA=bi.href}catch(ar){aA=aq.createElement("a");aA.href="";aA=aA.href}s=J.exec(aA.toLowerCase())||[];function f(e){return function(bv,bx){if(typeof bv!=="string"){bx=bv;bv="*"}if(b.isFunction(bx)){var bu=bv.toLowerCase().split(h),bt=0,bw=bu.length,bs,by,bz;for(;bt=0){var e=bt.slice(bv,bt.length);bt=bt.slice(0,bv)}var bu="GET";if(bw){if(b.isFunction(bw)){bx=bw;bw=K}else{if(typeof bw==="object"){bw=b.param(bw,b.ajaxSettings.traditional);bu="POST"}}}var bs=this;b.ajax({url:bt,type:bu,dataType:"html",data:bw,complete:function(bz,by,bA){bA=bz.responseText;if(bz.isResolved()){bz.done(function(bB){bA=bB});bs.html(e?b("
").append(bA.replace(a3,"")).find(e):bA)}if(bx){bs.each(bx,[bA,by,bz])}}});return this},serialize:function(){return b.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?b.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||q.test(this.nodeName)||aV.test(this.type))}).map(function(e,bs){var bt=b(this).val();return bt==null?null:b.isArray(bt)?b.map(bt,function(bv,bu){return{name:bs.name,value:bv.replace(bq,"\r\n")}}):{name:bs.name,value:bt.replace(bq,"\r\n")}}).get()}});b.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,bs){b.fn[bs]=function(bt){return this.bind(bs,bt)}});b.each(["get","post"],function(e,bs){b[bs]=function(bt,bv,bw,bu){if(b.isFunction(bv)){bu=bu||bw;bw=bv;bv=K}return b.ajax({type:bs,url:bt,data:bv,success:bw,dataType:bu})}});b.extend({getScript:function(e,bs){return b.get(e,K,bs,"script")},getJSON:function(e,bs,bt){return b.get(e,bs,bt,"json")},ajaxSetup:function(bt,e){if(!e){e=bt;bt=b.extend(true,b.ajaxSettings,e)}else{b.extend(true,bt,b.ajaxSettings,e)}for(var bs in {context:1,url:1}){if(bs in e){bt[bs]=e[bs]}else{if(bs in b.ajaxSettings){bt[bs]=b.ajaxSettings[bs]}}}return bt},ajaxSettings:{url:aA,isLocal:aI.test(s[1]),global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":"*/*"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a9.String,"text html":true,"text json":b.parseJSON,"text xml":b.parseXML}},ajaxPrefilter:f(X),ajaxTransport:f(r),ajax:function(bw,bu){if(typeof bw==="object"){bu=bw;bw=K}bu=bu||{};var bA=b.ajaxSetup({},bu),bP=bA.context||bA,bD=bP!==bA&&(bP.nodeType||bP instanceof 
b)?b(bP):b.event,bO=b.Deferred(),bK=b._Deferred(),by=bA.statusCode||{},bz,bE={},bL={},bN,bv,bI,bB,bF,bx=0,bt,bH,bG={readyState:0,setRequestHeader:function(bQ,bR){if(!bx){var e=bQ.toLowerCase();bQ=bL[e]=bL[e]||bQ;bE[bQ]=bR}return this},getAllResponseHeaders:function(){return bx===2?bN:null},getResponseHeader:function(bQ){var e;if(bx===2){if(!bv){bv={};while((e=az.exec(bN))){bv[e[1].toLowerCase()]=e[2]}}e=bv[bQ.toLowerCase()]}return e===K?null:e},overrideMimeType:function(e){if(!bx){bA.mimeType=e}return this},abort:function(e){e=e||"abort";if(bI){bI.abort(e)}bC(0,e);return this}};function bC(bV,bT,bW,bS){if(bx===2){return}bx=2;if(bB){clearTimeout(bB)}bI=K;bN=bS||"";bG.readyState=bV?4:0;var bQ,b0,bZ,bU=bW?bg(bA,bG,bW):K,bR,bY;if(bV>=200&&bV<300||bV===304){if(bA.ifModified){if((bR=bG.getResponseHeader("Last-Modified"))){b.lastModified[bz]=bR}if((bY=bG.getResponseHeader("Etag"))){b.etag[bz]=bY}}if(bV===304){bT="notmodified";bQ=true}else{try{b0=F(bA,bU);bT="success";bQ=true}catch(bX){bT="parsererror";bZ=bX}}}else{bZ=bT;if(!bT||bV){bT="error";if(bV<0){bV=0}}}bG.status=bV;bG.statusText=bT;if(bQ){bO.resolveWith(bP,[b0,bT,bG])}else{bO.rejectWith(bP,[bG,bT,bZ])}bG.statusCode(by);by=K;if(bt){bD.trigger("ajax"+(bQ?"Success":"Error"),[bG,bA,bQ?b0:bZ])}bK.resolveWith(bP,[bG,bT]);if(bt){bD.trigger("ajaxComplete",[bG,bA]);if(!(--b.active)){b.event.trigger("ajaxStop")}}}bO.promise(bG);bG.success=bG.done;bG.error=bG.fail;bG.complete=bK.done;bG.statusCode=function(bQ){if(bQ){var e;if(bx<2){for(e in bQ){by[e]=[by[e],bQ[e]]}}else{e=bQ[bG.status];bG.then(e,e)}}return this};bA.url=((bw||bA.url)+"").replace(bo,"").replace(c,s[1]+"//");bA.dataTypes=b.trim(bA.dataType||"*").toLowerCase().split(h);if(bA.crossDomain==null){bF=J.exec(bA.url.toLowerCase());bA.crossDomain=!!(bF&&(bF[1]!=s[1]||bF[2]!=s[2]||(bF[3]||(bF[1]==="http:"?80:443))!=(s[3]||(s[1]==="http:"?80:443))))}if(bA.data&&bA.processData&&typeof 
bA.data!=="string"){bA.data=b.param(bA.data,bA.traditional)}aQ(X,bA,bu,bG);if(bx===2){return false}bt=bA.global;bA.type=bA.type.toUpperCase();bA.hasContent=!aL.test(bA.type);if(bt&&b.active++===0){b.event.trigger("ajaxStart")}if(!bA.hasContent){if(bA.data){bA.url+=(L.test(bA.url)?"&":"?")+bA.data}bz=bA.url;if(bA.cache===false){var bs=b.now(),bM=bA.url.replace(bp,"$1_="+bs);bA.url=bM+((bM===bA.url)?(L.test(bA.url)?"&":"?")+"_="+bs:"")}}if(bA.data&&bA.hasContent&&bA.contentType!==false||bu.contentType){bG.setRequestHeader("Content-Type",bA.contentType)}if(bA.ifModified){bz=bz||bA.url;if(b.lastModified[bz]){bG.setRequestHeader("If-Modified-Since",b.lastModified[bz])}if(b.etag[bz]){bG.setRequestHeader("If-None-Match",b.etag[bz])}}bG.setRequestHeader("Accept",bA.dataTypes[0]&&bA.accepts[bA.dataTypes[0]]?bA.accepts[bA.dataTypes[0]]+(bA.dataTypes[0]!=="*"?", */*; q=0.01":""):bA.accepts["*"]);for(bH in bA.headers){bG.setRequestHeader(bH,bA.headers[bH])}if(bA.beforeSend&&(bA.beforeSend.call(bP,bG,bA)===false||bx===2)){bG.abort();return false}for(bH in {success:1,error:1,complete:1}){bG[bH](bA[bH])}bI=aQ(r,bA,bu,bG);if(!bI){bC(-1,"No Transport")}else{bG.readyState=1;if(bt){bD.trigger("ajaxSend",[bG,bA])}if(bA.async&&bA.timeout>0){bB=setTimeout(function(){bG.abort("timeout")},bA.timeout)}try{bx=1;bI.send(bE,bC)}catch(bJ){if(status<2){bC(-1,bJ)}else{b.error(bJ)}}}return bG},param:function(e,bt){var bs=[],bv=function(bw,bx){bx=b.isFunction(bx)?bx():bx;bs[bs.length]=encodeURIComponent(bw)+"="+encodeURIComponent(bx)};if(bt===K){bt=b.ajaxSettings.traditional}if(b.isArray(e)||(e.jquery&&!b.isPlainObject(e))){b.each(e,function(){bv(this.name,this.value)})}else{for(var bu in e){v(bu,e[bu],bt,bv)}}return bs.join("&").replace(j,"+")}});function v(bt,bv,bs,bu){if(b.isArray(bv)){b.each(bv,function(bx,bw){if(bs||al.test(bt)){bu(bt,bw)}else{v(bt+"["+(typeof bw==="object"||b.isArray(bw)?bx:"")+"]",bw,bs,bu)}})}else{if(!bs&&bv!=null&&typeof bv==="object"){for(var e in 
bv){v(bt+"["+e+"]",bv[e],bs,bu)}}else{bu(bt,bv)}}}b.extend({active:0,lastModified:{},etag:{}});function bg(bA,bz,bw){var bs=bA.contents,by=bA.dataTypes,bt=bA.responseFields,bv,bx,bu,e;for(bx in bt){if(bx in bw){bz[bt[bx]]=bw[bx]}}while(by[0]==="*"){by.shift();if(bv===K){bv=bA.mimeType||bz.getResponseHeader("content-type")}}if(bv){for(bx in bs){if(bs[bx]&&bs[bx].test(bv)){by.unshift(bx);break}}}if(by[0] in bw){bu=by[0]}else{for(bx in bw){if(!by[0]||bA.converters[bx+" "+by[0]]){bu=bx;break}if(!e){e=bx}}bu=bu||e}if(bu){if(bu!==by[0]){by.unshift(bu)}return bw[bu]}}function F(bE,bw){if(bE.dataFilter){bw=bE.dataFilter(bw,bE.dataType)}var bA=bE.dataTypes,bD={},bx,bB,bt=bA.length,by,bz=bA[0],bu,bv,bC,bs,e;for(bx=1;bx=bs.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();bs.animatedProperties[this.prop]=true;for(bt in bs.animatedProperties){if(bs.animatedProperties[bt]!==true){e=false}}if(e){if(bs.overflow!=null&&!b.support.shrinkWrapBlocks){b.each(["","X","Y"],function(bz,bA){bw.style["overflow"+bA]=bs.overflow[bz]})}if(bs.hide){b(bw).hide()}if(bs.hide||bs.show){for(var bx in bs.animatedProperties){b.style(bw,bx,bs.orig[bx])}}bs.complete.call(bw)}return false}else{if(bs.duration==Infinity){this.now=bu}else{by=bu-this.startTime;this.state=by/bs.duration;this.pos=b.easing[bs.animatedProperties[this.prop]](this.state,by,0,1,bs.duration);this.now=this.start+((this.end-this.start)*this.pos)}this.update()}return true}};b.extend(b.fx,{tick:function(){for(var bs=b.timers,e=0;e").appendTo(e),bt=bs.css("display");bs.remove();if(bt==="none"||bt===""){if(!a6){a6=aq.createElement("iframe");a6.frameBorder=a6.width=a6.height=0}e.appendChild(a6);if(!m||!a6.createElement){m=(a6.contentWindow||a6.contentDocument).document;m.write((aq.compatMode==="CSS1Compat"?"":"")+"");m.close()}bs=m.createElement(bu);m.body.appendChild(bs);bt=b.css(bs,"display");e.removeChild(a6)}Q[bu]=bt}return Q[bu]}var U=/^t(?:able|d|h)$/i,aa=/^(?:body|html)$/i;if("getBoundingClientRect" in 
aq.documentElement){b.fn.offset=function(bF){var bv=this[0],by;if(bF){return this.each(function(e){b.offset.setOffset(this,bF,e)})}if(!bv||!bv.ownerDocument){return null}if(bv===bv.ownerDocument.body){return b.offset.bodyOffset(bv)}try{by=bv.getBoundingClientRect()}catch(bC){}var bE=bv.ownerDocument,bt=bE.documentElement;if(!by||!b.contains(bt,bv)){return by?{top:by.top,left:by.left}:{top:0,left:0}}var bz=bE.body,bA=aG(bE),bx=bt.clientTop||bz.clientTop||0,bB=bt.clientLeft||bz.clientLeft||0,bs=bA.pageYOffset||b.support.boxModel&&bt.scrollTop||bz.scrollTop,bw=bA.pageXOffset||b.support.boxModel&&bt.scrollLeft||bz.scrollLeft,bD=by.top+bs-bx,bu=by.left+bw-bB;return{top:bD,left:bu}}}else{b.fn.offset=function(bC){var bw=this[0];if(bC){return this.each(function(bD){b.offset.setOffset(this,bC,bD)})}if(!bw||!bw.ownerDocument){return null}if(bw===bw.ownerDocument.body){return b.offset.bodyOffset(bw)}b.offset.initialize();var bz,bt=bw.offsetParent,bs=bw,bB=bw.ownerDocument,bu=bB.documentElement,bx=bB.body,by=bB.defaultView,e=by?by.getComputedStyle(bw,null):bw.currentStyle,bA=bw.offsetTop,bv=bw.offsetLeft;while((bw=bw.parentNode)&&bw!==bx&&bw!==bu){if(b.offset.supportsFixedPosition&&e.position==="fixed"){break}bz=by?by.getComputedStyle(bw,null):bw.currentStyle;bA-=bw.scrollTop;bv-=bw.scrollLeft;if(bw===bt){bA+=bw.offsetTop;bv+=bw.offsetLeft;if(b.offset.doesNotAddBorder&&!(b.offset.doesAddBorderForTableAndCells&&U.test(bw.nodeName))){bA+=parseFloat(bz.borderTopWidth)||0;bv+=parseFloat(bz.borderLeftWidth)||0}bs=bt;bt=bw.offsetParent}if(b.offset.subtractsBorderForOverflowNotVisible&&bz.overflow!=="visible"){bA+=parseFloat(bz.borderTopWidth)||0;bv+=parseFloat(bz.borderLeftWidth)||0}e=bz}if(e.position==="relative"||e.position==="static"){bA+=bx.offsetTop;bv+=bx.offsetLeft}if(b.offset.supportsFixedPosition&&e.position==="fixed"){bA+=Math.max(bu.scrollTop,bx.scrollTop);bv+=Math.max(bu.scrollLeft,bx.scrollLeft)}return{top:bA,left:bv}}}b.offset={initialize:function(){var 
e=aq.body,bs=aq.createElement("div"),bv,bx,bw,by,bt=parseFloat(b.css(e,"marginTop"))||0,bu="
";b.extend(bs.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});bs.innerHTML=bu;e.insertBefore(bs,e.firstChild);bv=bs.firstChild;bx=bv.firstChild;by=bv.nextSibling.firstChild.firstChild;this.doesNotAddBorder=(bx.offsetTop!==5);this.doesAddBorderForTableAndCells=(by.offsetTop===5);bx.style.position="fixed";bx.style.top="20px";this.supportsFixedPosition=(bx.offsetTop===20||bx.offsetTop===15);bx.style.position=bx.style.top="";bv.style.overflow="hidden";bv.style.position="relative";this.subtractsBorderForOverflowNotVisible=(bx.offsetTop===-5);this.doesNotIncludeMarginInBodyOffset=(e.offsetTop!==bt);e.removeChild(bs);b.offset.initialize=b.noop},bodyOffset:function(e){var bt=e.offsetTop,bs=e.offsetLeft;b.offset.initialize();if(b.offset.doesNotIncludeMarginInBodyOffset){bt+=parseFloat(b.css(e,"marginTop"))||0;bs+=parseFloat(b.css(e,"marginLeft"))||0}return{top:bt,left:bs}},setOffset:function(bu,bD,bx){var by=b.css(bu,"position");if(by==="static"){bu.style.position="relative"}var bw=b(bu),bs=bw.offset(),e=b.css(bu,"top"),bB=b.css(bu,"left"),bC=(by==="absolute"||by==="fixed")&&b.inArray("auto",[e,bB])>-1,bA={},bz={},bt,bv;if(bC){bz=bw.position();bt=bz.top;bv=bz.left}else{bt=parseFloat(e)||0;bv=parseFloat(bB)||0}if(b.isFunction(bD)){bD=bD.call(bu,bx,bs)}if(bD.top!=null){bA.top=(bD.top-bs.top)+bt}if(bD.left!=null){bA.left=(bD.left-bs.left)+bv}if("using" in bD){bD.using.call(bu,bA)}else{bw.css(bA)}}};b.fn.extend({position:function(){if(!this[0]){return null}var bt=this[0],bs=this.offsetParent(),bu=this.offset(),e=aa.test(bs[0].nodeName)?{top:0,left:0}:bs.offset();bu.top-=parseFloat(b.css(bt,"marginTop"))||0;bu.left-=parseFloat(b.css(bt,"marginLeft"))||0;e.top+=parseFloat(b.css(bs[0],"borderTopWidth"))||0;e.left+=parseFloat(b.css(bs[0],"borderLeftWidth"))||0;return{top:bu.top-e.top,left:bu.left-e.left}},offsetParent:function(){return this.map(function(){var 
e=this.offsetParent||aq.body;while(e&&(!aa.test(e.nodeName)&&b.css(e,"position")==="static")){e=e.offsetParent}return e})}});b.each(["Left","Top"],function(bs,e){var bt="scroll"+e;b.fn[bt]=function(bw){var bu,bv;if(bw===K){bu=this[0];if(!bu){return null}bv=aG(bu);return bv?("pageXOffset" in bv)?bv[bs?"pageYOffset":"pageXOffset"]:b.support.boxModel&&bv.document.documentElement[bt]||bv.document.body[bt]:bu[bt]}return this.each(function(){bv=aG(this);if(bv){bv.scrollTo(!bs?bw:b(bv).scrollLeft(),bs?bw:b(bv).scrollTop())}else{this[bt]=bw}})}});function aG(e){return b.isWindow(e)?e:e.nodeType===9?e.defaultView||e.parentWindow:false}b.each(["Height","Width"],function(bs,e){var bt=e.toLowerCase();b.fn["inner"+e]=function(){var bu=this[0];return bu&&bu.style?parseFloat(b.css(bu,bt,"padding")):null};b.fn["outer"+e]=function(bv){var bu=this[0];return bu&&bu.style?parseFloat(b.css(bu,bt,bv?"margin":"border")):null};b.fn[bt]=function(bv){var bw=this[0];if(!bw){return bv==null?null:this}if(b.isFunction(bv)){return this.each(function(bA){var bz=b(this);bz[bt](bv.call(this,bA,bz[bt]()))})}if(b.isWindow(bw)){var bx=bw.document.documentElement["client"+e];return bw.document.compatMode==="CSS1Compat"&&bx||bw.document.body["client"+e]||bx}else{if(bw.nodeType===9){return Math.max(bw.documentElement["client"+e],bw.body["scroll"+e],bw.documentElement["scroll"+e],bw.body["offset"+e],bw.documentElement["offset"+e])}else{if(bv===K){var by=b.css(bw,bt),bu=parseFloat(by);return b.isNaN(bu)?by:bu}else{return this.css(bt,typeof bv==="string"?bv:bv+"px")}}}}});a9.jQuery=a9.$=b})(window);ikiwiki-3.20160121/underlays/jquery/ikiwiki/jquery.js0000644000000000000000000071525212650125230017311 0ustar /*! * jQuery JavaScript Library v1.6.2 * http://jquery.com/ * * Copyright 2011, John Resig * Dual licensed under the MIT or GPL Version 2 licenses. 
* http://jquery.org/license * * Includes Sizzle.js * http://sizzlejs.com/ * Copyright 2011, The Dojo Foundation * Released under the MIT, BSD, and GPL Licenses. * * Date: Thu Jun 30 14:16:56 2011 -0400 */ (function( window, undefined ) { // Use the correct document accordingly with window argument (sandbox) var document = window.document, navigator = window.navigator, location = window.location; var jQuery = (function() { // Define a local copy of jQuery var jQuery = function( selector, context ) { // The jQuery object is actually just the init constructor 'enhanced' return new jQuery.fn.init( selector, context, rootjQuery ); }, // Map over jQuery in case of overwrite _jQuery = window.jQuery, // Map over the $ in case of overwrite _$ = window.$, // A central reference to the root jQuery(document) rootjQuery, // A simple way to check for HTML strings or ID strings // (both of which we optimize for) quickExpr = /^(?:[^<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/, // Check if a string has a non-whitespace character in it rnotwhite = /\S/, // Used for trimming whitespace trimLeft = /^\s+/, trimRight = /\s+$/, // Check for digits rdigit = /\d/, // Match a standalone tag rsingleTag = /^<(\w+)\s*\/?>(?:<\/\1>)?$/, // JSON RegExp rvalidchars = /^[\],:{}\s]*$/, rvalidescape = /\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, rvalidtokens = /"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, rvalidbraces = /(?:^|:|,)(?:\s*\[)+/g, // Useragent RegExp rwebkit = /(webkit)[ \/]([\w.]+)/, ropera = /(opera)(?:.*version)?[ \/]([\w.]+)/, rmsie = /(msie) ([\w.]+)/, rmozilla = /(mozilla)(?:.*? 
rv:([\w.]+))?/, // Matches dashed string for camelizing rdashAlpha = /-([a-z])/ig, // Used by jQuery.camelCase as callback to replace() fcamelCase = function( all, letter ) { return letter.toUpperCase(); }, // Keep a UserAgent string for use with jQuery.browser userAgent = navigator.userAgent, // For matching the engine and version of the browser browserMatch, // The deferred used on DOM ready readyList, // The ready event handler DOMContentLoaded, // Save a reference to some core methods toString = Object.prototype.toString, hasOwn = Object.prototype.hasOwnProperty, push = Array.prototype.push, slice = Array.prototype.slice, trim = String.prototype.trim, indexOf = Array.prototype.indexOf, // [[Class]] -> type pairs class2type = {}; jQuery.fn = jQuery.prototype = { constructor: jQuery, init: function( selector, context, rootjQuery ) { var match, elem, ret, doc; // Handle $(""), $(null), or $(undefined) if ( !selector ) { return this; } // Handle $(DOMElement) if ( selector.nodeType ) { this.context = this[0] = selector; this.length = 1; return this; } // The body element only exists once, optimize finding it if ( selector === "body" && !context && document.body ) { this.context = document; this[0] = document.body; this.selector = selector; this.length = 1; return this; } // Handle HTML strings if ( typeof selector === "string" ) { // Are we dealing with HTML string or an ID? if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) { // Assume that strings that start and end with <> are HTML and skip the regex check match = [ null, selector, null ]; } else { match = quickExpr.exec( selector ); } // Verify a match, and that no context was specified for #id if ( match && (match[1] || !context) ) { // HANDLE: $(html) -> $(array) if ( match[1] ) { context = context instanceof jQuery ? context[0] : context; doc = (context ? 
context.ownerDocument || context : document); // If a single string is passed in and it's a single tag // just do a createElement and skip the rest ret = rsingleTag.exec( selector ); if ( ret ) { if ( jQuery.isPlainObject( context ) ) { selector = [ document.createElement( ret[1] ) ]; jQuery.fn.attr.call( selector, context, true ); } else { selector = [ doc.createElement( ret[1] ) ]; } } else { ret = jQuery.buildFragment( [ match[1] ], [ doc ] ); selector = (ret.cacheable ? jQuery.clone(ret.fragment) : ret.fragment).childNodes; } return jQuery.merge( this, selector ); // HANDLE: $("#id") } else { elem = document.getElementById( match[2] ); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 if ( elem && elem.parentNode ) { // Handle the case where IE and Opera return items // by name instead of ID if ( elem.id !== match[2] ) { return rootjQuery.find( selector ); } // Otherwise, we inject the element directly into the jQuery object this.length = 1; this[0] = elem; } this.context = document; this.selector = selector; return this; } // HANDLE: $(expr, $(...)) } else if ( !context || context.jquery ) { return (context || rootjQuery).find( selector ); // HANDLE: $(expr, context) // (which is just equivalent to: $(context).find(expr) } else { return this.constructor( context ).find( selector ); } // HANDLE: $(function) // Shortcut for document ready } else if ( jQuery.isFunction( selector ) ) { return rootjQuery.ready( selector ); } if (selector.selector !== undefined) { this.selector = selector.selector; this.context = selector.context; } return jQuery.makeArray( selector, this ); }, // Start with an empty selector selector: "", // The current version of jQuery being used jquery: "1.6.2", // The default length of a jQuery object is 0 length: 0, // The number of elements contained in the matched element set size: function() { return this.length; }, toArray: function() { return slice.call( this, 0 ); }, // Get the Nth 
element in the matched element set OR // Get the whole matched element set as a clean array get: function( num ) { return num == null ? // Return a 'clean' array this.toArray() : // Return just the object ( num < 0 ? this[ this.length + num ] : this[ num ] ); }, // Take an array of elements and push it onto the stack // (returning the new matched element set) pushStack: function( elems, name, selector ) { // Build a new jQuery matched element set var ret = this.constructor(); if ( jQuery.isArray( elems ) ) { push.apply( ret, elems ); } else { jQuery.merge( ret, elems ); } // Add the old object onto the stack (as a reference) ret.prevObject = this; ret.context = this.context; if ( name === "find" ) { ret.selector = this.selector + (this.selector ? " " : "") + selector; } else if ( name ) { ret.selector = this.selector + "." + name + "(" + selector + ")"; } // Return the newly-formed element set return ret; }, // Execute a callback for every element in the matched set. // (You can seed the arguments with an array of args, but this is // only used internally.) each: function( callback, args ) { return jQuery.each( this, callback, args ); }, ready: function( fn ) { // Attach the listeners jQuery.bindReady(); // Add the callback readyList.done( fn ); return this; }, eq: function( i ) { return i === -1 ? this.slice( i ) : this.slice( i, +i + 1 ); }, first: function() { return this.eq( 0 ); }, last: function() { return this.eq( -1 ); }, slice: function() { return this.pushStack( slice.apply( this, arguments ), "slice", slice.call(arguments).join(",") ); }, map: function( callback ) { return this.pushStack( jQuery.map(this, function( elem, i ) { return callback.call( elem, i, elem ); })); }, end: function() { return this.prevObject || this.constructor(null); }, // For internal use only. // Behaves like an Array's method, not like a jQuery method. 
push: push, sort: [].sort, splice: [].splice }; // Give the init function the jQuery prototype for later instantiation jQuery.fn.init.prototype = jQuery.fn; jQuery.extend = jQuery.fn.extend = function() { var options, name, src, copy, copyIsArray, clone, target = arguments[0] || {}, i = 1, length = arguments.length, deep = false; // Handle a deep copy situation if ( typeof target === "boolean" ) { deep = target; target = arguments[1] || {}; // skip the boolean and the target i = 2; } // Handle case when target is a string or something (possible in deep copy) if ( typeof target !== "object" && !jQuery.isFunction(target) ) { target = {}; } // extend jQuery itself if only one argument is passed if ( length === i ) { target = this; --i; } for ( ; i < length; i++ ) { // Only deal with non-null/undefined values if ( (options = arguments[ i ]) != null ) { // Extend the base object for ( name in options ) { src = target[ name ]; copy = options[ name ]; // Prevent never-ending loop if ( target === copy ) { continue; } // Recurse if we're merging plain objects or arrays if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) { if ( copyIsArray ) { copyIsArray = false; clone = src && jQuery.isArray(src) ? src : []; } else { clone = src && jQuery.isPlainObject(src) ? src : {}; } // Never move original objects, clone them target[ name ] = jQuery.extend( deep, clone, copy ); // Don't bring in undefined values } else if ( copy !== undefined ) { target[ name ] = copy; } } } } // Return the modified object return target; }; jQuery.extend({ noConflict: function( deep ) { if ( window.$ === jQuery ) { window.$ = _$; } if ( deep && window.jQuery === jQuery ) { window.jQuery = _jQuery; } return jQuery; }, // Is the DOM ready to be used? Set to true once it occurs. isReady: false, // A counter to track how many items to wait for before // the ready event fires. 
See #6781 readyWait: 1, // Hold (or release) the ready event holdReady: function( hold ) { if ( hold ) { jQuery.readyWait++; } else { jQuery.ready( true ); } }, // Handle when the DOM is ready ready: function( wait ) { // Either a released hold or an DOMready/load event and not yet ready if ( (wait === true && !--jQuery.readyWait) || (wait !== true && !jQuery.isReady) ) { // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443). if ( !document.body ) { return setTimeout( jQuery.ready, 1 ); } // Remember that the DOM is ready jQuery.isReady = true; // If a normal DOM Ready event fired, decrement, and wait if need be if ( wait !== true && --jQuery.readyWait > 0 ) { return; } // If there are functions bound, to execute readyList.resolveWith( document, [ jQuery ] ); // Trigger any bound ready events if ( jQuery.fn.trigger ) { jQuery( document ).trigger( "ready" ).unbind( "ready" ); } } }, bindReady: function() { if ( readyList ) { return; } readyList = jQuery._Deferred(); // Catch cases where $(document).ready() is called after the // browser event has already occurred. 
if ( document.readyState === "complete" ) { // Handle it asynchronously to allow scripts the opportunity to delay ready return setTimeout( jQuery.ready, 1 ); } // Mozilla, Opera and webkit nightlies currently support this event if ( document.addEventListener ) { // Use the handy event callback document.addEventListener( "DOMContentLoaded", DOMContentLoaded, false ); // A fallback to window.onload, that will always work window.addEventListener( "load", jQuery.ready, false ); // If IE event model is used } else if ( document.attachEvent ) { // ensure firing before onload, // maybe late but safe also for iframes document.attachEvent( "onreadystatechange", DOMContentLoaded ); // A fallback to window.onload, that will always work window.attachEvent( "onload", jQuery.ready ); // If IE and not a frame // continually check to see if the document is ready var toplevel = false; try { toplevel = window.frameElement == null; } catch(e) {} if ( document.documentElement.doScroll && toplevel ) { doScrollCheck(); } } }, // See test/unit/core.js for details concerning isFunction. // Since version 1.3, DOM methods and functions like alert // aren't supported. They return false on IE (#2968). isFunction: function( obj ) { return jQuery.type(obj) === "function"; }, isArray: Array.isArray || function( obj ) { return jQuery.type(obj) === "array"; }, // A crude way of determining if an object is a window isWindow: function( obj ) { return obj && typeof obj === "object" && "setInterval" in obj; }, isNaN: function( obj ) { return obj == null || !rdigit.test( obj ) || isNaN( obj ); }, type: function( obj ) { return obj == null ? String( obj ) : class2type[ toString.call(obj) ] || "object"; }, isPlainObject: function( obj ) { // Must be an Object. // Because of IE, we also have to check the presence of the constructor property. 
// Make sure that DOM nodes and window objects don't pass through, as well if ( !obj || jQuery.type(obj) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) { return false; } // Not own constructor property must be Object if ( obj.constructor && !hasOwn.call(obj, "constructor") && !hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) { return false; } // Own properties are enumerated firstly, so to speed up, // if last one is own, then all properties are own. var key; for ( key in obj ) {} return key === undefined || hasOwn.call( obj, key ); }, isEmptyObject: function( obj ) { for ( var name in obj ) { return false; } return true; }, error: function( msg ) { throw msg; }, parseJSON: function( data ) { if ( typeof data !== "string" || !data ) { return null; } // Make sure leading/trailing whitespace is removed (IE can't handle it) data = jQuery.trim( data ); // Attempt to parse using the native JSON parser first if ( window.JSON && window.JSON.parse ) { return window.JSON.parse( data ); } // Make sure the incoming data is actual JSON // Logic borrowed from http://json.org/json2.js if ( rvalidchars.test( data.replace( rvalidescape, "@" ) .replace( rvalidtokens, "]" ) .replace( rvalidbraces, "")) ) { return (new Function( "return " + data ))(); } jQuery.error( "Invalid JSON: " + data ); }, // Cross-browser xml parsing // (xml & tmp used internally) parseXML: function( data , xml , tmp ) { if ( window.DOMParser ) { // Standard tmp = new DOMParser(); xml = tmp.parseFromString( data , "text/xml" ); } else { // IE xml = new ActiveXObject( "Microsoft.XMLDOM" ); xml.async = "false"; xml.loadXML( data ); } tmp = xml.documentElement; if ( ! tmp || ! 
tmp.nodeName || tmp.nodeName === "parsererror" ) { jQuery.error( "Invalid XML: " + data ); } return xml; }, noop: function() {}, // Evaluates a script in a global context // Workarounds based on findings by Jim Driscoll // http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context globalEval: function( data ) { if ( data && rnotwhite.test( data ) ) { // We use execScript on Internet Explorer // We use an anonymous function so that context is window // rather than jQuery in Firefox ( window.execScript || function( data ) { window[ "eval" ].call( window, data ); } )( data ); } }, // Converts a dashed string to camelCased string; // Used by both the css and data modules camelCase: function( string ) { return string.replace( rdashAlpha, fcamelCase ); }, nodeName: function( elem, name ) { return elem.nodeName && elem.nodeName.toUpperCase() === name.toUpperCase(); }, // args is for internal usage only each: function( object, callback, args ) { var name, i = 0, length = object.length, isObj = length === undefined || jQuery.isFunction( object ); if ( args ) { if ( isObj ) { for ( name in object ) { if ( callback.apply( object[ name ], args ) === false ) { break; } } } else { for ( ; i < length; ) { if ( callback.apply( object[ i++ ], args ) === false ) { break; } } } // A special, fast, case for the most common use of each } else { if ( isObj ) { for ( name in object ) { if ( callback.call( object[ name ], name, object[ name ] ) === false ) { break; } } } else { for ( ; i < length; ) { if ( callback.call( object[ i ], i, object[ i++ ] ) === false ) { break; } } } } return object; }, // Use native String.trim function wherever possible trim: trim ? function( text ) { return text == null ? "" : trim.call( text ); } : // Otherwise use our own trimming functionality function( text ) { return text == null ? 
"" : text.toString().replace( trimLeft, "" ).replace( trimRight, "" ); }, // results is for internal usage only makeArray: function( array, results ) { var ret = results || []; if ( array != null ) { // The window, strings (and functions) also have 'length' // The extra typeof function check is to prevent crashes // in Safari 2 (See: #3039) // Tweaked logic slightly to handle Blackberry 4.7 RegExp issues #6930 var type = jQuery.type( array ); if ( array.length == null || type === "string" || type === "function" || type === "regexp" || jQuery.isWindow( array ) ) { push.call( ret, array ); } else { jQuery.merge( ret, array ); } } return ret; }, inArray: function( elem, array ) { if ( indexOf ) { return indexOf.call( array, elem ); } for ( var i = 0, length = array.length; i < length; i++ ) { if ( array[ i ] === elem ) { return i; } } return -1; }, merge: function( first, second ) { var i = first.length, j = 0; if ( typeof second.length === "number" ) { for ( var l = second.length; j < l; j++ ) { first[ i++ ] = second[ j ]; } } else { while ( second[j] !== undefined ) { first[ i++ ] = second[ j++ ]; } } first.length = i; return first; }, grep: function( elems, callback, inv ) { var ret = [], retVal; inv = !!inv; // Go through the array, only saving the items // that pass the validator function for ( var i = 0, length = elems.length; i < length; i++ ) { retVal = !!callback( elems[ i ], i ); if ( inv !== retVal ) { ret.push( elems[ i ] ); } } return ret; }, // arg is for internal usage only map: function( elems, callback, arg ) { var value, key, ret = [], i = 0, length = elems.length, // jquery objects are treated as arrays isArray = elems instanceof jQuery || length !== undefined && typeof length === "number" && ( ( length > 0 && elems[ 0 ] && elems[ length -1 ] ) || length === 0 || jQuery.isArray( elems ) ) ; // Go through the array, translating each of the items to their if ( isArray ) { for ( ; i < length; i++ ) { value = callback( elems[ i ], i, arg ); if ( value 
!= null ) { ret[ ret.length ] = value; } } // Go through every key on the object, } else { for ( key in elems ) { value = callback( elems[ key ], key, arg ); if ( value != null ) { ret[ ret.length ] = value; } } } // Flatten any nested arrays return ret.concat.apply( [], ret ); }, // A global GUID counter for objects guid: 1, // Bind a function to a context, optionally partially applying any // arguments. proxy: function( fn, context ) { if ( typeof context === "string" ) { var tmp = fn[ context ]; context = fn; fn = tmp; } // Quick check to determine if target is callable, in the spec // this throws a TypeError, but we will just return undefined. if ( !jQuery.isFunction( fn ) ) { return undefined; } // Simulated bind var args = slice.call( arguments, 2 ), proxy = function() { return fn.apply( context, args.concat( slice.call( arguments ) ) ); }; // Set the guid of unique handler to the same of original handler, so it can be removed proxy.guid = fn.guid = fn.guid || proxy.guid || jQuery.guid++; return proxy; }, // Mutifunctional method to get and set values to a collection // The value/s can optionally be executed if it's a function access: function( elems, key, value, exec, fn, pass ) { var length = elems.length; // Setting many attributes if ( typeof key === "object" ) { for ( var k in key ) { jQuery.access( elems, k, key[k], exec, fn, value ); } return elems; } // Setting one attribute if ( value !== undefined ) { // Optionally, function values get executed if exec is true exec = !pass && exec && jQuery.isFunction(value); for ( var i = 0; i < length; i++ ) { fn( elems[i], key, exec ? value.call( elems[i], i, fn( elems[i], key ) ) : value, pass ); } return elems; } // Getting an attribute return length ? fn( elems[0], key ) : undefined; }, now: function() { return (new Date()).getTime(); }, // Use of jQuery.browser is frowned upon. 
// More details: http://docs.jquery.com/Utilities/jQuery.browser uaMatch: function( ua ) { ua = ua.toLowerCase(); var match = rwebkit.exec( ua ) || ropera.exec( ua ) || rmsie.exec( ua ) || ua.indexOf("compatible") < 0 && rmozilla.exec( ua ) || []; return { browser: match[1] || "", version: match[2] || "0" }; }, sub: function() { function jQuerySub( selector, context ) { return new jQuerySub.fn.init( selector, context ); } jQuery.extend( true, jQuerySub, this ); jQuerySub.superclass = this; jQuerySub.fn = jQuerySub.prototype = this(); jQuerySub.fn.constructor = jQuerySub; jQuerySub.sub = this.sub; jQuerySub.fn.init = function init( selector, context ) { if ( context && context instanceof jQuery && !(context instanceof jQuerySub) ) { context = jQuerySub( context ); } return jQuery.fn.init.call( this, selector, context, rootjQuerySub ); }; jQuerySub.fn.init.prototype = jQuerySub.fn; var rootjQuerySub = jQuerySub(document); return jQuerySub; }, browser: {} }); // Populate the class2type map jQuery.each("Boolean Number String Function Array Date RegExp Object".split(" "), function(i, name) { class2type[ "[object " + name + "]" ] = name.toLowerCase(); }); browserMatch = jQuery.uaMatch( userAgent ); if ( browserMatch.browser ) { jQuery.browser[ browserMatch.browser ] = true; jQuery.browser.version = browserMatch.version; } // Deprecated, use jQuery.browser.webkit instead if ( jQuery.browser.webkit ) { jQuery.browser.safari = true; } // IE doesn't match non-breaking spaces with \s if ( rnotwhite.test( "\xA0" ) ) { trimLeft = /^[\s\xA0]+/; trimRight = /[\s\xA0]+$/; } // All jQuery objects should point back to these rootjQuery = jQuery(document); // Cleanup functions for the document ready method if ( document.addEventListener ) { DOMContentLoaded = function() { document.removeEventListener( "DOMContentLoaded", DOMContentLoaded, false ); jQuery.ready(); }; } else if ( document.attachEvent ) { DOMContentLoaded = function() { // Make sure body exists, at least, in case IE gets 
a little overzealous (ticket #5443). if ( document.readyState === "complete" ) { document.detachEvent( "onreadystatechange", DOMContentLoaded ); jQuery.ready(); } }; } // The DOM ready check for Internet Explorer function doScrollCheck() { if ( jQuery.isReady ) { return; } try { // If IE is used, use the trick by Diego Perini // http://javascript.nwbox.com/IEContentLoaded/ document.documentElement.doScroll("left"); } catch(e) { setTimeout( doScrollCheck, 1 ); return; } // and execute any waiting functions jQuery.ready(); } return jQuery; })(); var // Promise methods promiseMethods = "done fail isResolved isRejected promise then always pipe".split( " " ), // Static reference to slice sliceDeferred = [].slice; jQuery.extend({ // Create a simple deferred (one callbacks list) _Deferred: function() { var // callbacks list callbacks = [], // stored [ context , args ] fired, // to avoid firing when already doing so firing, // flag to know if the deferred has been cancelled cancelled, // the deferred itself deferred = { // done( f1, f2, ...) 
done: function() { if ( !cancelled ) { var args = arguments, i, length, elem, type, _fired; if ( fired ) { _fired = fired; fired = 0; } for ( i = 0, length = args.length; i < length; i++ ) { elem = args[ i ]; type = jQuery.type( elem ); if ( type === "array" ) { deferred.done.apply( deferred, elem ); } else if ( type === "function" ) { callbacks.push( elem ); } } if ( _fired ) { deferred.resolveWith( _fired[ 0 ], _fired[ 1 ] ); } } return this; }, // resolve with given context and args resolveWith: function( context, args ) { if ( !cancelled && !fired && !firing ) { // make sure args are available (#8421) args = args || []; firing = 1; try { while( callbacks[ 0 ] ) { callbacks.shift().apply( context, args ); } } finally { fired = [ context, args ]; firing = 0; } } return this; }, // resolve with this as context and given arguments resolve: function() { deferred.resolveWith( this, arguments ); return this; }, // Has this deferred been resolved? isResolved: function() { return !!( firing || fired ); }, // Cancel cancel: function() { cancelled = 1; callbacks = []; return this; } }; return deferred; }, // Full fledged deferred (two callbacks list) Deferred: function( func ) { var deferred = jQuery._Deferred(), failDeferred = jQuery._Deferred(), promise; // Add errorDeferred methods, then and promise jQuery.extend( deferred, { then: function( doneCallbacks, failCallbacks ) { deferred.done( doneCallbacks ).fail( failCallbacks ); return this; }, always: function() { return deferred.done.apply( deferred, arguments ).fail.apply( this, arguments ); }, fail: failDeferred.done, rejectWith: failDeferred.resolveWith, reject: failDeferred.resolve, isRejected: failDeferred.isResolved, pipe: function( fnDone, fnFail ) { return jQuery.Deferred(function( newDefer ) { jQuery.each( { done: [ fnDone, "resolve" ], fail: [ fnFail, "reject" ] }, function( handler, data ) { var fn = data[ 0 ], action = data[ 1 ], returned; if ( jQuery.isFunction( fn ) ) { deferred[ handler ](function() { 
returned = fn.apply( this, arguments ); if ( returned && jQuery.isFunction( returned.promise ) ) { returned.promise().then( newDefer.resolve, newDefer.reject ); } else { newDefer[ action ]( returned ); } }); } else { deferred[ handler ]( newDefer[ action ] ); } }); }).promise(); }, // Get a promise for this deferred // If obj is provided, the promise aspect is added to the object promise: function( obj ) { if ( obj == null ) { if ( promise ) { return promise; } promise = obj = {}; } var i = promiseMethods.length; while( i-- ) { obj[ promiseMethods[i] ] = deferred[ promiseMethods[i] ]; } return obj; } }); // Make sure only one callback list will be used deferred.done( failDeferred.cancel ).fail( deferred.cancel ); // Unexpose cancel delete deferred.cancel; // Call given func if any if ( func ) { func.call( deferred, deferred ); } return deferred; }, // Deferred helper when: function( firstParam ) { var args = arguments, i = 0, length = args.length, count = length, deferred = length <= 1 && firstParam && jQuery.isFunction( firstParam.promise ) ? firstParam : jQuery.Deferred(); function resolveFunc( i ) { return function( value ) { args[ i ] = arguments.length > 1 ? sliceDeferred.call( arguments, 0 ) : value; if ( !( --count ) ) { // Strange bug in FF4: // Values changed onto the arguments object sometimes end up as undefined values // outside the $.when method. Cloning the object into a fresh array solves the issue deferred.resolveWith( deferred, sliceDeferred.call( args, 0 ) ); } }; } if ( length > 1 ) { for( ; i < length; i++ ) { if ( args[ i ] && jQuery.isFunction( args[ i ].promise ) ) { args[ i ].promise().then( resolveFunc(i), deferred.reject ); } else { --count; } } if ( !count ) { deferred.resolveWith( deferred, args ); } } else if ( deferred !== firstParam ) { deferred.resolveWith( deferred, length ? 
[ firstParam ] : [] ); } return deferred.promise(); } }); jQuery.support = (function() { var div = document.createElement( "div" ), documentElement = document.documentElement, all, a, select, opt, input, marginDiv, support, fragment, body, testElementParent, testElement, testElementStyle, tds, events, eventName, i, isSupported; // Preliminary tests div.setAttribute("className", "t"); div.innerHTML = "
a"; all = div.getElementsByTagName( "*" ); a = div.getElementsByTagName( "a" )[ 0 ]; // Can't get basic test support if ( !all || !all.length || !a ) { return {}; } // First batch of supports tests select = document.createElement( "select" ); opt = select.appendChild( document.createElement("option") ); input = div.getElementsByTagName( "input" )[ 0 ]; support = { // IE strips leading whitespace when .innerHTML is used leadingWhitespace: ( div.firstChild.nodeType === 3 ), // Make sure that tbody elements aren't automatically inserted // IE will insert them into empty tables tbody: !div.getElementsByTagName( "tbody" ).length, // Make sure that link elements get serialized correctly by innerHTML // This requires a wrapper element in IE htmlSerialize: !!div.getElementsByTagName( "link" ).length, // Get the style information from getAttribute // (IE uses .cssText instead) style: /top/.test( a.getAttribute("style") ), // Make sure that URLs aren't manipulated // (IE normalizes it by default) hrefNormalized: ( a.getAttribute( "href" ) === "/a" ), // Make sure that element opacity exists // (IE uses filter instead) // Use a regex to work around a WebKit issue. See #5145 opacity: /^0.55$/.test( a.style.opacity ), // Verify style float existence // (IE uses styleFloat instead of cssFloat) cssFloat: !!a.style.cssFloat, // Make sure that if no value is specified for a checkbox // that it defaults to "on". // (WebKit defaults to "" instead) checkOn: ( input.value === "on" ), // Make sure that a selected-by-default option has a working selected property. // (WebKit defaults to false instead of true, IE too, if it's in an optgroup) optSelected: opt.selected, // Test setAttribute on camelCase class. 
If it works, we need attrFixes when doing get/setAttribute (ie6/7) getSetAttribute: div.className !== "t", // Will be defined later submitBubbles: true, changeBubbles: true, focusinBubbles: false, deleteExpando: true, noCloneEvent: true, inlineBlockNeedsLayout: false, shrinkWrapBlocks: false, reliableMarginRight: true }; // Make sure checked status is properly cloned input.checked = true; support.noCloneChecked = input.cloneNode( true ).checked; // Make sure that the options inside disabled selects aren't marked as disabled // (WebKit marks them as disabled) select.disabled = true; support.optDisabled = !opt.disabled; // Test to see if it's possible to delete an expando from an element // Fails in Internet Explorer try { delete div.test; } catch( e ) { support.deleteExpando = false; } if ( !div.addEventListener && div.attachEvent && div.fireEvent ) { div.attachEvent( "onclick", function() { // Cloning a node shouldn't copy over any // bound event handlers (IE does this) support.noCloneEvent = false; }); div.cloneNode( true ).fireEvent( "onclick" ); } // Check if a radio maintains it's value // after being appended to the DOM input = document.createElement("input"); input.value = "t"; input.setAttribute("type", "radio"); support.radioValue = input.value === "t"; input.setAttribute("checked", "checked"); div.appendChild( input ); fragment = document.createDocumentFragment(); fragment.appendChild( div.firstChild ); // WebKit doesn't clone checked state correctly in fragments support.checkClone = fragment.cloneNode( true ).cloneNode( true ).lastChild.checked; div.innerHTML = ""; // Figure out if the W3C box model works as expected div.style.width = div.style.paddingLeft = "1px"; body = document.getElementsByTagName( "body" )[ 0 ]; // We use our own, invisible, body unless the body is already present // in which case we use a div (#9239) testElement = document.createElement( body ? 
"div" : "body" ); testElementStyle = { visibility: "hidden", width: 0, height: 0, border: 0, margin: 0 }; if ( body ) { jQuery.extend( testElementStyle, { position: "absolute", left: -1000, top: -1000 }); } for ( i in testElementStyle ) { testElement.style[ i ] = testElementStyle[ i ]; } testElement.appendChild( div ); testElementParent = body || documentElement; testElementParent.insertBefore( testElement, testElementParent.firstChild ); // Check if a disconnected checkbox will retain its checked // value of true after appended to the DOM (IE6/7) support.appendChecked = input.checked; support.boxModel = div.offsetWidth === 2; if ( "zoom" in div.style ) { // Check if natively block-level elements act like inline-block // elements when setting their display to 'inline' and giving // them layout // (IE < 8 does this) div.style.display = "inline"; div.style.zoom = 1; support.inlineBlockNeedsLayout = ( div.offsetWidth === 2 ); // Check if elements with layout shrink-wrap their children // (IE 6 does this) div.style.display = ""; div.innerHTML = "
"; support.shrinkWrapBlocks = ( div.offsetWidth !== 2 ); } div.innerHTML = "
t
"; tds = div.getElementsByTagName( "td" ); // Check if table cells still have offsetWidth/Height when they are set // to display:none and there are still other visible table cells in a // table row; if so, offsetWidth/Height are not reliable for use when // determining if an element has been hidden directly using // display:none (it is still safe to use offsets if a parent element is // hidden; don safety goggles and see bug #4512 for more information). // (only IE 8 fails this test) isSupported = ( tds[ 0 ].offsetHeight === 0 ); tds[ 0 ].style.display = ""; tds[ 1 ].style.display = "none"; // Check if empty table cells still have offsetWidth/Height // (IE < 8 fail this test) support.reliableHiddenOffsets = isSupported && ( tds[ 0 ].offsetHeight === 0 ); div.innerHTML = ""; // Check if div with explicit width and no margin-right incorrectly // gets computed margin-right based on width of container. For more // info see bug #3333 // Fails in WebKit before Feb 2011 nightlies // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right if ( document.defaultView && document.defaultView.getComputedStyle ) { marginDiv = document.createElement( "div" ); marginDiv.style.width = "0"; marginDiv.style.marginRight = "0"; div.appendChild( marginDiv ); support.reliableMarginRight = ( parseInt( ( document.defaultView.getComputedStyle( marginDiv, null ) || { marginRight: 0 } ).marginRight, 10 ) || 0 ) === 0; } // Remove the body element we added testElement.innerHTML = ""; testElementParent.removeChild( testElement ); // Technique from Juriy Zaytsev // http://thinkweb2.com/projects/prototype/detecting-event-support-without-browser-sniffing/ // We only care about the case where non-standard event systems // are used, namely in IE. Short-circuiting here helps us to // avoid an eval call (in setAttribute) which can cause CSP // to go haywire. 
See: https://developer.mozilla.org/en/Security/CSP if ( div.attachEvent ) { for( i in { submit: 1, change: 1, focusin: 1 } ) { eventName = "on" + i; isSupported = ( eventName in div ); if ( !isSupported ) { div.setAttribute( eventName, "return;" ); isSupported = ( typeof div[ eventName ] === "function" ); } support[ i + "Bubbles" ] = isSupported; } } // Null connected elements to avoid leaks in IE testElement = fragment = select = opt = body = marginDiv = div = input = null; return support; })(); // Keep track of boxModel jQuery.boxModel = jQuery.support.boxModel; var rbrace = /^(?:\{.*\}|\[.*\])$/, rmultiDash = /([a-z])([A-Z])/g; jQuery.extend({ cache: {}, // Please use with caution uuid: 0, // Unique for each copy of jQuery on the page // Non-digits removed to match rinlinejQuery expando: "jQuery" + ( jQuery.fn.jquery + Math.random() ).replace( /\D/g, "" ), // The following elements throw uncatchable exceptions if you // attempt to add expando properties to them. noData: { "embed": true, // Ban all objects except for Flash (which handle expandos) "object": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000", "applet": true }, hasData: function( elem ) { elem = elem.nodeType ? jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ]; return !!elem && !isEmptyDataObject( elem ); }, data: function( elem, name, data, pvt /* Internal Use Only */ ) { if ( !jQuery.acceptData( elem ) ) { return; } var internalKey = jQuery.expando, getByName = typeof name === "string", thisCache, // We have to handle DOM nodes and JS objects differently because IE6-7 // can't GC object references properly across the DOM-JS boundary isNode = elem.nodeType, // Only DOM nodes need the global jQuery cache; JS object data is // attached directly to the object so GC can occur automatically cache = isNode ? jQuery.cache : elem, // Only defining an ID for JS objects if its cache already exists allows // the code to shortcut on the same path as a DOM node with no cache id = isNode ? 
elem[ jQuery.expando ] : elem[ jQuery.expando ] && jQuery.expando; // Avoid doing any more work than we need to when trying to get data on an // object that has no data at all if ( (!id || (pvt && id && !cache[ id ][ internalKey ])) && getByName && data === undefined ) { return; } if ( !id ) { // Only DOM nodes need a new unique ID for each element since their data // ends up in the global cache if ( isNode ) { elem[ jQuery.expando ] = id = ++jQuery.uuid; } else { id = jQuery.expando; } } if ( !cache[ id ] ) { cache[ id ] = {}; // TODO: This is a hack for 1.5 ONLY. Avoids exposing jQuery // metadata on plain JS objects when the object is serialized using // JSON.stringify if ( !isNode ) { cache[ id ].toJSON = jQuery.noop; } } // An object can be passed to jQuery.data instead of a key/value pair; this gets // shallow copied over onto the existing cache if ( typeof name === "object" || typeof name === "function" ) { if ( pvt ) { cache[ id ][ internalKey ] = jQuery.extend(cache[ id ][ internalKey ], name); } else { cache[ id ] = jQuery.extend(cache[ id ], name); } } thisCache = cache[ id ]; // Internal jQuery data is stored in a separate object inside the object's data // cache in order to avoid key collisions between internal data and user-defined // data if ( pvt ) { if ( !thisCache[ internalKey ] ) { thisCache[ internalKey ] = {}; } thisCache = thisCache[ internalKey ]; } if ( data !== undefined ) { thisCache[ jQuery.camelCase( name ) ] = data; } // TODO: This is a hack for 1.5 ONLY. It will be removed in 1.6. Users should // not attempt to inspect the internal events object using jQuery.data, as this // internal data object is undocumented and subject to change. if ( name === "events" && !thisCache[name] ) { return thisCache[ internalKey ] && thisCache[ internalKey ].events; } return getByName ? 
// Check for both converted-to-camel and non-converted data property names thisCache[ jQuery.camelCase( name ) ] || thisCache[ name ] : thisCache; }, removeData: function( elem, name, pvt /* Internal Use Only */ ) { if ( !jQuery.acceptData( elem ) ) { return; } var internalKey = jQuery.expando, isNode = elem.nodeType, // See jQuery.data for more information cache = isNode ? jQuery.cache : elem, // See jQuery.data for more information id = isNode ? elem[ jQuery.expando ] : jQuery.expando; // If there is already no cache entry for this object, there is no // purpose in continuing if ( !cache[ id ] ) { return; } if ( name ) { var thisCache = pvt ? cache[ id ][ internalKey ] : cache[ id ]; if ( thisCache ) { delete thisCache[ name ]; // If there is no data left in the cache, we want to continue // and let the cache object itself get destroyed if ( !isEmptyDataObject(thisCache) ) { return; } } } // See jQuery.data for more information if ( pvt ) { delete cache[ id ][ internalKey ]; // Don't destroy the parent cache unless the internal data object // had been the only thing left in it if ( !isEmptyDataObject(cache[ id ]) ) { return; } } var internalCache = cache[ id ][ internalKey ]; // Browsers that fail expando deletion also refuse to delete expandos on // the window, but it will allow it on all other JS objects; other browsers // don't care if ( jQuery.support.deleteExpando || cache != window ) { delete cache[ id ]; } else { cache[ id ] = null; } // We destroyed the entire user cache at once because it's faster than // iterating through each key, but we need to continue to persist internal // data if it existed if ( internalCache ) { cache[ id ] = {}; // TODO: This is a hack for 1.5 ONLY. 
Avoids exposing jQuery // metadata on plain JS objects when the object is serialized using // JSON.stringify if ( !isNode ) { cache[ id ].toJSON = jQuery.noop; } cache[ id ][ internalKey ] = internalCache; // Otherwise, we need to eliminate the expando on the node to avoid // false lookups in the cache for entries that no longer exist } else if ( isNode ) { // IE does not allow us to delete expando properties from nodes, // nor does it have a removeAttribute function on Document nodes; // we must handle all of these cases if ( jQuery.support.deleteExpando ) { delete elem[ jQuery.expando ]; } else if ( elem.removeAttribute ) { elem.removeAttribute( jQuery.expando ); } else { elem[ jQuery.expando ] = null; } } }, // For internal use only. _data: function( elem, name, data ) { return jQuery.data( elem, name, data, true ); }, // A method for determining if a DOM node can handle the data expando acceptData: function( elem ) { if ( elem.nodeName ) { var match = jQuery.noData[ elem.nodeName.toLowerCase() ]; if ( match ) { return !(match === true || elem.getAttribute("classid") !== match); } } return true; } }); jQuery.fn.extend({ data: function( key, value ) { var data = null; if ( typeof key === "undefined" ) { if ( this.length ) { data = jQuery.data( this[0] ); if ( this[0].nodeType === 1 ) { var attr = this[0].attributes, name; for ( var i = 0, l = attr.length; i < l; i++ ) { name = attr[i].name; if ( name.indexOf( "data-" ) === 0 ) { name = jQuery.camelCase( name.substring(5) ); dataAttr( this[0], name, data[ name ] ); } } } } return data; } else if ( typeof key === "object" ) { return this.each(function() { jQuery.data( this, key ); }); } var parts = key.split("."); parts[1] = parts[1] ? "." 
+ parts[1] : ""; if ( value === undefined ) { data = this.triggerHandler("getData" + parts[1] + "!", [parts[0]]); // Try to fetch any internally stored data first if ( data === undefined && this.length ) { data = jQuery.data( this[0], key ); data = dataAttr( this[0], key, data ); } return data === undefined && parts[1] ? this.data( parts[0] ) : data; } else { return this.each(function() { var $this = jQuery( this ), args = [ parts[0], value ]; $this.triggerHandler( "setData" + parts[1] + "!", args ); jQuery.data( this, key, value ); $this.triggerHandler( "changeData" + parts[1] + "!", args ); }); } }, removeData: function( key ) { return this.each(function() { jQuery.removeData( this, key ); }); } }); function dataAttr( elem, key, data ) { // If nothing was found internally, try to fetch any // data from the HTML5 data-* attribute if ( data === undefined && elem.nodeType === 1 ) { var name = "data-" + key.replace( rmultiDash, "$1-$2" ).toLowerCase(); data = elem.getAttribute( name ); if ( typeof data === "string" ) { try { data = data === "true" ? true : data === "false" ? false : data === "null" ? null : !jQuery.isNaN( data ) ? parseFloat( data ) : rbrace.test( data ) ? 
jQuery.parseJSON( data ) : data; } catch( e ) {} // Make sure we set the data so it isn't changed later jQuery.data( elem, key, data ); } else { data = undefined; } } return data; } // TODO: This is a hack for 1.5 ONLY to allow objects with a single toJSON // property to be considered empty objects; this property always exists in // order to make sure JSON.stringify does not expose internal metadata function isEmptyDataObject( obj ) { for ( var name in obj ) { if ( name !== "toJSON" ) { return false; } } return true; } function handleQueueMarkDefer( elem, type, src ) { var deferDataKey = type + "defer", queueDataKey = type + "queue", markDataKey = type + "mark", defer = jQuery.data( elem, deferDataKey, undefined, true ); if ( defer && ( src === "queue" || !jQuery.data( elem, queueDataKey, undefined, true ) ) && ( src === "mark" || !jQuery.data( elem, markDataKey, undefined, true ) ) ) { // Give room for hard-coded callbacks to fire first // and eventually mark/queue something else on the element setTimeout( function() { if ( !jQuery.data( elem, queueDataKey, undefined, true ) && !jQuery.data( elem, markDataKey, undefined, true ) ) { jQuery.removeData( elem, deferDataKey, true ); defer.resolve(); } }, 0 ); } } jQuery.extend({ _mark: function( elem, type ) { if ( elem ) { type = (type || "fx") + "mark"; jQuery.data( elem, type, (jQuery.data(elem,type,undefined,true) || 0) + 1, true ); } }, _unmark: function( force, elem, type ) { if ( force !== true ) { type = elem; elem = force; force = false; } if ( elem ) { type = type || "fx"; var key = type + "mark", count = force ? 
0 : ( (jQuery.data( elem, key, undefined, true) || 1 ) - 1 ); if ( count ) { jQuery.data( elem, key, count, true ); } else { jQuery.removeData( elem, key, true ); handleQueueMarkDefer( elem, type, "mark" ); } } }, queue: function( elem, type, data ) { if ( elem ) { type = (type || "fx") + "queue"; var q = jQuery.data( elem, type, undefined, true ); // Speed up dequeue by getting out quickly if this is just a lookup if ( data ) { if ( !q || jQuery.isArray(data) ) { q = jQuery.data( elem, type, jQuery.makeArray(data), true ); } else { q.push( data ); } } return q || []; } }, dequeue: function( elem, type ) { type = type || "fx"; var queue = jQuery.queue( elem, type ), fn = queue.shift(), defer; // If the fx queue is dequeued, always remove the progress sentinel if ( fn === "inprogress" ) { fn = queue.shift(); } if ( fn ) { // Add a progress sentinel to prevent the fx queue from being // automatically dequeued if ( type === "fx" ) { queue.unshift("inprogress"); } fn.call(elem, function() { jQuery.dequeue(elem, type); }); } if ( !queue.length ) { jQuery.removeData( elem, type + "queue", true ); handleQueueMarkDefer( elem, type, "queue" ); } } }); jQuery.fn.extend({ queue: function( type, data ) { if ( typeof type !== "string" ) { data = type; type = "fx"; } if ( data === undefined ) { return jQuery.queue( this[0], type ); } return this.each(function() { var queue = jQuery.queue( this, type, data ); if ( type === "fx" && queue[0] !== "inprogress" ) { jQuery.dequeue( this, type ); } }); }, dequeue: function( type ) { return this.each(function() { jQuery.dequeue( this, type ); }); }, // Based off of the plugin by Clint Helfers, with permission. // http://blindsignals.com/index.php/2009/07/jquery-delay/ delay: function( time, type ) { time = jQuery.fx ? 
jQuery.fx.speeds[time] || time : time; type = type || "fx"; return this.queue( type, function() { var elem = this; setTimeout(function() { jQuery.dequeue( elem, type ); }, time ); }); }, clearQueue: function( type ) { return this.queue( type || "fx", [] ); }, // Get a promise resolved when queues of a certain type // are emptied (fx is the type by default) promise: function( type, object ) { if ( typeof type !== "string" ) { object = type; type = undefined; } type = type || "fx"; var defer = jQuery.Deferred(), elements = this, i = elements.length, count = 1, deferDataKey = type + "defer", queueDataKey = type + "queue", markDataKey = type + "mark", tmp; function resolve() { if ( !( --count ) ) { defer.resolveWith( elements, [ elements ] ); } } while( i-- ) { if (( tmp = jQuery.data( elements[ i ], deferDataKey, undefined, true ) || ( jQuery.data( elements[ i ], queueDataKey, undefined, true ) || jQuery.data( elements[ i ], markDataKey, undefined, true ) ) && jQuery.data( elements[ i ], deferDataKey, jQuery._Deferred(), true ) )) { count++; tmp.done( resolve ); } } resolve(); return defer.promise(); } }); var rclass = /[\n\t\r]/g, rspace = /\s+/, rreturn = /\r/g, rtype = /^(?:button|input)$/i, rfocusable = /^(?:button|input|object|select|textarea)$/i, rclickable = /^a(?:rea)?$/i, rboolean = /^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i, rinvalidChar = /\:|^on/, formHook, boolHook; jQuery.fn.extend({ attr: function( name, value ) { return jQuery.access( this, name, value, true, jQuery.attr ); }, removeAttr: function( name ) { return this.each(function() { jQuery.removeAttr( this, name ); }); }, prop: function( name, value ) { return jQuery.access( this, name, value, true, jQuery.prop ); }, removeProp: function( name ) { name = jQuery.propFix[ name ] || name; return this.each(function() { // try/catch handles cases where IE balks (such as removing a property on window) try { this[ name ] = 
undefined; delete this[ name ]; } catch( e ) {} }); }, addClass: function( value ) { var classNames, i, l, elem, setClass, c, cl; if ( jQuery.isFunction( value ) ) { return this.each(function( j ) { jQuery( this ).addClass( value.call(this, j, this.className) ); }); } if ( value && typeof value === "string" ) { classNames = value.split( rspace ); for ( i = 0, l = this.length; i < l; i++ ) { elem = this[ i ]; if ( elem.nodeType === 1 ) { if ( !elem.className && classNames.length === 1 ) { elem.className = value; } else { setClass = " " + elem.className + " "; for ( c = 0, cl = classNames.length; c < cl; c++ ) { if ( !~setClass.indexOf( " " + classNames[ c ] + " " ) ) { setClass += classNames[ c ] + " "; } } elem.className = jQuery.trim( setClass ); } } } } return this; }, removeClass: function( value ) { var classNames, i, l, elem, className, c, cl; if ( jQuery.isFunction( value ) ) { return this.each(function( j ) { jQuery( this ).removeClass( value.call(this, j, this.className) ); }); } if ( (value && typeof value === "string") || value === undefined ) { classNames = (value || "").split( rspace ); for ( i = 0, l = this.length; i < l; i++ ) { elem = this[ i ]; if ( elem.nodeType === 1 && elem.className ) { if ( value ) { className = (" " + elem.className + " ").replace( rclass, " " ); for ( c = 0, cl = classNames.length; c < cl; c++ ) { className = className.replace(" " + classNames[ c ] + " ", " "); } elem.className = jQuery.trim( className ); } else { elem.className = ""; } } } } return this; }, toggleClass: function( value, stateVal ) { var type = typeof value, isBool = typeof stateVal === "boolean"; if ( jQuery.isFunction( value ) ) { return this.each(function( i ) { jQuery( this ).toggleClass( value.call(this, i, this.className, stateVal), stateVal ); }); } return this.each(function() { if ( type === "string" ) { // toggle individual class names var className, i = 0, self = jQuery( this ), state = stateVal, classNames = value.split( rspace ); while ( 
(className = classNames[ i++ ]) ) { // check each className given, space seperated list state = isBool ? state : !self.hasClass( className ); self[ state ? "addClass" : "removeClass" ]( className ); } } else if ( type === "undefined" || type === "boolean" ) { if ( this.className ) { // store className if set jQuery._data( this, "__className__", this.className ); } // toggle whole className this.className = this.className || value === false ? "" : jQuery._data( this, "__className__" ) || ""; } }); }, hasClass: function( selector ) { var className = " " + selector + " "; for ( var i = 0, l = this.length; i < l; i++ ) { if ( (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) > -1 ) { return true; } } return false; }, val: function( value ) { var hooks, ret, elem = this[0]; if ( !arguments.length ) { if ( elem ) { hooks = jQuery.valHooks[ elem.nodeName.toLowerCase() ] || jQuery.valHooks[ elem.type ]; if ( hooks && "get" in hooks && (ret = hooks.get( elem, "value" )) !== undefined ) { return ret; } ret = elem.value; return typeof ret === "string" ? // handle most common string cases ret.replace(rreturn, "") : // handle cases where value is null/undef or number ret == null ? "" : ret; } return undefined; } var isFunction = jQuery.isFunction( value ); return this.each(function( i ) { var self = jQuery(this), val; if ( this.nodeType !== 1 ) { return; } if ( isFunction ) { val = value.call( this, i, self.val() ); } else { val = value; } // Treat null/undefined as ""; convert numbers to string if ( val == null ) { val = ""; } else if ( typeof val === "number" ) { val += ""; } else if ( jQuery.isArray( val ) ) { val = jQuery.map(val, function ( value ) { return value == null ? 
"" : value + ""; }); } hooks = jQuery.valHooks[ this.nodeName.toLowerCase() ] || jQuery.valHooks[ this.type ]; // If set returns undefined, fall back to normal setting if ( !hooks || !("set" in hooks) || hooks.set( this, val, "value" ) === undefined ) { this.value = val; } }); } }); jQuery.extend({ valHooks: { option: { get: function( elem ) { // attributes.value is undefined in Blackberry 4.7 but // uses .value. See #6932 var val = elem.attributes.value; return !val || val.specified ? elem.value : elem.text; } }, select: { get: function( elem ) { var value, index = elem.selectedIndex, values = [], options = elem.options, one = elem.type === "select-one"; // Nothing was selected if ( index < 0 ) { return null; } // Loop through all the selected options for ( var i = one ? index : 0, max = one ? index + 1 : options.length; i < max; i++ ) { var option = options[ i ]; // Don't return options that are disabled or in a disabled optgroup if ( option.selected && (jQuery.support.optDisabled ? !option.disabled : option.getAttribute("disabled") === null) && (!option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" )) ) { // Get the specific value for the option value = jQuery( option ).val(); // We don't need an array for one selects if ( one ) { return value; } // Multi-Selects return an array values.push( value ); } } // Fixes Bug #2551 -- select.val() broken in IE after form.reset() if ( one && !values.length && options.length ) { return jQuery( options[ index ] ).val(); } return values; }, set: function( elem, value ) { var values = jQuery.makeArray( value ); jQuery(elem).find("option").each(function() { this.selected = jQuery.inArray( jQuery(this).val(), values ) >= 0; }); if ( !values.length ) { elem.selectedIndex = -1; } return values; } } }, attrFn: { val: true, css: true, html: true, text: true, data: true, width: true, height: true, offset: true }, attrFix: { // Always normalize to ensure hook usage tabindex: "tabIndex" }, attr: function( 
elem, name, value, pass ) { var nType = elem.nodeType; // don't get/set attributes on text, comment and attribute nodes if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { return undefined; } if ( pass && name in jQuery.attrFn ) { return jQuery( elem )[ name ]( value ); } // Fallback to prop when attributes are not supported if ( !("getAttribute" in elem) ) { return jQuery.prop( elem, name, value ); } var ret, hooks, notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); // Normalize the name if needed if ( notxml ) { name = jQuery.attrFix[ name ] || name; hooks = jQuery.attrHooks[ name ]; if ( !hooks ) { // Use boolHook for boolean attributes if ( rboolean.test( name ) ) { hooks = boolHook; // Use formHook for forms and if the name contains certain characters } else if ( formHook && name !== "className" && (jQuery.nodeName( elem, "form" ) || rinvalidChar.test( name )) ) { hooks = formHook; } } } if ( value !== undefined ) { if ( value === null ) { jQuery.removeAttr( elem, name ); return undefined; } else if ( hooks && "set" in hooks && notxml && (ret = hooks.set( elem, value, name )) !== undefined ) { return ret; } else { elem.setAttribute( name, "" + value ); return value; } } else if ( hooks && "get" in hooks && notxml && (ret = hooks.get( elem, name )) !== null ) { return ret; } else { ret = elem.getAttribute( name ); // Non-existent attributes return null, we normalize to undefined return ret === null ? 
undefined : ret; } }, removeAttr: function( elem, name ) { var propName; if ( elem.nodeType === 1 ) { name = jQuery.attrFix[ name ] || name; if ( jQuery.support.getSetAttribute ) { // Use removeAttribute in browsers that support it elem.removeAttribute( name ); } else { jQuery.attr( elem, name, "" ); elem.removeAttributeNode( elem.getAttributeNode( name ) ); } // Set corresponding property to false for boolean attributes if ( rboolean.test( name ) && (propName = jQuery.propFix[ name ] || name) in elem ) { elem[ propName ] = false; } } }, attrHooks: { type: { set: function( elem, value ) { // We can't allow the type property to be changed (since it causes problems in IE) if ( rtype.test( elem.nodeName ) && elem.parentNode ) { jQuery.error( "type property can't be changed" ); } else if ( !jQuery.support.radioValue && value === "radio" && jQuery.nodeName(elem, "input") ) { // Setting the type on a radio button after the value resets the value in IE6-9 // Reset value to it's default in case type is set after value // This is for element creation var val = elem.value; elem.setAttribute( "type", value ); if ( val ) { elem.value = val; } return value; } } }, tabIndex: { get: function( elem ) { // elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set // http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ var attributeNode = elem.getAttributeNode("tabIndex"); return attributeNode && attributeNode.specified ? parseInt( attributeNode.value, 10 ) : rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ? 0 : undefined; } }, // Use the value property for back compat // Use the formHook for button elements in IE6/7 (#1954) value: { get: function( elem, name ) { if ( formHook && jQuery.nodeName( elem, "button" ) ) { return formHook.get( elem, name ); } return name in elem ? 
elem.value : null; }, set: function( elem, value, name ) { if ( formHook && jQuery.nodeName( elem, "button" ) ) { return formHook.set( elem, value, name ); } // Does not return so that setAttribute is also used elem.value = value; } } }, propFix: { tabindex: "tabIndex", readonly: "readOnly", "for": "htmlFor", "class": "className", maxlength: "maxLength", cellspacing: "cellSpacing", cellpadding: "cellPadding", rowspan: "rowSpan", colspan: "colSpan", usemap: "useMap", frameborder: "frameBorder", contenteditable: "contentEditable" }, prop: function( elem, name, value ) { var nType = elem.nodeType; // don't get/set properties on text, comment and attribute nodes if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { return undefined; } var ret, hooks, notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); if ( notxml ) { // Fix name and attach hooks name = jQuery.propFix[ name ] || name; hooks = jQuery.propHooks[ name ]; } if ( value !== undefined ) { if ( hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) { return ret; } else { return (elem[ name ] = value); } } else { if ( hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== undefined ) { return ret; } else { return elem[ name ]; } } }, propHooks: {} }); // Hook for boolean attributes boolHook = { get: function( elem, name ) { // Align boolean attributes with corresponding properties return jQuery.prop( elem, name ) ? 
name.toLowerCase() : undefined; }, set: function( elem, value, name ) { var propName; if ( value === false ) { // Remove boolean attributes when set to false jQuery.removeAttr( elem, name ); } else { // value is true since we know at this point it's type boolean and not false // Set boolean attributes to the same name and set the DOM property propName = jQuery.propFix[ name ] || name; if ( propName in elem ) { // Only set the IDL specifically if it already exists on the element elem[ propName ] = true; } elem.setAttribute( name, name.toLowerCase() ); } return name; } }; // IE6/7 do not support getting/setting some attributes with get/setAttribute if ( !jQuery.support.getSetAttribute ) { // propFix is more comprehensive and contains all fixes jQuery.attrFix = jQuery.propFix; // Use this for any attribute on a form in IE6/7 formHook = jQuery.attrHooks.name = jQuery.attrHooks.title = jQuery.valHooks.button = { get: function( elem, name ) { var ret; ret = elem.getAttributeNode( name ); // Return undefined if nodeValue is empty string return ret && ret.nodeValue !== "" ? 
ret.nodeValue : undefined; }, set: function( elem, value, name ) { // Check form objects in IE (multiple bugs related) // Only use nodeValue if the attribute node exists on the form var ret = elem.getAttributeNode( name ); if ( ret ) { ret.nodeValue = value; return value; } } }; // Set width and height to auto instead of 0 on empty string( Bug #8150 ) // This is for removals jQuery.each([ "width", "height" ], function( i, name ) { jQuery.attrHooks[ name ] = jQuery.extend( jQuery.attrHooks[ name ], { set: function( elem, value ) { if ( value === "" ) { elem.setAttribute( name, "auto" ); return value; } } }); }); } // Some attributes require a special call on IE if ( !jQuery.support.hrefNormalized ) { jQuery.each([ "href", "src", "width", "height" ], function( i, name ) { jQuery.attrHooks[ name ] = jQuery.extend( jQuery.attrHooks[ name ], { get: function( elem ) { var ret = elem.getAttribute( name, 2 ); return ret === null ? undefined : ret; } }); }); } if ( !jQuery.support.style ) { jQuery.attrHooks.style = { get: function( elem ) { // Return undefined in the case of empty string // Normalize to lowercase since IE uppercases css property names return elem.style.cssText.toLowerCase() || undefined; }, set: function( elem, value ) { return (elem.style.cssText = "" + value); } }; } // Safari mis-reports the default selected property of an option // Accessing the parent's selectedIndex property fixes it if ( !jQuery.support.optSelected ) { jQuery.propHooks.selected = jQuery.extend( jQuery.propHooks.selected, { get: function( elem ) { var parent = elem.parentNode; if ( parent ) { parent.selectedIndex; // Make sure that it also works with optgroups, see #5701 if ( parent.parentNode ) { parent.parentNode.selectedIndex; } } } }); } // Radios and checkboxes getter/setter if ( !jQuery.support.checkOn ) { jQuery.each([ "radio", "checkbox" ], function() { jQuery.valHooks[ this ] = { get: function( elem ) { // Handle the case where in Webkit "" is returned instead of "on" if a 
value isn't specified return elem.getAttribute("value") === null ? "on" : elem.value; } }; }); } jQuery.each([ "radio", "checkbox" ], function() { jQuery.valHooks[ this ] = jQuery.extend( jQuery.valHooks[ this ], { set: function( elem, value ) { if ( jQuery.isArray( value ) ) { return (elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0); } } }); }); var rnamespaces = /\.(.*)$/, rformElems = /^(?:textarea|input|select)$/i, rperiod = /\./g, rspaces = / /g, rescape = /[^\w\s.|`]/g, fcleanup = function( nm ) { return nm.replace(rescape, "\\$&"); }; /* * A number of helper functions used for managing events. * Many of the ideas behind this code originated from * Dean Edwards' addEvent library. */ jQuery.event = { // Bind an event to an element // Original by Dean Edwards add: function( elem, types, handler, data ) { if ( elem.nodeType === 3 || elem.nodeType === 8 ) { return; } if ( handler === false ) { handler = returnFalse; } else if ( !handler ) { // Fixes bug #7229. Fix recommended by jdalton return; } var handleObjIn, handleObj; if ( handler.handler ) { handleObjIn = handler; handler = handleObjIn.handler; } // Make sure that the function being executed has a unique ID if ( !handler.guid ) { handler.guid = jQuery.guid++; } // Init the element's event structure var elemData = jQuery._data( elem ); // If no elemData is found then we must be trying to bind to one of the // banned noData elements if ( !elemData ) { return; } var events = elemData.events, eventHandle = elemData.handle; if ( !events ) { elemData.events = events = {}; } if ( !eventHandle ) { elemData.handle = eventHandle = function( e ) { // Discard the second event of a jQuery.event.trigger() and // when an event is called after a page has unloaded return typeof jQuery !== "undefined" && (!e || jQuery.event.triggered !== e.type) ? 
jQuery.event.handle.apply( eventHandle.elem, arguments ) : undefined; }; } // Add elem as a property of the handle function // This is to prevent a memory leak with non-native events in IE. eventHandle.elem = elem; // Handle multiple events separated by a space // jQuery(...).bind("mouseover mouseout", fn); types = types.split(" "); var type, i = 0, namespaces; while ( (type = types[ i++ ]) ) { handleObj = handleObjIn ? jQuery.extend({}, handleObjIn) : { handler: handler, data: data }; // Namespaced event handlers if ( type.indexOf(".") > -1 ) { namespaces = type.split("."); type = namespaces.shift(); handleObj.namespace = namespaces.slice(0).sort().join("."); } else { namespaces = []; handleObj.namespace = ""; } handleObj.type = type; if ( !handleObj.guid ) { handleObj.guid = handler.guid; } // Get the current list of functions bound to this event var handlers = events[ type ], special = jQuery.event.special[ type ] || {}; // Init the event handler queue if ( !handlers ) { handlers = events[ type ] = []; // Check for a special event handler // Only use addEventListener/attachEvent if the special // events handler returns false if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { // Bind the global event handler to the element if ( elem.addEventListener ) { elem.addEventListener( type, eventHandle, false ); } else if ( elem.attachEvent ) { elem.attachEvent( "on" + type, eventHandle ); } } } if ( special.add ) { special.add.call( elem, handleObj ); if ( !handleObj.handler.guid ) { handleObj.handler.guid = handler.guid; } } // Add the function to the element's handler list handlers.push( handleObj ); // Keep track of which events have been used, for event optimization jQuery.event.global[ type ] = true; } // Nullify elem to prevent memory leaks in IE elem = null; }, global: {}, // Detach an event or set of events from an element remove: function( elem, types, handler, pos ) { // don't do events on text and comment nodes if ( 
elem.nodeType === 3 || elem.nodeType === 8 ) { return; } if ( handler === false ) { handler = returnFalse; } var ret, type, fn, j, i = 0, all, namespaces, namespace, special, eventType, handleObj, origType, elemData = jQuery.hasData( elem ) && jQuery._data( elem ), events = elemData && elemData.events; if ( !elemData || !events ) { return; } // types is actually an event object here if ( types && types.type ) { handler = types.handler; types = types.type; } // Unbind all events for the element if ( !types || typeof types === "string" && types.charAt(0) === "." ) { types = types || ""; for ( type in events ) { jQuery.event.remove( elem, type + types ); } return; } // Handle multiple events separated by a space // jQuery(...).unbind("mouseover mouseout", fn); types = types.split(" "); while ( (type = types[ i++ ]) ) { origType = type; handleObj = null; all = type.indexOf(".") < 0; namespaces = []; if ( !all ) { // Namespaced event handlers namespaces = type.split("."); type = namespaces.shift(); namespace = new RegExp("(^|\\.)" + jQuery.map( namespaces.slice(0).sort(), fcleanup ).join("\\.(?:.*\\.)?") + "(\\.|$)"); } eventType = events[ type ]; if ( !eventType ) { continue; } if ( !handler ) { for ( j = 0; j < eventType.length; j++ ) { handleObj = eventType[ j ]; if ( all || namespace.test( handleObj.namespace ) ) { jQuery.event.remove( elem, origType, handleObj.handler, j ); eventType.splice( j--, 1 ); } } continue; } special = jQuery.event.special[ type ] || {}; for ( j = pos || 0; j < eventType.length; j++ ) { handleObj = eventType[ j ]; if ( handler.guid === handleObj.guid ) { // remove the given handler for the given type if ( all || namespace.test( handleObj.namespace ) ) { if ( pos == null ) { eventType.splice( j--, 1 ); } if ( special.remove ) { special.remove.call( elem, handleObj ); } } if ( pos != null ) { break; } } } // remove generic event handler if no more handlers exist if ( eventType.length === 0 || pos != null && eventType.length === 1 ) { if ( 
!special.teardown || special.teardown.call( elem, namespaces ) === false ) { jQuery.removeEvent( elem, type, elemData.handle ); } ret = null; delete events[ type ]; } } // Remove the expando if it's no longer used if ( jQuery.isEmptyObject( events ) ) { var handle = elemData.handle; if ( handle ) { handle.elem = null; } delete elemData.events; delete elemData.handle; if ( jQuery.isEmptyObject( elemData ) ) { jQuery.removeData( elem, undefined, true ); } } }, // Events that are safe to short-circuit if no handlers are attached. // Native DOM events should not be added, they may have inline handlers. customEvent: { "getData": true, "setData": true, "changeData": true }, trigger: function( event, data, elem, onlyHandlers ) { // Event object or event type var type = event.type || event, namespaces = [], exclusive; if ( type.indexOf("!") >= 0 ) { // Exclusive events trigger only for the exact event (no namespaces) type = type.slice(0, -1); exclusive = true; } if ( type.indexOf(".") >= 0 ) { // Namespaced trigger; create a regexp to match event type in handle() namespaces = type.split("."); type = namespaces.shift(); namespaces.sort(); } if ( (!elem || jQuery.event.customEvent[ type ]) && !jQuery.event.global[ type ] ) { // No jQuery handlers for this event type, and it can't have inline handlers return; } // Caller can pass in an Event, Object, or just an event type string event = typeof event === "object" ? // jQuery.Event object event[ jQuery.expando ] ? 
			event :
			// Object literal
			new jQuery.Event( type, event ) :
			// Just the event type (string)
			new jQuery.Event( type );

		event.type = type;
		event.exclusive = exclusive;
		event.namespace = namespaces.join(".");
		event.namespace_re = new RegExp("(^|\\.)" + namespaces.join("\\.(?:.*\\.)?") + "(\\.|$)");

		// triggerHandler() and global events don't bubble or run the default action
		if ( onlyHandlers || !elem ) {
			event.preventDefault();
			event.stopPropagation();
		}

		// Handle a global trigger
		if ( !elem ) {
			// TODO: Stop taunting the data cache; remove global events and always attach to document
			jQuery.each( jQuery.cache, function() {
				// internalKey variable is just used to make it easier to find
				// and potentially change this stuff later; currently it just
				// points to jQuery.expando
				var internalKey = jQuery.expando,
					internalCache = this[ internalKey ];
				if ( internalCache && internalCache.events && internalCache.events[ type ] ) {
					jQuery.event.trigger( event, data, internalCache.handle.elem );
				}
			});
			return;
		}

		// Don't do events on text and comment nodes
		if ( elem.nodeType === 3 || elem.nodeType === 8 ) {
			return;
		}

		// Clean up the event in case it is being reused
		event.result = undefined;
		event.target = elem;

		// Clone any incoming data and prepend the event, creating the handler arg list
		data = data != null ? jQuery.makeArray( data ) : [];
		data.unshift( event );

		var cur = elem,
			// IE doesn't like method names with a colon (#3533, #8272)
			ontype = type.indexOf(":") < 0 ? "on" + type : "";

		// Fire event on the current element, then bubble up the DOM tree
		do {
			var handle = jQuery._data( cur, "handle" );

			event.currentTarget = cur;
			if ( handle ) {
				handle.apply( cur, data );
			}

			// Trigger an inline bound script
			if ( ontype && jQuery.acceptData( cur ) && cur[ ontype ] && cur[ ontype ].apply( cur, data ) === false ) {
				event.result = false;
				event.preventDefault();
			}

			// Bubble up to document, then to window
			cur = cur.parentNode || cur.ownerDocument || cur === event.target.ownerDocument && window;
		} while ( cur && !event.isPropagationStopped() );

		// If nobody prevented the default action, do it now
		if ( !event.isDefaultPrevented() ) {
			var old,
				special = jQuery.event.special[ type ] || {};

			if ( (!special._default || special._default.call( elem.ownerDocument, event ) === false) &&
				!(type === "click" && jQuery.nodeName( elem, "a" )) && jQuery.acceptData( elem ) ) {

				// Call a native DOM method on the target with the same name as the event.
				// Can't use an .isFunction() check here because IE6/7 fails that test.
				// IE<9 dies on focus to hidden element (#1486), may want to revisit a try/catch.
				try {
					if ( ontype && elem[ type ] ) {
						// Don't re-trigger an onFOO event when we call its FOO() method
						old = elem[ ontype ];

						if ( old ) {
							elem[ ontype ] = null;
						}

						jQuery.event.triggered = type;
						elem[ type ]();
					}
				} catch ( ieError ) {}

				if ( old ) {
					elem[ ontype ] = old;
				}

				jQuery.event.triggered = undefined;
			}
		}

		return event.result;
	},

	// Run every handler bound on `this` element for event.type, honoring
	// exclusivity and namespace filtering; handlers can stop further dispatch
	// via stopImmediatePropagation() or by returning false.
	handle: function( event ) {
		event = jQuery.event.fix( event || window.event );

		// Snapshot the handlers list since a called handler may add/remove events.
		var handlers = ((jQuery._data( this, "events" ) || {})[ event.type ] || []).slice(0),
			run_all = !event.exclusive && !event.namespace,
			args = Array.prototype.slice.call( arguments, 0 );

		// Use the fix-ed Event rather than the (read-only) native event
		args[0] = event;
		event.currentTarget = this;

		for ( var j = 0, l = handlers.length; j < l; j++ ) {
			var handleObj = handlers[ j ];

			// Triggered event must 1) be non-exclusive and have no namespace, or
			// 2) have namespace(s) a subset or equal to those in the bound event.
			if ( run_all || event.namespace_re.test( handleObj.namespace ) ) {
				// Pass in a reference to the handler function itself
				// So that we can later remove it
				event.handler = handleObj.handler;
				event.data = handleObj.data;
				event.handleObj = handleObj;

				var ret = handleObj.handler.apply( this, args );

				if ( ret !== undefined ) {
					event.result = ret;
					if ( ret === false ) {
						event.preventDefault();
						event.stopPropagation();
					}
				}

				if ( event.isImmediatePropagationStopped() ) {
					break;
				}
			}
		}

		return event.result;
	},

	// Native event properties that fix() copies onto the writable jQuery.Event
	props: "altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),

	// Wrap a native (read-only) browser event in a writable jQuery.Event and
	// normalize cross-browser quirks: target, relatedTarget, pageX/pageY,
	// which, and metaKey.
	fix: function( event ) {
		if ( event[ jQuery.expando ] ) {
			// Already a fixed jQuery.Event; nothing to do
			return event;
		}

		// store a copy of the original event object
		// and "clone" to set read-only properties
		var originalEvent = event;
		event = jQuery.Event( originalEvent );

		for ( var i = this.props.length, prop; i; ) {
			prop = this.props[ --i ];
			event[ prop ] = originalEvent[ prop ];
		}

		// Fix target property, if necessary
		if ( !event.target ) {
			// Fixes #1925 where srcElement might not be defined either
			event.target = event.srcElement || document;
		}

		// check if target is a textnode (safari)
		if ( event.target.nodeType === 3 ) {
			event.target = event.target.parentNode;
		}

		// Add relatedTarget, if necessary
		if ( !event.relatedTarget && event.fromElement ) {
			event.relatedTarget = event.fromElement === event.target ? event.toElement : event.fromElement;
		}

		// Calculate pageX/Y if missing and clientX/Y available
		if ( event.pageX == null && event.clientX != null ) {
			var eventDocument = event.target.ownerDocument || document,
				doc = eventDocument.documentElement,
				body = eventDocument.body;

			event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc && doc.clientLeft || body && body.clientLeft || 0);
			event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc && doc.clientTop || body && body.clientTop || 0);
		}

		// Add which for key events
		if ( event.which == null && (event.charCode != null || event.keyCode != null) ) {
			event.which = event.charCode != null ? event.charCode : event.keyCode;
		}

		// Add metaKey to non-Mac browsers (use ctrl for PC's and Meta for Macs)
		if ( !event.metaKey && event.ctrlKey ) {
			event.metaKey = event.ctrlKey;
		}

		// Add which for click: 1 === left; 2 === middle; 3 === right
		// Note: button is not normalized, so don't use it
		if ( !event.which && event.button !== undefined ) {
			event.which = (event.button & 1 ? 1 : ( event.button & 2 ? 3 : ( event.button & 4 ? 2 : 0 ) ));
		}

		return event;
	},

	// Deprecated, use jQuery.guid instead
	guid: 1E8,

	// Deprecated, use jQuery.proxy instead
	proxy: jQuery.proxy,

	special: {
		ready: {
			// Make sure the ready event is setup
			setup: jQuery.bindReady,
			teardown: jQuery.noop
		},

		live: {
			add: function( handleObj ) {
				jQuery.event.add( this,
					liveConvert( handleObj.origType, handleObj.selector ),
					jQuery.extend({}, handleObj, {handler: liveHandler, guid: handleObj.handler.guid}) );
			},

			remove: function( handleObj ) {
				jQuery.event.remove( this, liveConvert( handleObj.origType, handleObj.selector ), handleObj );
			}
		},

		beforeunload: {
			setup: function( data, namespaces, eventHandle ) {
				// We only want to do this special case on windows
				if ( jQuery.isWindow( this ) ) {
					this.onbeforeunload = eventHandle;
				}
			},

			teardown: function( namespaces, eventHandle ) {
				if ( this.onbeforeunload === eventHandle ) {
					this.onbeforeunload = null;
				}
			}
		}
	}
};

jQuery.removeEvent = document.removeEventListener ?
	function( elem, type, handle ) {
		if ( elem.removeEventListener ) {
			elem.removeEventListener( type, handle, false );
		}
	} :
	function( elem, type, handle ) {
		if ( elem.detachEvent ) {
			elem.detachEvent( "on" + type, handle );
		}
	};

jQuery.Event = function( src, props ) {
	// Allow instantiation without the 'new' keyword
	if ( !this.preventDefault ) {
		return new jQuery.Event( src, props );
	}

	// Event object
	if ( src && src.type ) {
		this.originalEvent = src;
		this.type = src.type;

		// Events bubbling up the document may have been marked as prevented
		// by a handler lower down the tree; reflect the correct value.
		this.isDefaultPrevented = (src.defaultPrevented || src.returnValue === false ||
			src.getPreventDefault && src.getPreventDefault()) ? returnTrue : returnFalse;

	// Event type
	} else {
		this.type = src;
	}

	// Put explicitly provided properties onto the event object
	if ( props ) {
		jQuery.extend( this, props );
	}

	// timeStamp is buggy for some events on Firefox(#3843)
	// So we won't rely on the native value
	this.timeStamp = jQuery.now();

	// Mark it as fixed
	this[ jQuery.expando ] = true;
};

function returnFalse() {
	return false;
}
function returnTrue() {
	return true;
}

// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding
// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html
jQuery.Event.prototype = {
	preventDefault: function() {
		this.isDefaultPrevented = returnTrue;

		var e = this.originalEvent;
		if ( !e ) {
			return;
		}

		// if preventDefault exists run it on the original event
		if ( e.preventDefault ) {
			e.preventDefault();

		// otherwise set the returnValue property of the original event to false (IE)
		} else {
			e.returnValue = false;
		}
	},
	stopPropagation: function() {
		this.isPropagationStopped = returnTrue;

		var e = this.originalEvent;
		if ( !e ) {
			return;
		}
		// if stopPropagation exists run it on the original event
		if ( e.stopPropagation ) {
			e.stopPropagation();
		}
		// otherwise set the cancelBubble property of the original event to true (IE)
		e.cancelBubble = true;
	},
	stopImmediatePropagation: function() {
		this.isImmediatePropagationStopped = returnTrue;
		this.stopPropagation();
	},
	isDefaultPrevented: returnFalse,
	isPropagationStopped: returnFalse,
	isImmediatePropagationStopped: returnFalse
};

// Checks if an event happened on an element within another element
// Used in jQuery.event.special.mouseenter and mouseleave handlers
var withinElement = function( event ) {
	// Check if mouse(over|out) are still within the same parent element
	var related = event.relatedTarget,
		inside = false,
		eventType = event.type;

	event.type = event.data;

	if ( related !== this ) {

		if ( related ) {
			inside = jQuery.contains( this, related );
		}

		if ( !inside ) {

			jQuery.event.handle.apply( this, arguments );

			event.type = eventType;
		}
	}
},

// In case of event delegation, we only need to rename the event.type,
// liveHandler will take care of the rest.
delegate = function( event ) {
	event.type = event.data;
	jQuery.event.handle.apply( this, arguments );
};

// Create mouseenter and mouseleave events
jQuery.each({
	mouseenter: "mouseover",
	mouseleave: "mouseout"
}, function( orig, fix ) {
	jQuery.event.special[ orig ] = {
		setup: function( data ) {
			jQuery.event.add( this, fix, data && data.selector ? delegate : withinElement, orig );
		},
		teardown: function( data ) {
			jQuery.event.remove( this, fix, data && data.selector ? delegate : withinElement );
		}
	};
});

// submit delegation
if ( !jQuery.support.submitBubbles ) {

	jQuery.event.special.submit = {
		setup: function( data, namespaces ) {
			if ( !jQuery.nodeName( this, "form" ) ) {
				jQuery.event.add(this, "click.specialSubmit", function( e ) {
					var elem = e.target,
						type = elem.type;

					if ( (type === "submit" || type === "image") && jQuery( elem ).closest("form").length ) {
						trigger( "submit", this, arguments );
					}
				});

				jQuery.event.add(this, "keypress.specialSubmit", function( e ) {
					var elem = e.target,
						type = elem.type;

					if ( (type === "text" || type === "password") && jQuery( elem ).closest("form").length && e.keyCode === 13 ) {
						trigger( "submit", this, arguments );
					}
				});

			} else {
				return false;
			}
		},

		teardown: function( namespaces ) {
			jQuery.event.remove( this, ".specialSubmit" );
		}
	};

}

// change delegation, happens here so we have bind.
if ( !jQuery.support.changeBubbles ) {

	var changeFilters,

	getVal = function( elem ) {
		var type = elem.type,
			val = elem.value;

		if ( type === "radio" || type === "checkbox" ) {
			val = elem.checked;

		} else if ( type === "select-multiple" ) {
			val = elem.selectedIndex > -1 ?
				jQuery.map( elem.options, function( elem ) {
					return elem.selected;
				}).join("-") :
				"";

		} else if ( jQuery.nodeName( elem, "select" ) ) {
			val = elem.selectedIndex;
		}

		return val;
	},

	testChange = function testChange( e ) {
		var elem = e.target,
			data, val;

		if ( !rformElems.test( elem.nodeName ) || elem.readOnly ) {
			return;
		}

		data = jQuery._data( elem, "_change_data" );
		val = getVal(elem);

		// the current data will be also retrieved by beforeactivate
		if ( e.type !== "focusout" || elem.type !== "radio" ) {
			jQuery._data( elem, "_change_data", val );
		}

		if ( data === undefined || val === data ) {
			return;
		}

		if ( data != null || val ) {
			e.type = "change";
			e.liveFired = undefined;
			jQuery.event.trigger( e, arguments[1], elem );
		}
	};

	jQuery.event.special.change = {
		filters: {
			focusout: testChange,

			beforedeactivate: testChange,

			click: function( e ) {
				var elem = e.target, type = jQuery.nodeName( elem, "input" ) ? elem.type : "";

				if ( type === "radio" || type === "checkbox" || jQuery.nodeName( elem, "select" ) ) {
					testChange.call( this, e );
				}
			},

			// Change has to be called before submit
			// Keydown will be called before keypress, which is used in submit-event delegation
			keydown: function( e ) {
				var elem = e.target, type = jQuery.nodeName( elem, "input" ) ? elem.type : "";

				if ( (e.keyCode === 13 && !jQuery.nodeName( elem, "textarea" ) ) ||
					(e.keyCode === 32 && (type === "checkbox" || type === "radio")) ||
					type === "select-multiple" ) {
					testChange.call( this, e );
				}
			},

			// Beforeactivate happens also before the previous element is blurred
			// with this event you can't trigger a change event, but you can store
			// information
			beforeactivate: function( e ) {
				var elem = e.target;
				jQuery._data( elem, "_change_data", getVal(elem) );
			}
		},

		setup: function( data, namespaces ) {
			if ( this.type === "file" ) {
				return false;
			}

			for ( var type in changeFilters ) {
				jQuery.event.add( this, type + ".specialChange", changeFilters[type] );
			}

			return rformElems.test( this.nodeName );
		},

		teardown: function( namespaces ) {
			jQuery.event.remove( this, ".specialChange" );

			return rformElems.test( this.nodeName );
		}
	};

	changeFilters = jQuery.event.special.change.filters;

	// Handle when the input is .focus()'d
	changeFilters.focus = changeFilters.beforeactivate;
}

function trigger( type, elem, args ) {
	// Piggyback on a donor event to simulate a different one.
	// Fake originalEvent to avoid donor's stopPropagation, but if the
	// simulated event prevents default then we do the same on the donor.
	// Don't pass args or remember liveFired; they apply to the donor event.
	var event = jQuery.extend( {}, args[ 0 ] );
	event.type = type;
	event.originalEvent = {};
	event.liveFired = undefined;
	jQuery.event.handle.call( elem, event );
	if ( event.isDefaultPrevented() ) {
		args[ 0 ].preventDefault();
	}
}

// Create "bubbling" focus and blur events
if ( !jQuery.support.focusinBubbles ) {
	jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) {

		// Attach a single capturing handler while someone wants focusin/focusout
		var attaches = 0;

		jQuery.event.special[ fix ] = {
			setup: function() {
				if ( attaches++ === 0 ) {
					document.addEventListener( orig, handler, true );
				}
			},
			teardown: function() {
				if ( --attaches === 0 ) {
					document.removeEventListener( orig, handler, true );
				}
			}
		};

		function handler( donor ) {
			// Donor event is always a native one; fix it and switch its type.
			// Let focusin/out handler cancel the donor focus/blur event.
			var e = jQuery.event.fix( donor );
			e.type = fix;
			e.originalEvent = {};
			jQuery.event.trigger( e, null, e.target );
			if ( e.isDefaultPrevented() ) {
				donor.preventDefault();
			}
		}
	});
}

jQuery.each(["bind", "one"], function( i, name ) {
	jQuery.fn[ name ] = function( type, data, fn ) {
		var handler;

		// Handle object literals
		if ( typeof type === "object" ) {
			for ( var key in type ) {
				this[ name ](key, data, type[key], fn);
			}
			return this;
		}

		if ( arguments.length === 2 || data === false ) {
			fn = data;
			data = undefined;
		}

		if ( name === "one" ) {
			handler = function( event ) {
				jQuery( this ).unbind( event, handler );
				return fn.apply( this, arguments );
			};
			handler.guid = fn.guid || jQuery.guid++;
		} else {
			handler = fn;
		}

		if ( type === "unload" && name !== "one" ) {
			this.one( type, data, fn );

		} else {
			for ( var i = 0, l = this.length; i < l; i++ ) {
				jQuery.event.add( this[i], type, handler, data );
			}
		}

		return this;
	};
});

jQuery.fn.extend({
	unbind: function( type, fn ) {
		// Handle object literals
		if ( typeof type === "object" && !type.preventDefault ) {
			for ( var key in type ) {
				this.unbind(key, type[key]);
			}

		} else {
for ( var i = 0, l = this.length; i < l; i++ ) { jQuery.event.remove( this[i], type, fn ); } } return this; }, delegate: function( selector, types, data, fn ) { return this.live( types, data, fn, selector ); }, undelegate: function( selector, types, fn ) { if ( arguments.length === 0 ) { return this.unbind( "live" ); } else { return this.die( types, null, fn, selector ); } }, trigger: function( type, data ) { return this.each(function() { jQuery.event.trigger( type, data, this ); }); }, triggerHandler: function( type, data ) { if ( this[0] ) { return jQuery.event.trigger( type, data, this[0], true ); } }, toggle: function( fn ) { // Save reference to arguments for access in closure var args = arguments, guid = fn.guid || jQuery.guid++, i = 0, toggler = function( event ) { // Figure out which function to execute var lastToggle = ( jQuery.data( this, "lastToggle" + fn.guid ) || 0 ) % i; jQuery.data( this, "lastToggle" + fn.guid, lastToggle + 1 ); // Make sure that clicks stop event.preventDefault(); // and execute the function return args[ lastToggle ].apply( this, arguments ) || false; }; // link all the functions, so any of them can unbind this click handler toggler.guid = guid; while ( i < args.length ) { args[ i++ ].guid = guid; } return this.click( toggler ); }, hover: function( fnOver, fnOut ) { return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver ); } }); var liveMap = { focus: "focusin", blur: "focusout", mouseenter: "mouseover", mouseleave: "mouseout" }; jQuery.each(["live", "die"], function( i, name ) { jQuery.fn[ name ] = function( types, data, fn, origSelector /* Internal Use Only */ ) { var type, i = 0, match, namespaces, preType, selector = origSelector || this.selector, context = origSelector ? 
this : jQuery( this.context ); if ( typeof types === "object" && !types.preventDefault ) { for ( var key in types ) { context[ name ]( key, data, types[key], selector ); } return this; } if ( name === "die" && !types && origSelector && origSelector.charAt(0) === "." ) { context.unbind( origSelector ); return this; } if ( data === false || jQuery.isFunction( data ) ) { fn = data || returnFalse; data = undefined; } types = (types || "").split(" "); while ( (type = types[ i++ ]) != null ) { match = rnamespaces.exec( type ); namespaces = ""; if ( match ) { namespaces = match[0]; type = type.replace( rnamespaces, "" ); } if ( type === "hover" ) { types.push( "mouseenter" + namespaces, "mouseleave" + namespaces ); continue; } preType = type; if ( liveMap[ type ] ) { types.push( liveMap[ type ] + namespaces ); type = type + namespaces; } else { type = (liveMap[ type ] || type) + namespaces; } if ( name === "live" ) { // bind live handler for ( var j = 0, l = context.length; j < l; j++ ) { jQuery.event.add( context[j], "live." + liveConvert( type, selector ), { data: data, selector: selector, handler: fn, origType: type, origHandler: fn, preType: preType } ); } } else { // unbind live handler context.unbind( "live." 
+ liveConvert( type, selector ), fn ); } } return this; }; }); function liveHandler( event ) { var stop, maxLevel, related, match, handleObj, elem, j, i, l, data, close, namespace, ret, elems = [], selectors = [], events = jQuery._data( this, "events" ); // Make sure we avoid non-left-click bubbling in Firefox (#3861) and disabled elements in IE (#6911) if ( event.liveFired === this || !events || !events.live || event.target.disabled || event.button && event.type === "click" ) { return; } if ( event.namespace ) { namespace = new RegExp("(^|\\.)" + event.namespace.split(".").join("\\.(?:.*\\.)?") + "(\\.|$)"); } event.liveFired = this; var live = events.live.slice(0); for ( j = 0; j < live.length; j++ ) { handleObj = live[j]; if ( handleObj.origType.replace( rnamespaces, "" ) === event.type ) { selectors.push( handleObj.selector ); } else { live.splice( j--, 1 ); } } match = jQuery( event.target ).closest( selectors, event.currentTarget ); for ( i = 0, l = match.length; i < l; i++ ) { close = match[i]; for ( j = 0; j < live.length; j++ ) { handleObj = live[j]; if ( close.selector === handleObj.selector && (!namespace || namespace.test( handleObj.namespace )) && !close.elem.disabled ) { elem = close.elem; related = null; // Those two events require additional checking if ( handleObj.preType === "mouseenter" || handleObj.preType === "mouseleave" ) { event.type = handleObj.preType; related = jQuery( event.relatedTarget ).closest( handleObj.selector )[0]; // Make sure not to accidentally match a child element with the same selector if ( related && jQuery.contains( elem, related ) ) { related = elem; } } if ( !related || related !== elem ) { elems.push({ elem: elem, handleObj: handleObj, level: close.level }); } } } } for ( i = 0, l = elems.length; i < l; i++ ) { match = elems[i]; if ( maxLevel && match.level > maxLevel ) { break; } event.currentTarget = match.elem; event.data = match.handleObj.data; event.handleObj = match.handleObj; ret = 
match.handleObj.origHandler.apply( match.elem, arguments ); if ( ret === false || event.isPropagationStopped() ) { maxLevel = match.level; if ( ret === false ) { stop = false; } if ( event.isImmediatePropagationStopped() ) { break; } } } return stop; } function liveConvert( type, selector ) { return (type && type !== "*" ? type + "." : "") + selector.replace(rperiod, "`").replace(rspaces, "&"); } jQuery.each( ("blur focus focusin focusout load resize scroll unload click dblclick " + "mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " + "change select submit keydown keypress keyup error").split(" "), function( i, name ) { // Handle event binding jQuery.fn[ name ] = function( data, fn ) { if ( fn == null ) { fn = data; data = null; } return arguments.length > 0 ? this.bind( name, data, fn ) : this.trigger( name ); }; if ( jQuery.attrFn ) { jQuery.attrFn[ name ] = true; } }); /*! * Sizzle CSS Selector Engine * Copyright 2011, The Dojo Foundation * Released under the MIT, BSD, and GPL Licenses. * More information: http://sizzlejs.com/ */ (function(){ var chunker = /((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, done = 0, toString = Object.prototype.toString, hasDuplicate = false, baseHasDuplicate = true, rBackslash = /\\/g, rNonWord = /\W/; // Here we check if the JavaScript engine is using some sort of // optimization where it does not always call our comparision // function. If that is the case, discard the hasDuplicate value. // Thus far that includes Google Chrome. 
[0, 0].sort(function() { baseHasDuplicate = false; return 0; }); var Sizzle = function( selector, context, results, seed ) { results = results || []; context = context || document; var origContext = context; if ( context.nodeType !== 1 && context.nodeType !== 9 ) { return []; } if ( !selector || typeof selector !== "string" ) { return results; } var m, set, checkSet, extra, ret, cur, pop, i, prune = true, contextXML = Sizzle.isXML( context ), parts = [], soFar = selector; // Reset the position of the chunker regexp (start from head) do { chunker.exec( "" ); m = chunker.exec( soFar ); if ( m ) { soFar = m[3]; parts.push( m[1] ); if ( m[2] ) { extra = m[3]; break; } } } while ( m ); if ( parts.length > 1 && origPOS.exec( selector ) ) { if ( parts.length === 2 && Expr.relative[ parts[0] ] ) { set = posProcess( parts[0] + parts[1], context ); } else { set = Expr.relative[ parts[0] ] ? [ context ] : Sizzle( parts.shift(), context ); while ( parts.length ) { selector = parts.shift(); if ( Expr.relative[ selector ] ) { selector += parts.shift(); } set = posProcess( selector, set ); } } } else { // Take a shortcut and set the context if the root selector is an ID // (but not if it'll be faster if the inner selector is an ID) if ( !seed && parts.length > 1 && context.nodeType === 9 && !contextXML && Expr.match.ID.test(parts[0]) && !Expr.match.ID.test(parts[parts.length - 1]) ) { ret = Sizzle.find( parts.shift(), context, contextXML ); context = ret.expr ? Sizzle.filter( ret.expr, ret.set )[0] : ret.set[0]; } if ( context ) { ret = seed ? { expr: parts.pop(), set: makeArray(seed) } : Sizzle.find( parts.pop(), parts.length === 1 && (parts[0] === "~" || parts[0] === "+") && context.parentNode ? context.parentNode : context, contextXML ); set = ret.expr ? 
Sizzle.filter( ret.expr, ret.set ) : ret.set; if ( parts.length > 0 ) { checkSet = makeArray( set ); } else { prune = false; } while ( parts.length ) { cur = parts.pop(); pop = cur; if ( !Expr.relative[ cur ] ) { cur = ""; } else { pop = parts.pop(); } if ( pop == null ) { pop = context; } Expr.relative[ cur ]( checkSet, pop, contextXML ); } } else { checkSet = parts = []; } } if ( !checkSet ) { checkSet = set; } if ( !checkSet ) { Sizzle.error( cur || selector ); } if ( toString.call(checkSet) === "[object Array]" ) { if ( !prune ) { results.push.apply( results, checkSet ); } else if ( context && context.nodeType === 1 ) { for ( i = 0; checkSet[i] != null; i++ ) { if ( checkSet[i] && (checkSet[i] === true || checkSet[i].nodeType === 1 && Sizzle.contains(context, checkSet[i])) ) { results.push( set[i] ); } } } else { for ( i = 0; checkSet[i] != null; i++ ) { if ( checkSet[i] && checkSet[i].nodeType === 1 ) { results.push( set[i] ); } } } } else { makeArray( checkSet, results ); } if ( extra ) { Sizzle( extra, origContext, results, seed ); Sizzle.uniqueSort( results ); } return results; }; Sizzle.uniqueSort = function( results ) { if ( sortOrder ) { hasDuplicate = baseHasDuplicate; results.sort( sortOrder ); if ( hasDuplicate ) { for ( var i = 1; i < results.length; i++ ) { if ( results[i] === results[ i - 1 ] ) { results.splice( i--, 1 ); } } } } return results; }; Sizzle.matches = function( expr, set ) { return Sizzle( expr, null, null, set ); }; Sizzle.matchesSelector = function( node, expr ) { return Sizzle( expr, null, null, [node] ).length > 0; }; Sizzle.find = function( expr, context, isXML ) { var set; if ( !expr ) { return []; } for ( var i = 0, l = Expr.order.length; i < l; i++ ) { var match, type = Expr.order[i]; if ( (match = Expr.leftMatch[ type ].exec( expr )) ) { var left = match[1]; match.splice( 1, 1 ); if ( left.substr( left.length - 1 ) !== "\\" ) { match[1] = (match[1] || "").replace( rBackslash, "" ); set = Expr.find[ type ]( match, context, 
isXML ); if ( set != null ) { expr = expr.replace( Expr.match[ type ], "" ); break; } } } } if ( !set ) { set = typeof context.getElementsByTagName !== "undefined" ? context.getElementsByTagName( "*" ) : []; } return { set: set, expr: expr }; }; Sizzle.filter = function( expr, set, inplace, not ) { var match, anyFound, old = expr, result = [], curLoop = set, isXMLFilter = set && set[0] && Sizzle.isXML( set[0] ); while ( expr && set.length ) { for ( var type in Expr.filter ) { if ( (match = Expr.leftMatch[ type ].exec( expr )) != null && match[2] ) { var found, item, filter = Expr.filter[ type ], left = match[1]; anyFound = false; match.splice(1,1); if ( left.substr( left.length - 1 ) === "\\" ) { continue; } if ( curLoop === result ) { result = []; } if ( Expr.preFilter[ type ] ) { match = Expr.preFilter[ type ]( match, curLoop, inplace, result, not, isXMLFilter ); if ( !match ) { anyFound = found = true; } else if ( match === true ) { continue; } } if ( match ) { for ( var i = 0; (item = curLoop[i]) != null; i++ ) { if ( item ) { found = filter( item, match, i, curLoop ); var pass = not ^ !!found; if ( inplace && found != null ) { if ( pass ) { anyFound = true; } else { curLoop[i] = false; } } else if ( pass ) { result.push( item ); anyFound = true; } } } } if ( found !== undefined ) { if ( !inplace ) { curLoop = result; } expr = expr.replace( Expr.match[ type ], "" ); if ( !anyFound ) { return []; } break; } } } // Improper expression if ( expr === old ) { if ( anyFound == null ) { Sizzle.error( expr ); } else { break; } } old = expr; } return curLoop; }; Sizzle.error = function( msg ) { throw "Syntax error, unrecognized expression: " + msg; }; var Expr = Sizzle.selectors = { order: [ "ID", "NAME", "TAG" ], match: { ID: /#((?:[\w\u00c0-\uFFFF\-]|\\.)+)/, CLASS: /\.((?:[\w\u00c0-\uFFFF\-]|\\.)+)/, NAME: /\[name=['"]*((?:[\w\u00c0-\uFFFF\-]|\\.)+)['"]*\]/, ATTR: 
/\[\s*((?:[\w\u00c0-\uFFFF\-]|\\.)+)\s*(?:(\S?=)\s*(?:(['"])(.*?)\3|(#?(?:[\w\u00c0-\uFFFF\-]|\\.)*)|)|)\s*\]/, TAG: /^((?:[\w\u00c0-\uFFFF\*\-]|\\.)+)/, CHILD: /:(only|nth|last|first)-child(?:\(\s*(even|odd|(?:[+\-]?\d+|(?:[+\-]?\d*)?n\s*(?:[+\-]\s*\d+)?))\s*\))?/, POS: /:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^\-]|$)/, PSEUDO: /:((?:[\w\u00c0-\uFFFF\-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/ }, leftMatch: {}, attrMap: { "class": "className", "for": "htmlFor" }, attrHandle: { href: function( elem ) { return elem.getAttribute( "href" ); }, type: function( elem ) { return elem.getAttribute( "type" ); } }, relative: { "+": function(checkSet, part){ var isPartStr = typeof part === "string", isTag = isPartStr && !rNonWord.test( part ), isPartStrNotTag = isPartStr && !isTag; if ( isTag ) { part = part.toLowerCase(); } for ( var i = 0, l = checkSet.length, elem; i < l; i++ ) { if ( (elem = checkSet[i]) ) { while ( (elem = elem.previousSibling) && elem.nodeType !== 1 ) {} checkSet[i] = isPartStrNotTag || elem && elem.nodeName.toLowerCase() === part ? elem || false : elem === part; } } if ( isPartStrNotTag ) { Sizzle.filter( part, checkSet, true ); } }, ">": function( checkSet, part ) { var elem, isPartStr = typeof part === "string", i = 0, l = checkSet.length; if ( isPartStr && !rNonWord.test( part ) ) { part = part.toLowerCase(); for ( ; i < l; i++ ) { elem = checkSet[i]; if ( elem ) { var parent = elem.parentNode; checkSet[i] = parent.nodeName.toLowerCase() === part ? parent : false; } } } else { for ( ; i < l; i++ ) { elem = checkSet[i]; if ( elem ) { checkSet[i] = isPartStr ? 
elem.parentNode : elem.parentNode === part; } } if ( isPartStr ) { Sizzle.filter( part, checkSet, true ); } } }, "": function(checkSet, part, isXML){ var nodeCheck, doneName = done++, checkFn = dirCheck; if ( typeof part === "string" && !rNonWord.test( part ) ) { part = part.toLowerCase(); nodeCheck = part; checkFn = dirNodeCheck; } checkFn( "parentNode", part, doneName, checkSet, nodeCheck, isXML ); }, "~": function( checkSet, part, isXML ) { var nodeCheck, doneName = done++, checkFn = dirCheck; if ( typeof part === "string" && !rNonWord.test( part ) ) { part = part.toLowerCase(); nodeCheck = part; checkFn = dirNodeCheck; } checkFn( "previousSibling", part, doneName, checkSet, nodeCheck, isXML ); } }, find: { ID: function( match, context, isXML ) { if ( typeof context.getElementById !== "undefined" && !isXML ) { var m = context.getElementById(match[1]); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 return m && m.parentNode ? [m] : []; } }, NAME: function( match, context ) { if ( typeof context.getElementsByName !== "undefined" ) { var ret = [], results = context.getElementsByName( match[1] ); for ( var i = 0, l = results.length; i < l; i++ ) { if ( results[i].getAttribute("name") === match[1] ) { ret.push( results[i] ); } } return ret.length === 0 ? 
null : ret; } }, TAG: function( match, context ) { if ( typeof context.getElementsByTagName !== "undefined" ) { return context.getElementsByTagName( match[1] ); } } }, preFilter: { CLASS: function( match, curLoop, inplace, result, not, isXML ) { match = " " + match[1].replace( rBackslash, "" ) + " "; if ( isXML ) { return match; } for ( var i = 0, elem; (elem = curLoop[i]) != null; i++ ) { if ( elem ) { if ( not ^ (elem.className && (" " + elem.className + " ").replace(/[\t\n\r]/g, " ").indexOf(match) >= 0) ) { if ( !inplace ) { result.push( elem ); } } else if ( inplace ) { curLoop[i] = false; } } } return false; }, ID: function( match ) { return match[1].replace( rBackslash, "" ); }, TAG: function( match, curLoop ) { return match[1].replace( rBackslash, "" ).toLowerCase(); }, CHILD: function( match ) { if ( match[1] === "nth" ) { if ( !match[2] ) { Sizzle.error( match[0] ); } match[2] = match[2].replace(/^\+|\s*/g, ''); // parse equations like 'even', 'odd', '5', '2n', '3n+2', '4n-1', '-n+6' var test = /(-?)(\d*)(?:n([+\-]?\d*))?/.exec( match[2] === "even" && "2n" || match[2] === "odd" && "2n+1" || !/\D/.test( match[2] ) && "0n+" + match[2] || match[2]); // calculate the numbers (first)n+(last) including if they are negative match[2] = (test[1] + (test[2] || 1)) - 0; match[3] = test[3] - 0; } else if ( match[2] ) { Sizzle.error( match[0] ); } // TODO: Move to normal caching system match[0] = done++; return match; }, ATTR: function( match, curLoop, inplace, result, not, isXML ) { var name = match[1] = match[1].replace( rBackslash, "" ); if ( !isXML && Expr.attrMap[name] ) { match[1] = Expr.attrMap[name]; } // Handle if an un-quoted value was used match[4] = ( match[4] || match[5] || "" ).replace( rBackslash, "" ); if ( match[2] === "~=" ) { match[4] = " " + match[4] + " "; } return match; }, PSEUDO: function( match, curLoop, inplace, result, not ) { if ( match[1] === "not" ) { // If we're dealing with a complex expression, or a simple one if ( ( 
chunker.exec(match[3]) || "" ).length > 1 || /^\w/.test(match[3]) ) { match[3] = Sizzle(match[3], null, null, curLoop); } else { var ret = Sizzle.filter(match[3], curLoop, inplace, true ^ not); if ( !inplace ) { result.push.apply( result, ret ); } return false; } } else if ( Expr.match.POS.test( match[0] ) || Expr.match.CHILD.test( match[0] ) ) { return true; } return match; }, POS: function( match ) { match.unshift( true ); return match; } }, filters: { enabled: function( elem ) { return elem.disabled === false && elem.type !== "hidden"; }, disabled: function( elem ) { return elem.disabled === true; }, checked: function( elem ) { return elem.checked === true; }, selected: function( elem ) { // Accessing this property makes selected-by-default // options in Safari work properly if ( elem.parentNode ) { elem.parentNode.selectedIndex; } return elem.selected === true; }, parent: function( elem ) { return !!elem.firstChild; }, empty: function( elem ) { return !elem.firstChild; }, has: function( elem, i, match ) { return !!Sizzle( match[3], elem ).length; }, header: function( elem ) { return (/h\d/i).test( elem.nodeName ); }, text: function( elem ) { var attr = elem.getAttribute( "type" ), type = elem.type; // IE6 and 7 will map elem.type to 'text' for new HTML5 types (search, etc) // use getAttribute instead to test this case return elem.nodeName.toLowerCase() === "input" && "text" === type && ( attr === type || attr === null ); }, radio: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "radio" === elem.type; }, checkbox: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "checkbox" === elem.type; }, file: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "file" === elem.type; }, password: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "password" === elem.type; }, submit: function( elem ) { var name = elem.nodeName.toLowerCase(); return (name === "input" || name === "button") && "submit" === 
elem.type; }, image: function( elem ) { return elem.nodeName.toLowerCase() === "input" && "image" === elem.type; }, reset: function( elem ) { var name = elem.nodeName.toLowerCase(); return (name === "input" || name === "button") && "reset" === elem.type; }, button: function( elem ) { var name = elem.nodeName.toLowerCase(); return name === "input" && "button" === elem.type || name === "button"; }, input: function( elem ) { return (/input|select|textarea|button/i).test( elem.nodeName ); }, focus: function( elem ) { return elem === elem.ownerDocument.activeElement; } }, setFilters: { first: function( elem, i ) { return i === 0; }, last: function( elem, i, match, array ) { return i === array.length - 1; }, even: function( elem, i ) { return i % 2 === 0; }, odd: function( elem, i ) { return i % 2 === 1; }, lt: function( elem, i, match ) { return i < match[3] - 0; }, gt: function( elem, i, match ) { return i > match[3] - 0; }, nth: function( elem, i, match ) { return match[3] - 0 === i; }, eq: function( elem, i, match ) { return match[3] - 0 === i; } }, filter: { PSEUDO: function( elem, match, i, array ) { var name = match[1], filter = Expr.filters[ name ]; if ( filter ) { return filter( elem, i, match, array ); } else if ( name === "contains" ) { return (elem.textContent || elem.innerText || Sizzle.getText([ elem ]) || "").indexOf(match[3]) >= 0; } else if ( name === "not" ) { var not = match[3]; for ( var j = 0, l = not.length; j < l; j++ ) { if ( not[j] === elem ) { return false; } } return true; } else { Sizzle.error( name ); } }, CHILD: function( elem, match ) { var type = match[1], node = elem; switch ( type ) { case "only": case "first": while ( (node = node.previousSibling) ) { if ( node.nodeType === 1 ) { return false; } } if ( type === "first" ) { return true; } node = elem; case "last": while ( (node = node.nextSibling) ) { if ( node.nodeType === 1 ) { return false; } } return true; case "nth": var first = match[2], last = match[3]; if ( first === 1 && last 
=== 0 ) { return true; } var doneName = match[0], parent = elem.parentNode; if ( parent && (parent.sizcache !== doneName || !elem.nodeIndex) ) { var count = 0; for ( node = parent.firstChild; node; node = node.nextSibling ) { if ( node.nodeType === 1 ) { node.nodeIndex = ++count; } } parent.sizcache = doneName; } var diff = elem.nodeIndex - last; if ( first === 0 ) { return diff === 0; } else { return ( diff % first === 0 && diff / first >= 0 ); } } }, ID: function( elem, match ) { return elem.nodeType === 1 && elem.getAttribute("id") === match; }, TAG: function( elem, match ) { return (match === "*" && elem.nodeType === 1) || elem.nodeName.toLowerCase() === match; }, CLASS: function( elem, match ) { return (" " + (elem.className || elem.getAttribute("class")) + " ") .indexOf( match ) > -1; }, ATTR: function( elem, match ) { var name = match[1], result = Expr.attrHandle[ name ] ? Expr.attrHandle[ name ]( elem ) : elem[ name ] != null ? elem[ name ] : elem.getAttribute( name ), value = result + "", type = match[2], check = match[4]; return result == null ? type === "!=" : type === "=" ? value === check : type === "*=" ? value.indexOf(check) >= 0 : type === "~=" ? (" " + value + " ").indexOf(check) >= 0 : !check ? value && result !== false : type === "!=" ? value !== check : type === "^=" ? value.indexOf(check) === 0 : type === "$=" ? value.substr(value.length - check.length) === check : type === "|=" ? 
value === check || value.substr(0, check.length + 1) === check + "-" : false; }, POS: function( elem, match, i, array ) { var name = match[2], filter = Expr.setFilters[ name ]; if ( filter ) { return filter( elem, i, match, array ); } } } }; var origPOS = Expr.match.POS, fescape = function(all, num){ return "\\" + (num - 0 + 1); }; for ( var type in Expr.match ) { Expr.match[ type ] = new RegExp( Expr.match[ type ].source + (/(?![^\[]*\])(?![^\(]*\))/.source) ); Expr.leftMatch[ type ] = new RegExp( /(^(?:.|\r|\n)*?)/.source + Expr.match[ type ].source.replace(/\\(\d+)/g, fescape) ); } var makeArray = function( array, results ) { array = Array.prototype.slice.call( array, 0 ); if ( results ) { results.push.apply( results, array ); return results; } return array; }; // Perform a simple check to determine if the browser is capable of // converting a NodeList to an array using builtin methods. // Also verifies that the returned array holds DOM nodes // (which is not the case in the Blackberry browser) try { Array.prototype.slice.call( document.documentElement.childNodes, 0 )[0].nodeType; // Provide a fallback method if it does not work } catch( e ) { makeArray = function( array, results ) { var i = 0, ret = results || []; if ( toString.call(array) === "[object Array]" ) { Array.prototype.push.apply( ret, array ); } else { if ( typeof array.length === "number" ) { for ( var l = array.length; i < l; i++ ) { ret.push( array[i] ); } } else { for ( ; array[i]; i++ ) { ret.push( array[i] ); } } } return ret; }; } var sortOrder, siblingCheck; if ( document.documentElement.compareDocumentPosition ) { sortOrder = function( a, b ) { if ( a === b ) { hasDuplicate = true; return 0; } if ( !a.compareDocumentPosition || !b.compareDocumentPosition ) { return a.compareDocumentPosition ? -1 : 1; } return a.compareDocumentPosition(b) & 4 ? 
-1 : 1; }; } else { sortOrder = function( a, b ) { // The nodes are identical, we can exit early if ( a === b ) { hasDuplicate = true; return 0; // Fallback to using sourceIndex (in IE) if it's available on both nodes } else if ( a.sourceIndex && b.sourceIndex ) { return a.sourceIndex - b.sourceIndex; } var al, bl, ap = [], bp = [], aup = a.parentNode, bup = b.parentNode, cur = aup; // If the nodes are siblings (or identical) we can do a quick check if ( aup === bup ) { return siblingCheck( a, b ); // If no parents were found then the nodes are disconnected } else if ( !aup ) { return -1; } else if ( !bup ) { return 1; } // Otherwise they're somewhere else in the tree so we need // to build up a full list of the parentNodes for comparison while ( cur ) { ap.unshift( cur ); cur = cur.parentNode; } cur = bup; while ( cur ) { bp.unshift( cur ); cur = cur.parentNode; } al = ap.length; bl = bp.length; // Start walking down the tree looking for a discrepancy for ( var i = 0; i < al && i < bl; i++ ) { if ( ap[i] !== bp[i] ) { return siblingCheck( ap[i], bp[i] ); } } // We ended someplace up the tree so do a sibling check return i === al ? 
siblingCheck( a, bp[i], -1 ) : siblingCheck( ap[i], b, 1 ); }; siblingCheck = function( a, b, ret ) { if ( a === b ) { return ret; } var cur = a.nextSibling; while ( cur ) { if ( cur === b ) { return -1; } cur = cur.nextSibling; } return 1; }; } // Utility function for retreiving the text value of an array of DOM nodes Sizzle.getText = function( elems ) { var ret = "", elem; for ( var i = 0; elems[i]; i++ ) { elem = elems[i]; // Get the text from text nodes and CDATA nodes if ( elem.nodeType === 3 || elem.nodeType === 4 ) { ret += elem.nodeValue; // Traverse everything else, except comment nodes } else if ( elem.nodeType !== 8 ) { ret += Sizzle.getText( elem.childNodes ); } } return ret; }; // Check to see if the browser returns elements by name when // querying by getElementById (and provide a workaround) (function(){ // We're going to inject a fake input element with a specified name var form = document.createElement("div"), id = "script" + (new Date()).getTime(), root = document.documentElement; form.innerHTML = ""; // Inject it into the root element, check its status, and remove it quickly root.insertBefore( form, root.firstChild ); // The workaround has to do additional checks after a getElementById // Which slows things down for other browsers (hence the branching) if ( document.getElementById( id ) ) { Expr.find.ID = function( match, context, isXML ) { if ( typeof context.getElementById !== "undefined" && !isXML ) { var m = context.getElementById(match[1]); return m ? m.id === match[1] || typeof m.getAttributeNode !== "undefined" && m.getAttributeNode("id").nodeValue === match[1] ? 
[m] : undefined : []; } }; Expr.filter.ID = function( elem, match ) { var node = typeof elem.getAttributeNode !== "undefined" && elem.getAttributeNode("id"); return elem.nodeType === 1 && node && node.nodeValue === match; }; } root.removeChild( form ); // release memory in IE root = form = null; })(); (function(){ // Check to see if the browser returns only elements // when doing getElementsByTagName("*") // Create a fake element var div = document.createElement("div"); div.appendChild( document.createComment("") ); // Make sure no comments are found if ( div.getElementsByTagName("*").length > 0 ) { Expr.find.TAG = function( match, context ) { var results = context.getElementsByTagName( match[1] ); // Filter out possible comments if ( match[1] === "*" ) { var tmp = []; for ( var i = 0; results[i]; i++ ) { if ( results[i].nodeType === 1 ) { tmp.push( results[i] ); } } results = tmp; } return results; }; } // Check to see if an attribute returns normalized href attributes div.innerHTML = ""; if ( div.firstChild && typeof div.firstChild.getAttribute !== "undefined" && div.firstChild.getAttribute("href") !== "#" ) { Expr.attrHandle.href = function( elem ) { return elem.getAttribute( "href", 2 ); }; } // release memory in IE div = null; })(); if ( document.querySelectorAll ) { (function(){ var oldSizzle = Sizzle, div = document.createElement("div"), id = "__sizzle__"; div.innerHTML = "

"; // Safari can't handle uppercase or unicode characters when // in quirks mode. if ( div.querySelectorAll && div.querySelectorAll(".TEST").length === 0 ) { return; } Sizzle = function( query, context, extra, seed ) { context = context || document; // Only use querySelectorAll on non-XML documents // (ID selectors don't work in non-HTML documents) if ( !seed && !Sizzle.isXML(context) ) { // See if we find a selector to speed up var match = /^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec( query ); if ( match && (context.nodeType === 1 || context.nodeType === 9) ) { // Speed-up: Sizzle("TAG") if ( match[1] ) { return makeArray( context.getElementsByTagName( query ), extra ); // Speed-up: Sizzle(".CLASS") } else if ( match[2] && Expr.find.CLASS && context.getElementsByClassName ) { return makeArray( context.getElementsByClassName( match[2] ), extra ); } } if ( context.nodeType === 9 ) { // Speed-up: Sizzle("body") // The body element only exists once, optimize finding it if ( query === "body" && context.body ) { return makeArray( [ context.body ], extra ); // Speed-up: Sizzle("#ID") } else if ( match && match[3] ) { var elem = context.getElementById( match[3] ); // Check parentNode to catch when Blackberry 4.6 returns // nodes that are no longer in the document #6963 if ( elem && elem.parentNode ) { // Handle the case where IE and Opera return items // by name instead of ID if ( elem.id === match[3] ) { return makeArray( [ elem ], extra ); } } else { return makeArray( [], extra ); } } try { return makeArray( context.querySelectorAll(query), extra ); } catch(qsaError) {} // qSA works strangely on Element-rooted queries // We can work around this by specifying an extra ID on the root // and working up from there (Thanks to Andrew Dupont for the technique) // IE 8 doesn't work on object elements } else if ( context.nodeType === 1 && context.nodeName.toLowerCase() !== "object" ) { var oldContext = context, old = context.getAttribute( "id" ), nid = old || id, hasParent = 
context.parentNode, relativeHierarchySelector = /^\s*[+~]/.test( query ); if ( !old ) { context.setAttribute( "id", nid ); } else { nid = nid.replace( /'/g, "\\$&" ); } if ( relativeHierarchySelector && hasParent ) { context = context.parentNode; } try { if ( !relativeHierarchySelector || hasParent ) { return makeArray( context.querySelectorAll( "[id='" + nid + "'] " + query ), extra ); } } catch(pseudoError) { } finally { if ( !old ) { oldContext.removeAttribute( "id" ); } } } } return oldSizzle(query, context, extra, seed); }; for ( var prop in oldSizzle ) { Sizzle[ prop ] = oldSizzle[ prop ]; } // release memory in IE div = null; })(); } (function(){ var html = document.documentElement, matches = html.matchesSelector || html.mozMatchesSelector || html.webkitMatchesSelector || html.msMatchesSelector; if ( matches ) { // Check to see if it's possible to do matchesSelector // on a disconnected node (IE 9 fails this) var disconnectedMatch = !matches.call( document.createElement( "div" ), "div" ), pseudoWorks = false; try { // This should fail with an exception // Gecko does not error, returns false instead matches.call( document.documentElement, "[test!='']:sizzle" ); } catch( pseudoError ) { pseudoWorks = true; } Sizzle.matchesSelector = function( node, expr ) { // Make sure that attribute selectors are quoted expr = expr.replace(/\=\s*([^'"\]]*)\s*\]/g, "='$1']"); if ( !Sizzle.isXML( node ) ) { try { if ( pseudoWorks || !Expr.match.PSEUDO.test( expr ) && !/!=/.test( expr ) ) { var ret = matches.call( node, expr ); // IE 9's matchesSelector returns false on disconnected nodes if ( ret || !disconnectedMatch || // As well, disconnected nodes are said to be in a document // fragment in IE 9, so check for that node.document && node.document.nodeType !== 11 ) { return ret; } } } catch(e) {} } return Sizzle(expr, null, null, [node]).length > 0; }; } })(); (function(){ var div = document.createElement("div"); div.innerHTML = "
"; // Opera can't find a second classname (in 9.6) // Also, make sure that getElementsByClassName actually exists if ( !div.getElementsByClassName || div.getElementsByClassName("e").length === 0 ) { return; } // Safari caches class attributes, doesn't catch changes (in 3.2) div.lastChild.className = "e"; if ( div.getElementsByClassName("e").length === 1 ) { return; } Expr.order.splice(1, 0, "CLASS"); Expr.find.CLASS = function( match, context, isXML ) { if ( typeof context.getElementsByClassName !== "undefined" && !isXML ) { return context.getElementsByClassName(match[1]); } }; // release memory in IE div = null; })(); function dirNodeCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) { for ( var i = 0, l = checkSet.length; i < l; i++ ) { var elem = checkSet[i]; if ( elem ) { var match = false; elem = elem[dir]; while ( elem ) { if ( elem.sizcache === doneName ) { match = checkSet[elem.sizset]; break; } if ( elem.nodeType === 1 && !isXML ){ elem.sizcache = doneName; elem.sizset = i; } if ( elem.nodeName.toLowerCase() === cur ) { match = elem; break; } elem = elem[dir]; } checkSet[i] = match; } } } function dirCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) { for ( var i = 0, l = checkSet.length; i < l; i++ ) { var elem = checkSet[i]; if ( elem ) { var match = false; elem = elem[dir]; while ( elem ) { if ( elem.sizcache === doneName ) { match = checkSet[elem.sizset]; break; } if ( elem.nodeType === 1 ) { if ( !isXML ) { elem.sizcache = doneName; elem.sizset = i; } if ( typeof cur !== "string" ) { if ( elem === cur ) { match = true; break; } } else if ( Sizzle.filter( cur, [elem] ).length > 0 ) { match = elem; break; } } elem = elem[dir]; } checkSet[i] = match; } } } if ( document.documentElement.contains ) { Sizzle.contains = function( a, b ) { return a !== b && (a.contains ? 
a.contains(b) : true); }; } else if ( document.documentElement.compareDocumentPosition ) { Sizzle.contains = function( a, b ) { return !!(a.compareDocumentPosition(b) & 16); }; } else { Sizzle.contains = function() { return false; }; } Sizzle.isXML = function( elem ) { // documentElement is verified for cases where it doesn't yet exist // (such as loading iframes in IE - #4833) var documentElement = (elem ? elem.ownerDocument || elem : 0).documentElement; return documentElement ? documentElement.nodeName !== "HTML" : false; }; var posProcess = function( selector, context ) { var match, tmpSet = [], later = "", root = context.nodeType ? [context] : context; // Position selectors must be done after the filter // And so must :not(positional) so we move all PSEUDOs to the end while ( (match = Expr.match.PSEUDO.exec( selector )) ) { later += match[0]; selector = selector.replace( Expr.match.PSEUDO, "" ); } selector = Expr.relative[selector] ? selector + "*" : selector; for ( var i = 0, l = root.length; i < l; i++ ) { Sizzle( selector, root[i], tmpSet ); } return Sizzle.filter( later, tmpSet ); }; // EXPOSE jQuery.find = Sizzle; jQuery.expr = Sizzle.selectors; jQuery.expr[":"] = jQuery.expr.filters; jQuery.unique = Sizzle.uniqueSort; jQuery.text = Sizzle.getText; jQuery.isXMLDoc = Sizzle.isXML; jQuery.contains = Sizzle.contains; })(); var runtil = /Until$/, rparentsprev = /^(?:parents|prevUntil|prevAll)/, // Note: This RegExp should be improved, or likely pulled from Sizzle rmultiselector = /,/, isSimple = /^.[^:#\[\.,]*$/, slice = Array.prototype.slice, POS = jQuery.expr.match.POS, // methods guaranteed to produce a unique set when starting from a unique set guaranteedUnique = { children: true, contents: true, next: true, prev: true }; jQuery.fn.extend({ find: function( selector ) { var self = this, i, l; if ( typeof selector !== "string" ) { return jQuery( selector ).filter(function() { for ( i = 0, l = self.length; i < l; i++ ) { if ( jQuery.contains( self[ i ], this 
) ) { return true; } } }); } var ret = this.pushStack( "", "find", selector ), length, n, r; for ( i = 0, l = this.length; i < l; i++ ) { length = ret.length; jQuery.find( selector, this[i], ret ); if ( i > 0 ) { // Make sure that the results are unique for ( n = length; n < ret.length; n++ ) { for ( r = 0; r < length; r++ ) { if ( ret[r] === ret[n] ) { ret.splice(n--, 1); break; } } } } } return ret; }, has: function( target ) { var targets = jQuery( target ); return this.filter(function() { for ( var i = 0, l = targets.length; i < l; i++ ) { if ( jQuery.contains( this, targets[i] ) ) { return true; } } }); }, not: function( selector ) { return this.pushStack( winnow(this, selector, false), "not", selector); }, filter: function( selector ) { return this.pushStack( winnow(this, selector, true), "filter", selector ); }, is: function( selector ) { return !!selector && ( typeof selector === "string" ? jQuery.filter( selector, this ).length > 0 : this.filter( selector ).length > 0 ); }, closest: function( selectors, context ) { var ret = [], i, l, cur = this[0]; // Array if ( jQuery.isArray( selectors ) ) { var match, selector, matches = {}, level = 1; if ( cur && selectors.length ) { for ( i = 0, l = selectors.length; i < l; i++ ) { selector = selectors[i]; if ( !matches[ selector ] ) { matches[ selector ] = POS.test( selector ) ? jQuery( selector, context || this.context ) : selector; } } while ( cur && cur.ownerDocument && cur !== context ) { for ( selector in matches ) { match = matches[ selector ]; if ( match.jquery ? match.index( cur ) > -1 : jQuery( cur ).is( match ) ) { ret.push({ selector: selector, elem: cur, level: level }); } } cur = cur.parentNode; level++; } } return ret; } // String var pos = POS.test( selectors ) || typeof selectors !== "string" ? jQuery( selectors, context || this.context ) : 0; for ( i = 0, l = this.length; i < l; i++ ) { cur = this[i]; while ( cur ) { if ( pos ? 
pos.index(cur) > -1 : jQuery.find.matchesSelector(cur, selectors) ) { ret.push( cur ); break; } else { cur = cur.parentNode; if ( !cur || !cur.ownerDocument || cur === context || cur.nodeType === 11 ) { break; } } } } ret = ret.length > 1 ? jQuery.unique( ret ) : ret; return this.pushStack( ret, "closest", selectors ); }, // Determine the position of an element within // the matched set of elements index: function( elem ) { if ( !elem || typeof elem === "string" ) { return jQuery.inArray( this[0], // If it receives a string, the selector is used // If it receives nothing, the siblings are used elem ? jQuery( elem ) : this.parent().children() ); } // Locate the position of the desired element return jQuery.inArray( // If it receives a jQuery object, the first element is used elem.jquery ? elem[0] : elem, this ); }, add: function( selector, context ) { var set = typeof selector === "string" ? jQuery( selector, context ) : jQuery.makeArray( selector && selector.nodeType ? [ selector ] : selector ), all = jQuery.merge( this.get(), set ); return this.pushStack( isDisconnected( set[0] ) || isDisconnected( all[0] ) ? all : jQuery.unique( all ) ); }, andSelf: function() { return this.add( this.prevObject ); } }); // A painfully simple check to see if an element is disconnected // from a document (should be improved, where feasible). function isDisconnected( node ) { return !node || !node.parentNode || node.parentNode.nodeType === 11; } jQuery.each({ parent: function( elem ) { var parent = elem.parentNode; return parent && parent.nodeType !== 11 ? 
parent : null; }, parents: function( elem ) { return jQuery.dir( elem, "parentNode" ); }, parentsUntil: function( elem, i, until ) { return jQuery.dir( elem, "parentNode", until ); }, next: function( elem ) { return jQuery.nth( elem, 2, "nextSibling" ); }, prev: function( elem ) { return jQuery.nth( elem, 2, "previousSibling" ); }, nextAll: function( elem ) { return jQuery.dir( elem, "nextSibling" ); }, prevAll: function( elem ) { return jQuery.dir( elem, "previousSibling" ); }, nextUntil: function( elem, i, until ) { return jQuery.dir( elem, "nextSibling", until ); }, prevUntil: function( elem, i, until ) { return jQuery.dir( elem, "previousSibling", until ); }, siblings: function( elem ) { return jQuery.sibling( elem.parentNode.firstChild, elem ); }, children: function( elem ) { return jQuery.sibling( elem.firstChild ); }, contents: function( elem ) { return jQuery.nodeName( elem, "iframe" ) ? elem.contentDocument || elem.contentWindow.document : jQuery.makeArray( elem.childNodes ); } }, function( name, fn ) { jQuery.fn[ name ] = function( until, selector ) { var ret = jQuery.map( this, fn, until ), // The variable 'args' was introduced in // https://github.com/jquery/jquery/commit/52a0238 // to work around a bug in Chrome 10 (Dev) and should be removed when the bug is fixed. // http://code.google.com/p/v8/issues/detail?id=1050 args = slice.call(arguments); if ( !runtil.test( name ) ) { selector = until; } if ( selector && typeof selector === "string" ) { ret = jQuery.filter( selector, ret ); } ret = this.length > 1 && !guaranteedUnique[ name ] ? jQuery.unique( ret ) : ret; if ( (this.length > 1 || rmultiselector.test( selector )) && rparentsprev.test( name ) ) { ret = ret.reverse(); } return this.pushStack( ret, name, args.join(",") ); }; }); jQuery.extend({ filter: function( expr, elems, not ) { if ( not ) { expr = ":not(" + expr + ")"; } return elems.length === 1 ? jQuery.find.matchesSelector(elems[0], expr) ? 
[ elems[0] ] : [] : jQuery.find.matches(expr, elems); }, dir: function( elem, dir, until ) { var matched = [], cur = elem[ dir ]; while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) { if ( cur.nodeType === 1 ) { matched.push( cur ); } cur = cur[dir]; } return matched; }, nth: function( cur, result, dir, elem ) { result = result || 1; var num = 0; for ( ; cur; cur = cur[dir] ) { if ( cur.nodeType === 1 && ++num === result ) { break; } } return cur; }, sibling: function( n, elem ) { var r = []; for ( ; n; n = n.nextSibling ) { if ( n.nodeType === 1 && n !== elem ) { r.push( n ); } } return r; } }); // Implement the identical functionality for filter and not function winnow( elements, qualifier, keep ) { // Can't pass null or undefined to indexOf in Firefox 4 // Set to 0 to skip string check qualifier = qualifier || 0; if ( jQuery.isFunction( qualifier ) ) { return jQuery.grep(elements, function( elem, i ) { var retVal = !!qualifier.call( elem, i, elem ); return retVal === keep; }); } else if ( qualifier.nodeType ) { return jQuery.grep(elements, function( elem, i ) { return (elem === qualifier) === keep; }); } else if ( typeof qualifier === "string" ) { var filtered = jQuery.grep(elements, function( elem ) { return elem.nodeType === 1; }); if ( isSimple.test( qualifier ) ) { return jQuery.filter(qualifier, filtered, !keep); } else { qualifier = jQuery.filter( qualifier, filtered ); } } return jQuery.grep(elements, function( elem, i ) { return (jQuery.inArray( elem, qualifier ) >= 0) === keep; }); } var rinlinejQuery = / jQuery\d+="(?:\d+|null)"/g, rleadingWhitespace = /^\s+/, rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig, rtagName = /<([\w:]+)/, rtbody = /", "" ], legend: [ 1, "
", "
" ], thead: [ 1, "", "
" ], tr: [ 2, "", "
" ], td: [ 3, "", "
" ], col: [ 2, "", "
" ], area: [ 1, "", "" ], _default: [ 0, "", "" ] }; wrapMap.optgroup = wrapMap.option; wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; wrapMap.th = wrapMap.td; // IE can't serialize and

OpenId login:

Email login:


ikiwiki-3.20160121/templates/inlinepage.tmpl0000644000000000000000000000412312650125230015442 0ustar ikiwiki-3.20160121/templates/googleform.tmpl0000644000000000000000000000042212650125230015465 0ustar
ikiwiki-3.20160121/templates/feedlink.tmpl0000644000000000000000000000061212650125230015107 0ustar
id="" class="feedlink"> RSS Atom
ikiwiki-3.20160121/templates/emailauth.tmpl0000644000000000000000000000035112650125230015277 0ustar To log into , just open the following link: This link can only be used once to log in, and will expire in one day. (Please disregard this email if you were not trying to log in.) -- ikiwiki ikiwiki-3.20160121/templates/editpagegone.tmpl0000644000000000000000000000031612650125230015762 0ustar

The page you were editing has disappeared.

Perhaps someone else has deleted it or moved it. If you want to recreate this page with your text, click "Save Page" again.

ikiwiki-3.20160121/templates/editpage.tmpl0000644000000000000000000000531612650125230015116 0ustar


Attachments
" />

Page preview:

Diff:
ikiwiki-3.20160121/templates/editfailedsave.tmpl0000644000000000000000000000041112650125230016274 0ustar

Failed to save your changes.

Your changes were not able to be saved to disk. The system gave the error:

Your changes are preserved below, and you can try again to save them.

ikiwiki-3.20160121/templates/editcreationconflict.tmpl0000644000000000000000000000044612650125230017527 0ustar

While you were creating this page, someone else independently created a page with the same name.

The edit box below contains the page's current content, followed by the content you entered previously, to allow you to merge the two together before saving.

ikiwiki-3.20160121/templates/editconflict.tmpl0000644000000000000000000000033312650125230015775 0ustar

Your changes conflict with other changes made to the page.

Conflict markers have been inserted into the page content. Reconcile the conflict and commit again to save your changes.

ikiwiki-3.20160121/templates/editcomment.tmpl0000644000000000000000000000226312650125230015642 0ustar

(optional, or signin)
(optional)

(You might want to Signin first?)




Comment preview:
ikiwiki-3.20160121/templates/commentmoderation.tmpl0000644000000000000000000000222412650125230017053 0ustar


No comments need moderation at this time.

ikiwiki-3.20160121/templates/comment.tmpl0000644000000000000000000000370012650125230014771 0ustar ikiwiki-3.20160121/templates/change.tmpl0000644000000000000000000000322612650125230014557 0ustar [[!meta author=""""""]] [[!meta authorurl=""""""]] [[!meta title="""change to on """]] [[!meta permalink=""]]


ikiwiki-3.20160121/templates/calendaryear.tmpl0000644000000000000000000000011112650125230015752 0ustar [[!calendar type=year year= pages=""]] ikiwiki-3.20160121/templates/calendarmonth.tmpl0000644000000000000000000000041312650125230016144 0ustar [[!sidebar content=""" [[!calendar type=month month= year= pages=""]] """]] [[!inline pages="creation_month() and creation_year() and " show=0 feeds=no reverse=yes]] ikiwiki-3.20160121/templates/blogpost.tmpl0000644000000000000000000000131712650125230015162 0ustar
id="" action="" method="get"> ikiwiki-3.20160121/templates/autotag.tmpl0000644000000000000000000000020512650125230014770 0ustar [[!meta title="pages tagged "]] [[!inline pages="tagged()" actions="no" archive="yes" feedshow=10]] ikiwiki-3.20160121/templates/autoindex.tmpl0000644000000000000000000000007512650125230015331 0ustar [[!map pages="/* and ! /*/*"]] ikiwiki-3.20160121/templates/atompage.tmpl0000644000000000000000000000164612650125230015133 0ustar <TMPL_VAR TITLE> ikiwiki ikiwiki-3.20160121/templates/atomitem.tmpl0000644000000000000000000000241412650125230015147 0ustar <TMPL_VAR TITLE> ikiwiki-3.20160121/templates/archivepage.tmpl0000644000000000000000000000065412650125230015612 0ustar


Posted by
ikiwiki-3.20160121/templates/aggregatepost.tmpl0000644000000000000000000000102612650125230016162 0ustar [[!tag ]] [[!meta title=""]] [[!meta permalink=""]] [[!meta copyright=""]] [[!meta author=" ()"]] [[!meta author=""]] [[!meta authorurl=""]] ikiwiki-3.20160121/t/0000755000000000000000000000000012650125230010676 5ustar ikiwiki-3.20160121/t/yesno.t0000755000000000000000000000101712650125230012222 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 11; BEGIN { use_ok("IkiWiki"); } # note: yesno always accepts English even if localized. # So no need to bother setting locale to C. ok(IkiWiki::yesno("yes") == 1); ok(IkiWiki::yesno("Yes") == 1); ok(IkiWiki::yesno("YES") == 1); ok(IkiWiki::yesno("no") == 0); ok(IkiWiki::yesno("No") == 0); ok(IkiWiki::yesno("NO") == 0); ok(IkiWiki::yesno("1") == 1); ok(IkiWiki::yesno("0") == 0); ok(IkiWiki::yesno("mooooooooooo") == 0); ok(IkiWiki::yesno(undef) == 0); ikiwiki-3.20160121/t/wellformed.t0000755000000000000000000000171712650125230013234 0ustar #!/usr/bin/perl use warnings; use strict; use Cwd qw(); use File::Find; use Test::More; plan(skip_all => 'running installed') if $ENV{INSTALLED_TESTS}; plan(skip_all => "XML::Parser not available") unless eval q{use XML::Parser (); 1;}; use IkiWiki; ok(system("make >/dev/null") == 0); chdir("html") || die "chdir: $!"; sub wanted { my $file = $_; return if -d $file; $file =~ s{^\./}{}; return if $file !~ m/\.html$/; if (eval { XML::Parser->new()->parsefile($file); 1; }) { pass($file); } elsif ($file =~ m{^(?: # user-contributed, contains explicit
plugins/contrib/gallery | # use templatebody when branchable.com has been upgraded templates/ | # malformed content in
 not escaped by discount
			tips/convert_mediawiki_to_ikiwiki
			# user-contributed, content is anyone's guess
			users/ |
			)}x) {
		TODO: {
			local $TODO = $@;
			fail($file);
		}
	}
}

# Walk every file under the current directory (we chdir'd into html/ after
# the build), handing each one to the wanted callback, which validates
# each .html page with XML::Parser.
find({
	# no_chdir: keep paths relative to html/ so the TODO whitelist
	# patterns in wanted match against full relative paths.
	no_chdir => 1,
	wanted => \&wanted,
}, '.');

done_testing;
ikiwiki-3.20160121/t/urlto.t0000755000000000000000000000544312650125230012241 0ustar  #!/usr/bin/perl
# Unit tests for IkiWiki's URL generation: cgiurl(), baseurl() and urlto(),
# covering the absolute, site-local (absolute-path) and fully-relative
# forms, plus the protocol-relative degradation used when the static url
# and the cgiurl live on different hosts or schemes.
use warnings;
use strict;
use Test::More tests => 31;

BEGIN { use_ok("IkiWiki"); }

# Deliberately non-default config values (note the uppercase htmlext and a
# srcdir that does not exist) — presumably chosen so leaked defaults would
# be noticed; nothing is actually rendered from srcdir here.
$IkiWiki::config{srcdir} = '/does/not/exist/';
$IkiWiki::config{usedirs} = 1;
$IkiWiki::config{htmlext} = "HTML";
$IkiWiki::config{wiki_file_chars} = "A-Za-z0-9._";

$IkiWiki::config{url} = "http://smcv.example.co.uk";
$IkiWiki::config{cgiurl} = "http://smcv.example.co.uk/cgi-bin/ikiwiki.cgi";
is(IkiWiki::checkconfig(), 1);

# absolute version
is(IkiWiki::cgiurl(cgiurl => $config{cgiurl}), "http://smcv.example.co.uk/cgi-bin/ikiwiki.cgi");
is(IkiWiki::cgiurl(cgiurl => $config{cgiurl}, do => 'badger'), "http://smcv.example.co.uk/cgi-bin/ikiwiki.cgi?do=badger");
is(IkiWiki::urlto('index', undef, 1), "http://smcv.example.co.uk/");
is(IkiWiki::urlto('stoats', undef, 1), "http://smcv.example.co.uk/stoats/");
is(IkiWiki::urlto('', undef, 1), "http://smcv.example.co.uk/");

# "local" (absolute path within site) version (default for cgiurl)
is(IkiWiki::cgiurl(), "/cgi-bin/ikiwiki.cgi");
is(IkiWiki::cgiurl(do => 'badger'), "/cgi-bin/ikiwiki.cgi?do=badger");
is(IkiWiki::baseurl(undef), "/");
is(IkiWiki::urlto('index', undef), "/");
is(IkiWiki::urlto('index'), "/");
is(IkiWiki::urlto('stoats', undef), "/stoats/");
is(IkiWiki::urlto('stoats'), "/stoats/");
is(IkiWiki::urlto(''), "/");

# fully-relative version (default for urlto and baseurl)
is(IkiWiki::baseurl('badger/mushroom'), "../../");
is(IkiWiki::urlto('badger/mushroom', 'snake'), "../badger/mushroom/");
is(IkiWiki::urlto('', 'snake'), "../");
is(IkiWiki::urlto('', 'penguin/herring'), "../../");

# explicit cgiurl override
is(IkiWiki::cgiurl(cgiurl => 'https://foo/ikiwiki'), "https://foo/ikiwiki");
is(IkiWiki::cgiurl(do => 'badger', cgiurl => 'https://foo/ikiwiki'), "https://foo/ikiwiki?do=badger");

# with url and cgiurl on different sites, "local" degrades to protocol-relative
$IkiWiki::config{url} = "http://example.co.uk/~smcv";
$IkiWiki::config{cgiurl} = "http://dynamic.example.co.uk/~smcv/ikiwiki.cgi";
is(IkiWiki::checkconfig(), 1);
is(IkiWiki::cgiurl(), "//dynamic.example.co.uk/~smcv/ikiwiki.cgi");
is(IkiWiki::baseurl(undef), "//example.co.uk/~smcv/");
is(IkiWiki::urlto('stoats', undef), "//example.co.uk/~smcv/stoats/");
is(IkiWiki::urlto('', undef), "//example.co.uk/~smcv/");

# with url and cgiurl on different schemes, "local" degrades to absolute for
# CGI but protocol-relative for static content, to avoid the CGI having
# mixed content
$IkiWiki::config{url} = "http://example.co.uk/~smcv";
$IkiWiki::config{cgiurl} = "https://dynamic.example.co.uk/~smcv/ikiwiki.cgi";
is(IkiWiki::checkconfig(), 1);
is(IkiWiki::cgiurl(), "https://dynamic.example.co.uk/~smcv/ikiwiki.cgi");
is(IkiWiki::baseurl(undef), "//example.co.uk/~smcv/");
is(IkiWiki::urlto('stoats', undef), "//example.co.uk/~smcv/stoats/");
ikiwiki-3.20160121/t/trail.t0000755000000000000000000002663112650125230012211 0ustar  #!/usr/bin/perl
use warnings;
use strict;
use Test::More;
use IkiWiki;

# Assert that the rendered page $file contains a trail navigation line of
# the form "trail=<name> <expected>" (emitted by the stylized test
# template).  With no $trailname, any word-like trail name is accepted.
sub check_trail {
	my ($file, $expected, $trailname) = @_;
	$trailname ||= qr/\w+/;
	my $html = readfile("t/tmp/out/$file");
	my $found;
	if ($html =~ /^trail=$trailname\s+(.*)$/m) {
		$found = $1;
	}
	is($found, $expected, "expected $expected in $file");
}

# Assert that the rendered page $file does NOT contain a trail navigation
# line for $trailname (for any trail at all, if no name is given).
#
# Bug fix: the previous version captured the text *after* "trail=<name> "
# into $trailline and then tested that tail against /^trail=$trailname\s+/
# — a pattern the captured tail can never begin with, so the ok() passed
# even when the trail was present, making the check vacuous.  Assert the
# absence of the match directly instead.
sub check_no_trail {
	my $file=shift;
	my $trailname=shift || qr/\w+/;
	my $blob=readfile("t/tmp/out/$file");
	my ($trailline)=$blob=~/^trail=$trailname\s+(.*)$/m;
	ok(! defined $trailline, "no trail $trailname in $file");
}

my $blob;

ok(! system("rm -rf t/tmp"));
ok(! system("mkdir t/tmp"));

my $installed = $ENV{INSTALLED_TESTS};

my @command;
if ($installed) {
	@command = qw(ikiwiki);
}
else {
	ok(! system("make -s ikiwiki.out"));
	@command = qw(perl -I. ./ikiwiki.out
		--underlaydir=underlays/basewiki
		--set underlaydirbase=underlays
		--templatedir=templates);
}

push @command, qw(--set usedirs=0 --plugin trail --plugin inline
	--url=http://example.com --cgiurl=http://example.com/ikiwiki.cgi
	--rss --atom t/tmp/in t/tmp/out --verbose);

# Write files with a date in the past, so that when we refresh,
# the update is detected.
# Create $name under t/tmp/in with the given $content, then back-date its
# mtime so that a later plain writefile() (which leaves the current time)
# is strictly newer and registers as a change on --refresh.
sub write_old_file {
	my ($name, $content) = @_;
	writefile($name, "t/tmp/in", $content);
	# 333333333 is an arbitrary timestamp far in the past.
	my $stamp = 333333333;
	ok(utime($stamp, $stamp, "t/tmp/in/$name"));
}

# Use a rather stylized template to override the default rendering, to make
# it easy to search for the desired results
write_old_file("templates/trails.tmpl", <


EOF
);
write_old_file("badger.mdwn", "[[!meta title=\"The Breezy Badger\"]]\ncontent of badger");
write_old_file("mushroom.mdwn", "content of mushroom");
write_old_file("snake.mdwn", "content of snake");
write_old_file("ratty.mdwn", "content of ratty");
write_old_file("mr_toad.mdwn", "content of mr toad");
write_old_file("add.mdwn", '[[!trailitems pagenames="add/a add/b add/c add/d add/e"]]');
write_old_file("add/b.mdwn", "b");
write_old_file("add/d.mdwn", "d");
write_old_file("del.mdwn", '[[!trailitems pages="del/*" sort=title]]');
write_old_file("del/a.mdwn", "a");
write_old_file("del/b.mdwn", "b");
write_old_file("del/c.mdwn", "c");
write_old_file("del/d.mdwn", "d");
write_old_file("del/e.mdwn", "e");
write_old_file("self_referential.mdwn", '[[!trailitems pagenames="self_referential" circular=yes]]');
write_old_file("sorting/linked.mdwn", "linked");
write_old_file("sorting/a/b.mdwn", "a/b");
write_old_file("sorting/a/c.mdwn", "a/c");
write_old_file("sorting/z/a.mdwn", "z/a");
write_old_file("sorting/beginning.mdwn", "beginning");
write_old_file("sorting/middle.mdwn", "middle");
write_old_file("sorting/end.mdwn", "end");
write_old_file("sorting/new.mdwn", "new");
write_old_file("sorting/old.mdwn", "old");
write_old_file("sorting/ancient.mdwn", "ancient");
# These three need to be in the appropriate age order
ok(utime(333333333, 333333333, "t/tmp/in/sorting/new.mdwn"));
ok(utime(222222222, 222222222, "t/tmp/in/sorting/old.mdwn"));
ok(utime(111111111, 111111111, "t/tmp/in/sorting/ancient.mdwn"));
write_old_file("sorting/linked2.mdwn", "linked2");
# This initially uses the default sort order: age for the inline, and path
# for trailitems. We change it later.
write_old_file("sorting.mdwn",
	'[[!traillink linked]] ' .
	'[[!trailitems pages="sorting/z/a or sorting/a/b or sorting/a/c"]] ' .
	'[[!trailitems pagenames="sorting/beginning sorting/middle sorting/end"]] ' .
	'[[!inline pages="sorting/old or sorting/ancient or sorting/new" trail="yes"]] ' .
	'[[!traillink linked2]]');
write_old_file("limited/a.mdwn", "a");
write_old_file("limited/b.mdwn", "b");
write_old_file("limited/c.mdwn", "c");
write_old_file("limited/d.mdwn", "d");
write_old_file("limited.mdwn",
	'[[!inline pages="limited/*" trail="yes" show=2 sort=title]]');
write_old_file("untrail/a.mdwn", "a");
write_old_file("untrail/b.mdwn", "b");
write_old_file("untrail.mdwn", "[[!traillink a]] [[!traillink b]]");
write_old_file("retitled/a.mdwn", "a");
write_old_file("retitled.mdwn",
	'[[!meta title="the old title"]][[!traillink a]]');

write_old_file("meme.mdwn", <badger<\/a>/m);
ok($blob =~ /This is a link to badger, with a title<\/a>/m);
ok($blob =~ /That is the badger<\/a>/m);

check_trail("badger.html", "n=mushroom p=", "meme");
check_trail("badger.html", "n=mr_toad p=ratty", "wind_in_the_willows");

ok(! -f "t/tmp/out/moley.html");

check_trail("mr_toad.html", "n=ratty p=badger", "wind_in_the_willows");
check_no_trail("mr_toad.html", "meme");
# meta title is respected for pages that have one
$blob = readfile("t/tmp/out/mr_toad.html");
ok($blob =~ /">< The Breezy Badger<\/a>/m);
# pagetitle for pages that don't
ok($blob =~ /">ratty ><\/a>/m);

check_no_trail("ratty.html", "meme");
check_trail("ratty.html", "n=badger p=mr_toad", "wind_in_the_willows");

check_trail("mushroom.html", "n=snake p=badger", "meme");
check_no_trail("mushroom.html", "wind_in_the_willows");

check_trail("snake.html", "n= p=mushroom", "meme");
check_no_trail("snake.html", "wind_in_the_willows");

check_trail("self_referential.html", "n= p=", "self_referential");

check_trail("add/b.html", "n=add/d p=", "add");
check_trail("add/d.html", "n= p=add/b", "add");
ok(! -f "t/tmp/out/add/a.html");
ok(! -f "t/tmp/out/add/c.html");
ok(! -f "t/tmp/out/add/e.html");

check_trail("del/a.html", "n=del/b p=");
check_trail("del/b.html", "n=del/c p=del/a");
check_trail("del/c.html", "n=del/d p=del/b");
check_trail("del/d.html", "n=del/e p=del/c");
check_trail("del/e.html", "n= p=del/d");

check_trail("sorting/linked.html", "n=sorting/a/b p=");
check_trail("sorting/a/b.html", "n=sorting/a/c p=sorting/linked");
check_trail("sorting/a/c.html", "n=sorting/z/a p=sorting/a/b");
check_trail("sorting/z/a.html", "n=sorting/beginning p=sorting/a/c");
check_trail("sorting/beginning.html", "n=sorting/middle p=sorting/z/a");
check_trail("sorting/middle.html", "n=sorting/end p=sorting/beginning");
check_trail("sorting/end.html", "n=sorting/new p=sorting/middle");
check_trail("sorting/new.html", "n=sorting/old p=sorting/end");
check_trail("sorting/old.html", "n=sorting/ancient p=sorting/new");
check_trail("sorting/ancient.html", "n=sorting/linked2 p=sorting/old");
check_trail("sorting/linked2.html", "n= p=sorting/ancient");

# If the inline has a limited number of pages, the trail still contains
# everything.
$blob = readfile("t/tmp/out/limited.html");
ok($blob =~ /a<\/a>/m);
ok($blob =~ /b<\/a>/m);
ok($blob !~ //m);
ok($blob !~ //m);
check_trail("limited/a.html", "n=limited/b p=");
check_trail("limited/b.html", "n=limited/c p=limited/a");
check_trail("limited/c.html", "n=limited/d p=limited/b");
check_trail("limited/d.html", "n= p=limited/c");

check_trail("untrail/a.html", "n=untrail/b p=");
check_trail("untrail/b.html", "n= p=untrail/a");

$blob = readfile("t/tmp/out/retitled/a.html");
ok($blob =~ /\^ the old title \^/m);

# Make some changes and refresh. These writefile calls don't set an
# old mtime, so they're strictly newer than the "old" files.

writefile("add/a.mdwn", "t/tmp/in", "a");
writefile("add/c.mdwn", "t/tmp/in", "c");
writefile("add/e.mdwn", "t/tmp/in", "e");
ok(unlink("t/tmp/in/del/a.mdwn"));
ok(unlink("t/tmp/in/del/c.mdwn"));
ok(unlink("t/tmp/in/del/e.mdwn"));

writefile("sorting.mdwn", "t/tmp/in",
	readfile("t/tmp/in/sorting.mdwn") .
	'[[!trailoptions sort="title" reverse="yes"]]'); 

writefile("retitled.mdwn", "t/tmp/in",
	'[[!meta title="the new title"]][[!traillink a]]');

# If the inline has a limited number of pages, the trail still depends on
# everything.
writefile("limited.html", "t/tmp/out", "[this gets rebuilt]");
writefile("limited/c.mdwn", "t/tmp/in", '[[!meta title="New C page"]]c');

writefile("untrail.mdwn", "t/tmp/in", "no longer a trail");

ok(! system(@command, "--refresh"));

check_trail("add/a.html", "n=add/b p=");
check_trail("add/b.html", "n=add/c p=add/a");
check_trail("add/c.html", "n=add/d p=add/b");
check_trail("add/d.html", "n=add/e p=add/c");
check_trail("add/e.html", "n= p=add/d");

check_trail("del/b.html", "n=del/d p=");
check_trail("del/d.html", "n= p=del/b");
ok(! -f "t/tmp/out/del/a.html");
ok(! -f "t/tmp/out/del/c.html");
ok(! -f "t/tmp/out/del/e.html");

check_trail("sorting/old.html", "n=sorting/new p=");
check_trail("sorting/new.html", "n=sorting/middle p=sorting/old");
check_trail("sorting/middle.html", "n=sorting/linked2 p=sorting/new");
check_trail("sorting/linked2.html", "n=sorting/linked p=sorting/middle");
check_trail("sorting/linked.html", "n=sorting/end p=sorting/linked2");
check_trail("sorting/end.html", "n=sorting/a/c p=sorting/linked");
check_trail("sorting/a/c.html", "n=sorting/beginning p=sorting/end");
check_trail("sorting/beginning.html", "n=sorting/a/b p=sorting/a/c");
check_trail("sorting/a/b.html", "n=sorting/ancient p=sorting/beginning");
check_trail("sorting/ancient.html", "n=sorting/z/a p=sorting/a/b");
check_trail("sorting/z/a.html", "n= p=sorting/ancient");

# If the inline has a limited number of pages, the trail still depends on
# everything, so it gets rebuilt even though it doesn't strictly need it.
# This means we could use it as a way to recompute the order of members
# and the contents of their trail navbars, allowing us to fix the regression
# described in [[bugs/trail excess dependencies]] without a full content
# dependency.
$blob = readfile("t/tmp/out/limited.html");
ok($blob =~ /a<\/a>/m);
ok($blob =~ /b<\/a>/m);
ok($blob !~ //m);
ok($blob !~ //m);
check_trail("limited/a.html", "n=limited/b p=");
check_trail("limited/b.html", "n=limited/c p=limited/a");
check_trail("limited/c.html", "n=limited/d p=limited/b");
check_trail("limited/d.html", "n= p=limited/c");
# Also, b and d should pick up the change to c. This regressed with the
# change to using a presence dependency.
$blob = readfile("t/tmp/out/limited/b.html");
ok($blob =~ /New C page >/m);
$blob = readfile("t/tmp/out/limited/d.html");
ok($blob =~ /< New C page/m);

# Members of a retitled trail should pick up that change.
# This regressed with the change to using a presence dependency.
$blob = readfile("t/tmp/out/retitled/a.html");
ok($blob =~ /\^ the new title \^/m);

# untrail is no longer a trail, so these are no longer in it.
check_no_trail("untrail/a.html");
check_no_trail("untrail/b.html");

ok(! system("rm -rf t/tmp"));

done_testing();
ikiwiki-3.20160121/t/titlepage.t0000755000000000000000000000057512650125230013053 0ustar  #!/usr/bin/perl
use warnings;
use strict;
use Test::More tests => 7;

BEGIN { use_ok("IkiWiki"); }

# Table-driven checks: each pair is [input title => expected page name].
# titlepage() maps spaces to underscores, keeps "/" as-is, and escapes
# unsafe characters (including a literal underscore) as __NN__ codes.
my @cases = (
	["foo bar"       => "foo_bar"],
	["foo bar baz"   => "foo_bar_baz"],
	["foo bar/baz"   => "foo_bar/baz"],
	["foo bar&baz"   => "foo_bar__38__baz"],
	["foo bar & baz" => "foo_bar___38___baz"],
	["foo bar_baz"   => "foo_bar__95__baz"],
);

is(titlepage($_->[0]), $_->[1]) for @cases;
ikiwiki-3.20160121/t/tinypodcast/0000755000000000000000000000000012650125230013237 5ustar  ikiwiki-3.20160121/t/tinypodcast/walter.ogg0000644000000000000000000007005212650125230015237 0ustar  OggSpXD|oJvorbisDwOggSpXD|L:vorbisXiph.Org libVorbis I 20070622
GENRE=Test
ALBUM=ikiwiki	DATE=2013TITLE=Wacky WalterARTIST=Amitai Schlair COMMENTS=Hi there, I'm a commentvorbis)BCV1L ŀАU`$)fI)(yHI)0c1c1c 4d(	Ij9g'r9iN8 Q9	&cnkn)%
Y@H!RH!b!b!r!r
*
2 L2餓N:騣:(B-JL1Vc]|s9s9s	BCV BdB!R)r
2ȀАU GI˱$O,Q53ESTMUUUUu]Wvevuv}Y[}Y[؅]aaaa}}} 4d #9)"9d 	")Ifjihm˲,˲iiiiiiifYeYeYeYeYeYeYeYeYeYeYeYeY@h*@@qq$ER$r,
Y@R,r4Gs4s|sH\V1Sv)@e~Я^vi[)Pȴo/&$nsDkv|cD"$c;r}5]I~СD<lqE _weR(LYGp&;,X_*KC$QB: #3iV- $Y"ݚ/XsX%P4az0Ih%`c?>+kljq{BnJ(d7_G&iTq1
i/]CE,(yǟ~~|m(jv_(ǧ{%4.[EmVfHq#iRř)O
p9sYnUta(b}?cڗ?b/!+2$~fLx?wj5@/MՂX[_ʣD"<tJLșb+7BA%c?ʁ
o>p%^6'O9c
m9+Tk?M=kZR-BbDK1JB
hh2e*,?;
n"#f~~9.ʍ^ wĒ2\Qs>ؘgO[s2FE
ڏ%OG\2!?LřV
{]RJHUf{#1$c.V
|DLľʂ?+i8‡\9,OFfJZ@?e*hk6
S~NIEf7n\H|֬ΖDY>18ȱwKkR+emOB&Kzr4Zfub6	Im-B]V+<7C0eBP3)^ʃQ=DT{T7MTYؔwblP`ǹQIl	V;;Z[Y
v=̬g#mտqMc
6; C8[i"O}?2+mwNFlء'UTfOZ 飜HnI*M46>Y_wY0}^L`ЁŖN[:=w&96߿7)С@/ ɶglБaE
F<&a/=VeûK3M$x4
_xVQXߜY筅]jg`[F+$I*]<܌F)\f9vnV[_yW`шF:wAx%s_c|
͉ۘLZfNPm͔ohaxe[UٳDk	#(/pC9|շv;:ʐ@u;ƂѲ/ޏ-icbF;/ݜ8Kq)&uȀm:C` 1j|`hg SlQd`ȺUp{mnj݈,1iQ0doZCwG/3kVblZ#@}}X}p3i
b
*h2Y-1%cϞjџ2;#}Nw
jЍg(?3(1tf`_ST
iJtE>lnu75W+ hvFL[A$K=hfiUz+K
phR+_Rpۛ4㶵W~mږùyl3p0G0')5sFnCHfߙO(C요Ts\BO_da|y[;>nvqoSst)"YdRo#m%7Hs%W~[R;W|9@J]mw=00o{umh@+Q;	[~YsXmVIsxՉX0W,+bwFr.w&H:]QSX9{);`Ei3ZQ7zG=s,OViГZ/e*\KŝʆZˣu;B9ijTBdx??Ðce)ㄢ p4ŵ#oǾ0g5fMF4?@Qj`7'/[FneB+
27?L}oԜچcz&&T;@oiٝKzL0ݩ7ZTGDT3KbTtKXWtq= WwN![mm@Ӳt^7f?2?G#E4gM	1I}r)@ibK|wzwg`
!yj+gD)
桮լ1`h	/]۴47Lrr /F=1f5j&z]c4!w7k@ى)_;3q0>JM.ƣw]^W;Y`-XRE!5zGFy*,!6o0yc
}Ġ	Lڤ2\pw|FCÃk7|ϯE篻Y
>Dܓ4W.qTxsr<.zEr^4m:F:V8|>`Z5T!/z^*f1^/@|]Yn1bŽ&`!2 E'mnт+IVa?L	ÉjURr&4rmRm\	8d4W6.R\98V*	}hżG,~7zƔ񙤼q@5WP''#I,=_Jzalrl/j:ĭǃ~
FND4z[m-KkRncJf_h:
=#Qm([ B:/5V	!vddRQ/Xʲ
Kh5
 j_o'OƆDŽ٩O::l1-5&V.e,ť0=;Q5>n%5x>u=jx ȍ9	OYLN[L%ZEl
cpa-6JoDݣOAbe[	yU:4GB]d>sgrE%e>MWXV֤hym`
XB&H\eNZg\7e_ߢ\b{SS~1rwi.OggSpXD|Fû&&&1765)zxVzI9	d0I+Ĭ(>z{{WL
|&y,r|Sru)Yh[
^h=+9VM臈WzvZV0[2@|[9a~1YHiVxNr2}Ms0OELniVȿE{aTk
_?b}dEUtn42j4Gu
ЛtrS
01,w5b\eMml]jLw5H:)ei
g2).6,c2˛4-`ןQK樉9S[7dp3l O+8էf
va7 ëqft&5&4
F;%6Ikwd,@2-rL`Z'gl'^[r:nyp*~nV+PwPkt(e
Y`.	prK#M44tzy[FS(t_r`)Amɓܤ \:T-%Ni#a}#B
6nY+
jmY*i69w,_I)Nfmd閕b6F΁lw{yM\'߯+&
>0acHӅ4E=f<3$$Y[VzYU9:$AIj:ZJܽ>n0bm*޿@uQ\eW~I3TY?ပvdyt?=F'^x|I*Yo#QW@W^ŕ[`"I%T{qAzڛ5mK/2I
;>K۲(DT(^/ч2gARR4At0![%Xm;җ;)m5-
@kS_;fhVX`d4wWevuuwm>
=#EuH \zdU+7:kŶ0K3+.%Euk'
\Ԋb~>KWz{cǺ]+N`+W]쩙3f:a^)]m
#;oŶ$1
LCa7G1}Js7h*N؇xf/A-Rp]k"uje6֫|o~RR_򔆬\Q>H,2{G>t͇͟y(**r^
Ow窄m2>I#TЃPx!3T'ɤ5WzTٶ7%i5qGѸ|5A<6G"GO[f6eOLGj
Cd,cJ3e
6:$WGJj7JJT(cn
ׇ^C+\jizɳJ4~
~H7jtR GoÉ[)_zՏS=n8^= N"j	ydx:N&u\׊@g$Ѹe{IuK%3[kUZ(яZ>%Hnvj:5*w[o913n80VgAwH՛]!0n[tFx6v;ӗ}͍C0e"t횴?T;	at2Ⱦ
o'Zjڕ:_
b$&;k6ÚJ4MZ_׍pJBJX<,,GL/tئ!JSivU%dLIc43'+L;W O&J`k)R]8
v
>Per6F~DW؂4y8+6J36YJ2Ւ>fO8DJ=3&u҇ɑKCah/Ng6B*)^@@viYoxS%g0ʋ"fO.B$5:']p}[~^
ǽLD%*MGX[ل|reFPn}bL> tPcZQP4nuxkH^c-ުK8<pdSdIGY
D)<<
MDE;_M@wGSEpVJfC8{-*V @)3% sD:aUjUF;
Ru[;ڀXop{lL޷%7
9hѦ6ҕ~.
f1Hl>,:+6'K:鬗=uBﴖĖ9g2=
FC`|ay?HV[}Աm*ms2}Y
>hCȬzѾ~suN6JclCbdHkCɗy?yDHJptNԒà.CL=<*enʕ9@g~[: ދ	p|.i׵jH]1hz
8(F"A˜VQtc񚋣C|a
$e>"gFG{x
G%*j@Prak[7$7y~ea5yDɹt1%nM`DXv7ķ*VP?>g%4@0$ɲsY.H;hc,e-rY 
i/jH{e^$G[4-$4dg[eV>IUx{j[PcF`$5i!brqwNzt>߻y$e ogtzwa|Lo8ðDXKOQRu2adf
	nz[ a(뜝S枤nڹJ9�nKݼ"}EӉ[{he焍DŽ6OiG#*_6RuTbV$Vd4E&Fxes[KLr
Ҕ8d3c;}E/;Hr苠hcwQT$+.h\3H*_5hV@
K^V-EKlBJfZ5v2Pʄ]I%./ouVn|	&\"hx9~&R­{rFMSߪ.nBfT"`78cHm0R6h۹+,S0V[Ω5S{#{WDY1~84לHALs7pOy(&E<_$i^.Lnlx#&~NHh5Y?0ңFK5>4;T5ljm%فn
jeCzu? m<,T+9B1(b]Csp{DӕF[r"oMtTW V)C=zžDdlFQlG|VfT/'a?:W}Q0Vc^ѶH:7[7zr4&9mZD{rd{ZJ\j+mL&մqV1b|bst~Y-Ǧ+2FhG`Vz1YYhCz	?dҍNAyYt}Z{"z+!9?~~$uupfsD%"y@oҟ:s&U&xx
> nTE>yG\bI>nӽwʧFyRPTeݽZ/7\1sw~ja"0]c{C<Ґdh#m_e]Ȉ#o=,-*OUmEp*P9f8nM8qLugŞ``|3;@y-86fLLsIOggSpXD|Zĸ'&$'020M#BH~
цIikGq0[IY6ZM.;A[?k] %h1]8@ȀlANdkPXq[G3f	.;p{IL*.8f,YN3<:Rڜ3޼w\DZHn:íRJ^Y]Kj7['d`A׷BGSsI5]:)ō>dzYwnu|#
WSђ.I<4p4[ZLퟷat,pNkRuAҩ؇LeՉ!R]-'mV#=#ڀrKB+Pn({Jfގ
ы
<9<LZ&&$o3zy5gA.KzyMDm19jDҜtZ!s*-q\PILi(Q
kaΩ7ȞJP,{lb-~D	(D":&axҖ8%<
L32qm%B0y%bI><%2`ʹJ
|Pcq}Lv_Z
ˈ?MU-%b[_ސe}HʸN"i+^499[PEm9a)geuhj8oNFYIȍ_lF.t
%-RIqhUQVfpV{a^7.X2>S2xp2absqThthnikNuquČ^8w>KBg`H)AzXI1h,Ot22&8T#ۍPeECoMMOCu(͂gv[v-6Cx)>gz4ܙ3AHۘUHsU&
Ʊ*)f-|\mwf6𱶎}%%~N.HO	'rZިtA3ͧF]ؚƜl

>>0^84(P{腇;|$4=HjK	lF F@x>ыK,`#KLo}6lڈVP7
(pc5Pb[	fYf1TQ#MR2IjU?zW[EIQ2큢lB|JcB#j=sFfQlRvV]WxRml=
ɄW]7p
2#0kei-S]ya~S[6ا<;v((镖䜂	1͒ƗsURcq)waex-͒F,jSN6asu/ڋPsUʜxjhe	z(KЍ'\\\-SfkedG0p0,/G!J!\w^gt6H^,N>?3hkNa>>*]ܟ
 P@+f{_0ܐtDi'c+uJ**LdeURU.J̩D[Z]-Ӟ\M"M(ZH뇏_d}Be#.&ұ7DPGRLq}Z`(óQPy]mh5(rnc\)XZQ:UP	3ʽ뜮bpEB<|I.[L2:І+ʲqL@:2O2u泦?cM$dGչ4
εȐ}%Mǘ|8*~tq(klOUd[qYݲ}Py܆!-ik<(NĨ'
k@`m E!}2wKVt9*vu|~%m6SjTxl̝7!	'&zJ|k'VG{
[FtŊ[#tvD
ZY	B6Bk¿m0~ebNW S*-y?{ϮRB7hLm8&i5}3o*G@[k ۃzb[xSr߯FQrD<\=-F,*67߄3?&.rI)mnjߤC߯p9`}t;qWۭ{Szh;vh	gW$~`Mf[%.15'bP?9	OJtFG[AO%+jk_!}.CӦG+Y$p9?>QPXz:pk~`-bw*VD[dFAԷd#!&XGZZŒJ(ynBdz+(Z3h0(!\[X1NōoE	
e3	
0l{ʃzʰ"J>P_$dGb
N#6=FR//ӦN1(ޙYGOggS8pXD|ƽs*+)110'.058@izpp7hZ9m_2,̚~_kNb8LB;-A#+Y$8gG8]g<†".hV&B4;,rbglܬOttS3Q̆
xMƘl[6탺]jD^V1>G+4y	eNnsh|e3ra~THD%;b oy`	4_&|ſyٳ4?~xP*j]+ i@gAtD%i9UEm3(azώ}1kzbb	1aOt(]|DG'
4ArRnG()8;4rNB'2m37[)6B@NNuV>*U]m<7+_N}?d6K[vM` Z.)C
I&}P<
:-1kΊFbK
:Y~vX띣Y&F4\߄oraV״N~_;۔Z^ht`~#봌jC~=ŪAq_WF;\n϶JSLT&*8Yk\1
j◄1:8A陞.ֻjL:q݁xZAzoi(E@k@3	\ZNZe{{8Uڗ	 ZK@]D-
s kjLzFf`a>U^ }٭ڮw驮N`)xLWSND.ATZa1M@{NiDB«pEx]Y#XQt3-+ԳVxCgJD=GlvZ;H`D3k0J+dvfM~*5Tmi{ފ5,B&p;fj+F~!AIhQGSc9èVof6:W&hr{(.\MYZ#$9/#{?ȃZZ>I/|aY^(%f‡7?~EJްm33M+fI1=
NL8_bۜ+0=vfCB}uE:O/1y)+'hyXReloqJUZy
qWu?3f)W$&KH׌gx}]J¨Bz{.CDlV~(^R)OXi
	i*b(֮
A^Lj4-/}Og=j\H"j)֧9E-LaE%~SW5GMDNy
퐽D U/^acb\e4uĪ>4@VRSp>Eˆ;c)エ8	^kUD\L~$1>h۷޻3'LCl*Qi!hukIs1y9_܍p-v3#TvhobJ~gŞOٸY'ǯ4	YYJDvm*X-F|ڔ?lJ-b+z{M44r~8bAC8*liͬ#.d8%:.iNu@UNkB_ϔds^b|$hZѴW;:)馞pL.6K鑩4Jt">?$^w+}h]dݙΡQaCAWzre=]~ P,Q:NG+qiz#|.F1@G\9z9	t/OJMSMa39a9M“`ﲌFEu'ݛcEP-A,ΫEH2z!oˬHf{iJgl)(n]6A2j챩2Y1ē\@9KySsox>5>VYK0M?UȟM~dh{l(Jxk,݉Y\*[\DR=\9Q:n,F;"NVD}>.r
rtsN"۴JDK~2Ǵ9[C@jzyryOiv5>6́h.=H}ocY28$ed֒w}evd%ggR(9R6f%@mn6Z`2b]Vhxڤ'FPLOR5N.	V=̓A,?%pDm;z'o=o[;v"V)A&цkFj)?)n[7k<@iūAy?,w/?5)(/+
׹4U#kd0r4dʸbJ}]+t~Z95LonJF6{]H兗{]DFؒsA@1:p>w$k$c
GXLA\)80|F~?vr:F-cb}YEI!2
?b㓭ӓCrD,}g~!Lٟ~fHM!d׶).FaHfyey5,ɈyuIOb'+x#Y_ۮ%9?ItL`_vj:eG*J
[$P]V&\Np'/M
ꪓb*Ǒ ^dOggS@pXD|(Yϻо**)(++(024545ʿĞ933UF0s
^Z 8f
E;t\1!ܜ;{LkUU:Ix#W7Ʊjx\X1bwv}0T~?ӃV~Q(ul߼F[bdxKdZ|!PE~ɷ_ NE06
eUbxew䛔_6L:2NB>(y&jHm3_6S|"2Eu7Zk&%9BCn^HOUO+p4\W({b|		/>>ZzbV\VܼQq9Z A^}[.e+
#=jF߆+|AIqPoK-4֢b紶+08Zcf" AgESeαCbI^T}ʾ0q+@L))fy_1jD>\1Lc11	몾ԅg'ICЮUŒpt+03|j9b%SXŇu,#{ə˷#PBdžԆۧ^@o?YmËpaCQ1I3ոn;+igjKϛxzƢ^_bErfEξҦX()/U6&|N?fKSUK3mTeċM{t!8O:,CݐeN$Tj2#˄Ѭ#HߙsЧI77VSٝZ&9lmQXˁ΍>BK
sWw\
]V;߆l3Z9~8{:3`:*Y(\qNjmʎwcz$!4OYpyIW}/Jُ9޿DTdP3uEɐy%-qB-bK]c#fX6_:݈9KMu4HT3%G6|Z
fsV3bΣWV|e7ݺcUl,ÏPR8a/qAJuFwQy\_uE새%IDm<ղWu瘟Wie@h"v3e<1>ؘU
: ͵SZ:~iݐ#;-F~,icie@\VWUܠI?m-2[ļ>5@m1g*X䢂{ٌ(ij
}bIpuew1ԡ"Bn|C|7I2J.\ VHL;8ňbf5W30Fa=9x_sߍߓ9L2<1jc'
TK!	ʽߕ]N4\0/C"ҵ:uܒ+J5
D?5+%# ")5)!3;ԤI:5!#)T=.eƿS%Mn6Ode ^>teZLg$߾z5rŽz+I$-
4NwRmlX	䏂EcCh+5~D_zM;I`986#ӦSg܃mDW{>C)(h03e$y4}`_`׀Ƣ:[ԗkw;ď$+(D6ZpGA-
I9Ey&](c)$}2,<$gP&*i=`[(H|nY
W򕇜^U:
ӬlZ!lfqkӟl^7#oU-<.)
s*.y/o;PNDqF&A
w<~@G
l)x}FJ_njvVCđC	6F!{Sߏ4'CLyx:R_"3⦔gj ۼu6aAwQY?sxro\IcР1;O.8~ΚX-LI@F)0mϺ?NVz;/i/,݇00^{#ZiXԕŒDg.ЉUR*+1|Jx"Ҥx+9?ʘT*An
9{TZ2]I#ΤŎ;
NCfZȥ)l};y:\(0R[$,/tzm-8{$h8xH%݁3ʭ"}mtv&YӒִU*9Gp+HgP즊1A~k|ǰΥ2[V
bi'3	oӵ9;vWqTՕNJ6kzbJzܕu?x.*By	VXeePi	`b?HX-^aiszG=_ͽ^L0Y7)Y_'?<Ǘj"pw,3?:XMG}܉R;~6GLg
RJN&
ydr`aV}KhA‡F^_s
rg蕌>Dӫp

g"qWZ2,;vf(1w(km[>iį{z-*0p}[M@ _uŇN&MG9׳
i}A[VCDeYUWNMlسb%Ս=ڶ	N*C:/thX@\-CAX+n.T.%q*q83`?P4@ۼ+ĂVA^E~ԛ^;ebIa¹ۜFuyix97`:SguZ(
dMriM1)[РHDcPL=<:,BF6ߩF;)^EMKN[م~@8Ub\^PmP>>0P*P}}EڽDfڃlL]39N'SM7Tތk͌;=iN߄YP'ނW3^kOB3o(ňSQ,qLZnv{;K1CaϦ4^@x[, \Apn|0i"kKcu!=]MӌYG֋T4x1P{
ҵdbg<-q]S0>
oSܡn޿DagyX{\%q*QOV~)+da	RZk
.ZL9R!>3R&X6w,
tUi+˾o4p[#qgy>LdY	Xz	jɕF%ՍUVu'u~ۣBD(`F]}3aSsY
OB|J&AձI9#ԣR]
tMc^IzN0;>JI2 M]P16`O4=_Lo9`)+2d[ÁG{;|P>L)NJr*K9ԛͭ:*	y%vUκ	-%,d2O5
{Ėx]7/Pd"͓ճdSOB/s,)j5YФC|
	\d®zF4$$^0o_9,F3y%
jEQfҏwg"lW~
|V6\ytR۹u'E~%7`Fj>T[l/TpqǴeQʟN+&]#c(@@4:e]y¥'w.ΎTE骕z6g%%*ld;P
*F]Ze3
lbII2Ϻ
N?P`&స^z,	˂p{:,"iWI0x'
B*;%?Xۍ>#D[kN<u0~חPZ5e7qU匿F`ݼr/_Zw`̞U֓.%VĮ_qz5)QIX/@վ[dhsDqHopQayzYjEgI
׽
[jS..ܤhrL`7~hD<)-'Rɝޓ
wkU;:	fm&3'w>R)9  Dkveqk~Ƌ;JJ~~^48V<@Ua:ڷ}~/Ǭ,{`gХ֥L㦩7<ň6!jX(5s"QCo[fuIryld
%86wU$Tq_yP3Ž˾Z댡_޼kD~t9sϳJany*X"yUvdcP:2w$	ށ	8,bL6-@[%Uu
fv"C7z
s;bԝa]j\b}V閬(V|'.W0G2M5y_ySLX_uuuSH>7B4ixdӴЮ7z4FR5!_BBoez{3yo3>7ttxc6@5r%%g)}ޅ#u{n%ٞyyS v9IPL]YVL&Pr?5b}6Kz;Ā36g
xtJ5Nzo<-^Ta{
%0̪**ڵ6>3&
(:/!jB6d
=(t[8_TuUH)PwN?;'ifX
x֊d(p}ԋ	q5w47g1t,Ѵ^|ӪzX%.Ukӥ685(묭%XV[ߨ&cr_~v٠kPۃ|5Ǝo69yQg\V=C$1y}lF4Dܸ
0wLw:emyMI3p>&AV8+
׽(	stts 
stss@stscstsz qHCNzZRbec(
,xmo0stco
2+e-CQfreefreePmdatwidemdat%Q@N
Kܡ:ԛ'% !p8E~N܂-}OZL3@ʣpX-!XAͽc!dglu*B:}@=`[Sgv߾"uOu /zX:&
AW	*KNAf|e~CCGn~A"=-+O5*߯7Gg_[@?b3s^Q2#Mmx35H[Bܰ0%QtfƷ*ĢGIK|s`-^!4#z|1}l5;(-@c¶t{
<`-OqA_h>.cÜF;>G9+
:ܓH0\&ɓ!(i`Nsd}͢ޡmi9i;}
lImN-Kh$IOX5kCm/<>௩/TX@ԓ4O//vr10E"6{3^g/kZbG3#rӹ6o~RǕz4JמdE
?#L#=OqEh)U-c59|2N"~'P#oԀܵ^϶sz;>]	jۭԛ*i#7`XI|dHw_Zviu}j?ժEboS6|j:u}1O9I̅ŗJA#j0s
|ԉa@;U|zi&JCU}0FNf5cĩ9*Hѯh I"/mfsZ$6Sr|M}lY?Q#uhoPe'_,E)݌Roe9hґ_?|yF]&Ic9#>o#6lj&43OoRU
~%7G{f\1NZ'}8˜~1nD(aYQ@=R_=9h<@MVy!bŸ-pC;TrCilD[/Z~d
nZ+,]X7Oҍg%'f~'.~4c5SiM~)vv{?LX韄  2y>-=id@lOpR7Ao|z٥gh[B8c%_>_<֧b>8
(˞1˷O+2͛Cw"-!^v-;l`ulͫo❹}7ǿ5y, 5~?
+

~ ߩ9no;Y!
܏|ٷ8@ƙwP{ww[FZ2G[mc!{IN
Kܡ:ԛ!p!A@_+XK԰9mKx9mKԷ9mKԷ԰9mK԰9mKx9mKԷ9mKԷ԰9mK԰9mKx9mKԷ9mKԷ԰9mK԰9mKx9mKԷ9mKԷ԰9mK԰9mKx9zN
Kܡ:ԛ!	?9RKˤK.%O
!D_N
Kܡ:ԛD!
%;'Fu5`M#*4''|THT+&HĜF!G2S9/@1yAƥ-~:C(DNITj'FWĶ
X3!&P~k6O(s_Myr5Q|";okGb ;\kkZеִ-hZZеV-h~-֏<w "r	8C?@v"pq[N
Kܡ:ԛ!Z"e?5-TH.[kj_DemRb3ZkZ-}KU-H~X9m`uAkCmkW!`1	DJ?)	̾1S1Sg==t|d|D|F0|EٴY?(:Cd0ϒd)0ٞ.lD"ET\.8!CaU~DkSH=EkWh-"c-6>ˌ$Z_LF\Eh7`rE+1ܗ1ܐ; $?+B֏#eOs֏H:H0w$4^TS
T5
@5ewhZw&,DB$%NEB'(<9cj
(TTic_;0򈌳>ɍj(i
=>_Laj2rcjQXPÇP_`{/MAcD<ͨpk-C_
ق|8{XDCPRs1Ňk4ϯN
Kܡ:ԛM!%*%̓IS?›_P0
!)
1SS5-(P~e]K{7
]_fO}"+*_
׍C_ij
CDuE\Dv#-8שQ/1>xw$b"MZD\	Z׶|JB@z&xmKT
F@ICRduX.$- ]$EpSr%3[ւ{$4ʡI*e="m=_!A`XeA1̥T;j\:TIqYrr1="[lJZTCa #
2Dj
6Z:}GHw&%A)jU~+_@SP;:xs\w$ysN.!{O5^rA8bRG%Züm^/5=%f%5OI4#^-;/&q"~Syu3ưGjU@,<3$E@P.07"i/{ppEX׽݊@^X×1wA8{*Y[DFHoSRV_Ĉ>LC2QS~$Dnz/]JqN
Kܡ:ԛD!-	d0/|Cd"QF-CTe#xKv
xCZW,I?Oɩ%!B>O	5ʌKlJ*1PrT;e1Py	I}KVj'_כb
-~Q8f:|ju#%׵dOj:qHbxZdK%DQa.5{ʫN
Kܡ:ԛg!1Lr }N3RjZ%}56Sc5,th>~e_??rHLbNE^NԶ@c\p!FEe2~r.ĿAR{Vk|_/$EȽØy$1@P/"P-^7/$E/{NH^b_N
Kܡ:ԛ!52]{`^!bX>"eᰓ(Ke%,	0-PM˃)+:]VGUk(BZ|"/66.$''-:՞kgSmj%-bUIɆt~(}F%!MB5~ Hb_먗ʁ2'b%#?" qjRh1P5.I~.%JiiLO(wvH3
,DAߟ
h2%%O_t6Y=)Ag\Mw7LI<4I>Y2o*
-j:֮%'&-щ1%^E!A1օыiuIeX1`K.
`1QU5TUO'|i%ˋKk!)JrC'\U8|˃s.OQK>~H2fTϿԸdC@9w,>ФrY sQ$?@ ,?$樽,hNJox6[0e?$˰ox`}7()eQ!?MŃa&|9a>^\]`wGlcoh=O8/?j8z_}Z-g[AVj7Vct
UOfC?XNcP7zM/`weMj{VåK$BD%²ڋXeح]1mC{jkIkڃ|%HvBK/d
y$QRQI$ULq'	)	/3%F\=&APf2ҤbٴK5o&2.#62>>Н;h
Z#jSQ؏욬kRe#Q؏2_P5Xc_q}"?QYbF+ўI(LF."e\[CH!ju\F7jAD%J	nBL$ccя(eμdāM4A~Sc.j}ᖞ$$DgjHN@#mX5fFYC5eI-.r#Ѵee%2`ơ5!AI?M~_Fb؉HF"qbl=B^DyW^/Yu,RtO1h	xAӨpl$L0p~-f"b'IO6.y>%r@Ę6|Jql/jԞ*(
`yU|v/.}}䇚yuY-s_" +qݽR͌2ґu_Dc@kBaC\%@͜I0N
Kܡ:ԛ!EqPc
T5A
N6[	0QP%ECU
z
CSe3X-CU
CR	J*`A&U4r>(sćZ	Ol"F l#-#
nB5.aPIDj!QC5$U#XJ6d`jxrR[>-SFc!O{C,*
k^Al@Jyv$izrbWbؓx;bWbAf"zy>"O~- @{wb+cw刀N
Kܡ:ԛ!I
Cxʯ_%U_,=-OUk^ U|MV7=oU|wǭo"j0-a|Xz-Y{[V)79__1Inx?-6
܏u<F#!C΁B0,٠dT,.a1DBD\Q}
3(3mt
kK*YTxr==ZqdHqYur$y:C<!@|g"31˳!Rȳ`iw~I,>H>/4(h,[8_x#!$Ib$0sMg2>"9<~
1wIZfv^6|XN!HnyJ.!Rwur%t-NsrsUqZy;%/[pd>ߩ`
~q,wڋW̔nl7K?@;e)f7](;^(!@3SE(ԠDQmJ;X1}xg>vGxm?w*g#O^UN
Kܡ:ԛ!Q!a!Ͱl~&5imAJE
>?Kq׆W2KdjB8P8H,$MeE>#MghMX],JJprKf( {%SϒAȰ|)jZe.3Z
Z1q}ym"?#I/)p>cD#
KZfYTZʃ4Sx~2T#-z
.c[2)Vo4K/!Al^b3ɪx0w%y!ܑi7<DG$L59$^R}"fUmކ0ErAe-"?#zYb-"?"bR@Ac*Kp"-\w$J]:
TzOAqH0!l$"Kf3_)!!X35:K׋fjS5r\D=h"G
ioQᚖM,^rN
Kܡ:ԛ!U㾮j븈_9*f",/fO,DqOZN?_&-feD
<D'?HJHܓMa(g{(5ڎ.e×ő;m;IxJ4:R/-C{0j5NI'_D?b!pi
PR.\G&rxV籶2hh|,:~2f kZꜬ.zH2X4vd-Z{h2\#jGm>xFUsehkJ}>TҥiLN
Kܡ:ԛ!YCCBekW|K]x.т
c8
h(ʝ7N1SI#ߦM'ynG񝪼a`Fku_^k5"@8h^u
 w['Sa5[-{&aL5\G] g3;4(xt33Yb
A2+T܇n
ی`0Ņt|
KF;*5=C_ÂRR	H59H%  ^#RR	H59H%  ^#RR	H>#DhdLK RX?$p~K 11K"N$.$O 0Ѽ
4o !8/Ǚqғ.Fu^&mEY!hx?2)<
:k_Z59Vh*+.ek.euĈddK@pO#{FX69*ANȯD'7X1`3*
sQ7wjh:~D6?BH8R?9!ndA6flA9[
vf r\{5mU~>QڔvkBkێB[	["Jo~#caD1N_[_N
Kܡ:ԛ!]|lwNߘ6~ܫ3ѱr7kp;~"[@zh]ΛG+^arȇ}EqJ۵d,ڇii	>Sa;7y+[~EMqLmྰD(&oz4Oy$ut<@2;_*7#:`o~`QN<WC%b;i+X͟?׶/k-䗵y=lp[ ^k~^b9mo) p 1eޟ]r2>Cki@!@%KqÎ.Ȣ-0^rDnQSe3/_
\Aqlv d7߯I5~R۳_jL8\GբjGs-;_jjj>6hѾ2&~xhL6Y~÷,7-xjco-ק`!@m7g@A3K\G\tDВin\е3f~2Λ6ady4af\1)")"5P<I	IbR@Wlf͘cW^E+2ή"߈u'0݈sΛh"K7J=`7s=x|Iimc*(6TI9)LfGݐsN\#K_ᰀSKizHR~F[z"[_Q~ՐNb`Dw#OIKV~oFIM@w2"a捑1ɚwH10|7
gq~Tƪ~"@HGHR|F#1%}|qBעoV{^qxPl1o#hyQ/d[KoOT:yw7,er
ӓ\)pFۢ9&O*[#ofSd$o."D6޽6)iu?4yy4CD=>hF_X1?6:†~n֨&MN)m6dgE2{[3ud74S:ߨ]|*FsPpT{u^m"#FCPRuD+g8UM_"#[Z#fgo4z@14`K4{(]}} H%L[J<ܽĜftN 3;V^ƛ^_{y4b{Z%4{AV0fqF!JJ|y}sw
4MB3
DxBi{#’~F\إ);{&:apY9E=	iMx.ZSHcUw!{i#Ck9ΨQt9um;$ml53?O@AX`aݣ_kP*u;p0E]%Kf9/H-oL-OH-oL-AZ
[Fh,SWSMl|׃}_}=z*FgƂ*$'LvN ڶ8ϰfG~FRCH7<:c'ĻDc귛L?CIdQ3m('fZnxߒQg,z.sQ6߼nޜڞ:ZR@9#.JU^HLM/{'y'^ߗY!Mt
H+]],_=a^-cŦfI"/tO_PLy2)[i8Q*X/@nW@Ӷa_^	g^Z0rN3/
2} gG#ỴVsiXU_2.	UձI}(L}ƎV{48^?Ԫ<wMYb]A{ಯ9ٶAߔxwMo2HsP_/-ioijoM{

wې6@o۾t%c,R$<`Q^ZSI4}l aj0Rxe.*#,vCr"/,K0
._5^AArv11-TNL@7][p|vقG*v
&N?%?qA׃:q}haɜg"֮a7tp'MtMq@Cy?`7ʖh39\޷
VI֫,'Ń#n%_M@FkM4qCOQJKdZv7??B҉"
 	)	xVfP {mTNCR~Zuv`#_JZSiֿV^ot6ާ?o7aARB?M4uӲȳ|~	4iOg2!^N
Kܡ:ԛ!#~E7"?` E׌x3dD;lRde[c1k84;*!)2
DT-깸R`ܒIrPRڼ6ߌ-;NF%W|J;X<ڇKj'&Ĥ"C*3E:bR^2q&TԴ@pv䊻Zŗ;Q?&KKdt,}e>2IX>{,aKX3X1\QF!A@Gi(j`h~Fu"cߌj,-RG[uȔmYA=60fX.rscGQ,Ď`8^25g.t.O q-hĊq!ؗl`jT8C$C%c9#R,rG$T`_co5XĿ"e59#TSP⦀⦏*Az$rM$hy/$rQ"eHk|[:rG$$rT2<9$L$rHdL9#2$rJj@R[r_H2l@N
Kܡ:ԛ!	#y.E.1$QZE+֍G}iEHE
/KmLmm+eAkZ8#x:
N$\䙼3r^&L=-:c|`d#6F\c!l8d$eF$"7RqT2Y0!A?i)=Jڰ(aFԵXEyʹ"R%#
q\Tq(XN$^kYcIĖ4888x},hq-.[b_-`Cqؓ~2n`1Z|q%,\[
[}`մlj22mž`8qܒ_V
5.h`1:6\D:A~2մ4i%mo~VK&[.*
ן
6H/9!@ab&#!fTlC@Y_N(?"ͯ2ommoo--oo--oo--/$.qdzг*&Y2s9#yh?9l4,@PEX?sRQpwH4f`1v!q_1~>i>adeZXc/6
ke̚
AE4h($1xN
Kܡ:ԛ!#i!@AH..\k?N03Mq,|h3!9#SlU5)L,Ȩ6B1-}?mJEdOaMڙ0·3a.Qjj\ȃl?0N3Q*GJ& uBԼ7ok##0h`1Yd4RP7I7#E^`x;;߈jQGjpwwpc(rԣ9$!%I^0˂N-MPZH7$MJtYeI(eexU.'%% 5jPCXeG]Xn C.iRV^r`zyTx\`Y#iE>|3Y5EGOYI$I>	i$I$GOI$G$>YKqa8JRCxp궡'h/}F%tkmyA\*vt8&_bĿigjkQq!֊teN	Q uHDGQ uHDGQ uHDGQ uHDGQ uHDGQ uHDGQ uHDh?`<5:]e)V6"tm=B0_]Ԗ&+9jkO@^DH?k3y?jm
~6cv#AȶUʝu_{Ii>Yڷ
K/ΈS 
1)$2K
mL~%GE@xR|e*T!@gaAQ@pI@:I:46GAH)L	"h`M0'
!+ÐwY(!2rg92rgH-LD~D~GDp~DDp~DDp~DDp~DDp~DDpt`sm뚼@\vpEr
/Pw.R

FJ(o%F\ρ1j0E74(4ib@*7 pck6|
.!lPԗiOlqv/֍.u,kZ?ўhP9D=1^?5RWPLF|sg}vyyytt4O1ѷ%tr`Bj0|k~=I0l:zUstp`⵮
LGӃ8q+6
{<|l>~ŋGO}ǐ*De]qDa8@,ʢZ6(1>ѣG/$v*eU7v8D?s\qQ*~Uḳ+Ƒdz_!~w4=99ȵk?̮MpO;yi]o m8VwF\w[x5iƘ4m'	_nq9eH-N@"!L!$d[=klngkZ4Ga`ωUj4U=ޝ3&ՊKeuU?;<}ǻMd0q΋2 i&kJ@b4ti?s?p<^.i2!w$AX8kZ?d@!U +&Ib;aC>n[tzVl֭V-{Tƪy_a!Z$>
Jt29???;=Hܹl:aL7W~t!N%;"H.D?wE^6YNcI~/.I) 1,ųg}4qPո+r|նw_op8=8.h|}Wk5P1&	L]K!&i1_
,(JwI@BK,d(iƫ\SCJFpej2B<
-_Tesuoݻ
2h(Mf^Kח^$۬Ã)cHH'‡d4͗W
m~ ^)Q	ͯ v! ̢A6<==LvŚKc'Ljg0x|eB
KєjvS<9z__>t\$`||zvo%yEȣƚn@ʮDĕȇ<@qjHmGE(D:r鶔+gsлi+(3r~՝ztz&XK N_L0~R5秭g -o2^_̀0\XN5ݤZӢmZ,(ɒnVnwt8
,_ӂ8Ӧ8PVX ߀Ƅ' h8C?i@CVLٶEe8x
wIq*[a&)ބ>KkYEQ%LFcLB^[b^95~q.b11eR)& J%Of35~yC D4mn#v6;_w[ÇG;#t@Tp4⭷ނWaZ."é2+aQ0~lX4pݮ)GW8!h;'~b̹%Ob+&5-tleaq]U(ˆޜx.~Ѳt|xHXK0%x֬lxRz<9č8	=op`Yr0
!WL&CbYK^c_ǸiE`qPrX԰PfHgx@Lljl
w<"xUY~'23`(K8,k͵2h!ʇ	(bwyqU;^Np(etBpZ :=9|p2mIay-صٴ+=ul80y)0[-7PŸf R8N ch
a2Z?p;=>ƒ1&@"0h2ĵ 
S.SQp_tA0g9řh wld!OBi-F$L~񧺡f(o1-xrrla"e3	񸸸x.ݻw!X8tf	H
aU&yl@"L.9
%<81Yt*l] OxnrTiyT1}4aK1,Ԗ/HXP^~0!n̫E|ݸw)C|\Cg=th2!clCÃ)NbA)`Ql007XŚ"D1]t3ⴣAo sAO#H\]÷xLCy2oo^\\^\^0X&%.DL6lx489g?Ĉ}?d쳇=CiQTǣఫKhrB"Բ
`8)Mڪxt~rypW'	kvoHYwagz|ۆBNҳ{W՟7w?uX!,͒lyj43$xbqt} )/ajq~vY	;xbMd>YC[`	CFP[oA`c=ci[>ã	"Lb¬_~zS|ù/?ٌІ"~1BMxd
d(EԅsM-("X8 Iv?Mp~B IODk#e:ØKPeq|YXGI
/Ϳ;
'feQ(N
cx]\
yŇ0xĈ+7"4#c&*onb2⚮:>D~M%)D˜bYg	2
0
`į;(l2`.a{	C7
)r5QUl("?;;=}w	UBUƍo=Ldr ZLO:o	9O#/m;(\oc4%(&gUZ
i	+JnX*b@4+@Sg|Xg"RVWWi@L0{D=r89>O>(
㖰Ƅ2M>kL6$8ll2z(q1 >>>6i3E` OIrUuF̧'X1K̓{O?{cԡ_
hF*x3l£h"wo+E6R0qxw\׍2no&aoZ28>_}(V+Bۿ+?'k{wɷycK}582ӈel$j)^|SZxC*ڦV44䒇_
.8yJ}0H,_Ҳ=lR?FʶiK|#qbNi888:|ŋ+hE޿>I@IQ̮Cu^ode>/^bNl23qƣsCOyMFL4=4Y8=?\q'>w>4ծ\
?{P0]SR%k-:χ5M*ZbY4$釯Aלupl8%){P%2Y5P#:
#p9Bi+%j>硖ʲf1fEakZ`t!4
!>0ܻJ2yDxvo6;Beݐa}Bn,bLf+d92f
dTO]h8WWLzw|ɭ`ۯxAkHt<._\Wr	%>|dZ_ek. 0qvzhzħ"?|-^u)*OOhp!T&Pl1_DD,jx*8ӇO;ˋv{ptHRWX9I@eiC1fcX< lD	܆(yo2}C̹jKJdãBb
E	Je$Iq3W7hN-=ಆfj맅fbECB~׀'U1bb+fVKIn``5]_3w.2"j*!kn
,SZ-
~}=Wb[LP[!?SUkM)Nʄ{.hZU
E\zVEAdY2c.1l4JW2O&\B5ean7ŨBO`跛5
Û_zs<`ce޻UFG -n|q--ǂ5_)*S<{	,Jzn˩fbZE%RڛHaŕ}yA	
d&.
MW)@$U+8o36‰1iMGbn
lWk.	,qn{R|X[3X|xpHݯړJAԘKLiȇLE1O9$Yi/[
ݭ&jFl
VL?{/;_-WL7U
ȿD=D )I|`.8Krǣã#H&.hjJW92dvl]˱454#xzp@^
siY˪l2Ø/l(A<$d&YT3< U#S,mB*1m#2jRnWF7ħ#/zMhK`璌|g^aa85Rx{$٘I?Wj´@E4`.(+YДt6~nD㕗>ּUijB
ppzǿ?g֋%FAjGG]2
lKzBQXRBfR9ԚQlY<$$$&3Y0c6Hr#ރCcH)[I@fnlꙮE\'F2GME~u[Zv
;9;;d)zdac<1\CPf;4U ,!e01NZv<~Rs/pWkݫƎPKEfqT!3ͷ{obD1nz}3":)b1Xv*x$iIQR*AK{|{08?eȪP2\Q")*FOQfva+R %V_..<ctaI"&*o
ˬ!X
b6Zm${
kf4lMŋx$xov5czU	ςT'
/GT""1xXrEI<Gǚͮ"3&Xbz YB8^HooZ~4l_9uԎ*
b5L_!,ė|뭷q
\.	K2$<$#;d[U$t#f?onJa8>;= exǨpq){`@AC֖a,hC˺0CR}l~w݃Nqv{r|hoN4"j6f^17=~LNS9@g`Y2[*4g1Ha5-|23P0$ՖOŇ0oȴԔǯL(@Ha޳tH2
˘Pb4M"l7&
4
!CSq IDATYŸ
CxvvK@slު/X1жS=M&|>[]l6k2ˉ:p|zzg裏x\BK8~⋾L՘m9YC
`٪օD*B_|TZ5Kph8??Ɵdߦl"H9u[v{rY\[3InL7ەJ*}(1DEIgrk&^XaijI6gqQ-ONp8`=yV0l60,Qx@z怖X/8a69VΚLGWWPlG8d\tbgKA/5²%jofKWWm98XJ	=?0ԛ1iy$i2D	}$)z^^n1Ն5秄PxJh0ʵjh}!(oӯR  e;N֋9qzZK៩%hօ4+1`]cs7^*/ptS"bϠDKƇ/yU3@lrRi"؀@)%m`a=ˆ	t~amzlHOKFD|$h-&P\0FhxԴn[mWӃ1)b[ONȟ_^^{
U~9|f#Q?w$gXIlt^,6ڕTV}ɎE/1LqWbvK-a',;.T'|%~=o.J& SGe]c@D,h+sY)+h*:D	r˔1ZTψTCiK;
~)X3SvU,7"ĨyYh-AM*"58{[e`1r!}sJsi)iydQ(˄8f^Gj{+_3	Czj>0@j0>ʯ@=|W>!kIppy1Bpy6rU4]o
*?8q€|A*: 'O~h5_[g''CU1
(
h0xTIХ,UY,SPfKRЈÐD3x4!l2q0>D-X`#t	.`+b3u 9Ez4I±F߂TTqeǢxr'1dnA$3ho˂(!+::v⥁Bf͸QCnv^NA>{T_ZT`f	9H2fv"fT17MK^śCSz{;y'$M8'"
XBqea3ar Ue(L!Dd:R[,:' 49| HaRY)^4;"5yj<ȐbWJg7
Xm
+(Mڲp%Qښ(/I`0`Ɇ>X/Il{`x0p#bJX:V2>Ē%\oj%HxăÉ'\1Q a|G^
;K]5(d!X+*wEElQw
Ut,`ψ@a ?A;GnWJ߄(-,&M}d
j}}=8x<_($VV"]@/lq81VŋrU@%œD¢LJa!(;sIRHyK
+'EQ (b%R?wbFT
C	Qm$aUPf|IB,vi2f
ᄕ p}3tRNQE}Kƃn]芺<8$0&q{Y;>}$pϷr2=|zuzzz49msYxZ ĢB%5&u0@G܇JOURinЩՙE;^TS)vVe*,	WlOx̺XA@/TTUc՝;`vddO"Xb]'7FX
g=Pt`X2Yח/|חD9		
-zj~,)gc20#8e]*REkXbP6zPYn6Ў8rYMXїk,3JMU3iT\v޲U6uPքqt2Xϗt@}ipGq{@j|J(\R CѭLM>Wx?ۻA;@w.w)	/>{Q0I0&#zY2To
+=Gjg-
5pLq(>fW;VD'C=h~vk(Z/i}p+
E&D\0[G	uerP?Xjψ3`nv\]]e.lcgVpriG4
gA, x8t]!]\\X)Z' ɝ-z+ޯ7>{u#OI/A@r88	S71e,!ˍ	KuWBW-|"[nѪ패Wճ˫Ǐ>OhX
l51Q(9u#JJ
<zphOqjZ	d=Z޵cҬ)=o03Jp6zaS,w]'Y siYBHW7֖hqi?TE&d
1\L?Qa0nPزw&d
+
U+,C+=$TyVMm;$Tu'lcDF PyW^]Φ삄D4a`Pڋ^DNGg,JmL!0>2p}&Ko	lݬ /T~)u+q]ΙЍk*5r]b AF'GPو)ba=4NBܑQu0bȓ1𥫪"sLuצF'\@w26q3SBEc,ͅSmv[Mbmq]nФY6 
&4&mKSŋAN,ms#I
{gdeޔDSb[+HIzﺼ/=xvA\=0U4yY}KJ!
ۖ],O0pm̿cC	lL~X?ôȷ'2]BBgrx)+	#'gcXfĥj4oUFHu"rD YTΛ(d	-ʵݰzW7eoқ>@K(apupnJUUl'$w9Z-DAY%Bf`d^WjF}FCȲV
"pr2Ո1JԦFG
E[yB[[=I
1a32egYǟ]n~eF1(cL'^9wKhłd(g5GSXn5F;$F'de:{ڭ)KA.<,8u3fJ?>I[uhev98 р}l
70!ϟ=2Lkת$hl#Ʒ2B[EZң:f%+(dci,>;Asa:aⰎS›
Ve:¦9U}ۃ¥
EKw-R]ʼnח6
sySd2&IY25uF50DOZ/0]ƤCxtZqIf
|b ToϯW7xL6AF8/&Z2e^cN܎_W|u[ᷥ=j&cL;W+ȭ[rG|3Z6#ezI0_fҩ./cMR[
ؚ>۸fn`/j
UWTHYPEIӫyTV7*˜E%4481a@>jb*sq֥ȜYΦ\&R1.;(LʺUFEs?8f",1EńJp$7w3U/+1ҟn7DN|֌lۢ޲Os}3EwvW?Xc?8m'M0BX}40W;;fSG 5ަ-VY}5XWoZ.ߥ&lY=X~6=V=(X(݇jx0hږ-7@vXgߊ4e$wP/jIӚP$p8J#;v/oԦ`aR	pmm&YxIF``<%O`ǿg
xWlֹ&i7OkQvŋlEaJ}A(07S
k|~ݬH*(aP_~Cƃ7&	-,3+Fpn"Bo=Jr?.$&c(*̔V^ӄɾEGPS#u(?qMU!ͅ+%Y
<#pwKYѝ0\n`6;7zp?yh5Pj#Ra
{їx:bE/˭γT6(l'M$7Yل0)	Y8J˴޷zhvDEAA;d֭bsڐ'rs 3ђI0#lvZ3nC5
GɣQO`Q%0knj3z`l)J>1υtW>
{kQJ#šNOSe0n\"Dtim|ܽݰ
SVZ͈y``3Ŷfɚvsi?0,)7A9~-E&IK%4ȺTI*؊snFX3ZbTa>0}q]g3Oş3md26O O'&).Eiz4!6`N]mQyPSPkfX@!4vƘހ\q-&I)cf6 IDAT,MwVN~.l%jogb[CR
wuUFgVNU]p<Ϡҝ-dzP`\=rH'KP	/5>SM&f7C>&Sص,F1Z]&kj'KV6}u<};+uc~,}<vS
*hHYΩ!BXQd]\\M&O#ݿ&{\pŔDGt{ jvK"wњ.O,uK`bݾW4»\uAȭ
XhrMy0@!1OF>_FҼn{_#SN2Ĵw	z(vJcBO͠ŶX2uB:zv5#Z
L	Bջ2׽S<ߙˆSlb*|MGW+,chuV4n29;u!
Hjc6;!MJIާTՊ}i,BXL#iC?[fѨ]#&Ɠ"ޓ-.a9(!~5-IPyJ+o؀JjIHbDN%-ڔ'9(u#bV&`Ȗ>9&O$4c\,@b0{/PȀV) zݞZ =Fp)&qdX\tVزXFi3:kWd˓u_<7HnC~~H6ڡ0paZfaʱ;suɪsÀ_SnO%bfGNv}.BRęʶ(k7Q4~ܬ,c]
ˆrAoeoIi|
!)+!r[ydéĤZXz'VHJR3LfYr >cgKB	þeCWҨsF$.II߻1.-4aFZ]^u;MFCuf5pnxwQj!*P1
"/	ʾuQ߶c@3zP*x=7U!v(5QҤ1ޟm_f+<8>l5{=&mvMmJch1YڱSי\};CHDܰݧB="`܈ue$ޜ3RU0u`N	q#֛CeawCR2QnxbT
Q4OZ<{J\
&v
p$tcٳl9-FxAB:e(˩U-^K?W<@'P+Q{x^4֭u=@IN='7]Gl\%)&onbIo~-,~
g}68Y,%EZuzxgFJMԑGח7Bd+omC0c6Y31XHEq=gu5ȄCh2ۍd4ZŚ$D˫8*6ޞ`}M
4^شL~KN:-5n+a[_DcoUFTjױT2?9R7JLwÂCօɋiDP	QY҈;`	Vi[+~WQq,~
(^[p`aEwV݈"&xLv:)pW[.zRbzǃM`jo'bG)wÈtm^rr89:9	.xx}2lt٘Yoe[*+Ţmga]oP{d^J^ѸVZݟq}sXǻH);V#%ܒBo
srr3X_*
=d_ªP_|!*x^\\d*iSvn3=$+~mݞeZ9ml$4킢2.xВgv=U!gV`bDssxxݐU7{߷y/nC::(M?")tg>t1+/X@#7}$[71}qJ$pXxZ8]P}ԫRym[>4.&x>'ِ,8O5._VV@yawr/Ө;@2wQEό,jKvlk3&(7Φl<꾵981VlQYA c	Rm؎
5Lm/:y2xZ10d]>pq3wI|{yv,65@f!SxSχ1#I |Gv5 iI*jqvqHM2CU;/@J؇Cw0K*Z|okڮݿl;[X9V҃øxOT ܾLƽce!f	5I&JBy8lL{Uc@*D^J=V)
vKf^]xz$n:j6j
',2ZMEl
Phe4):duwz2};\x[uhv"pĖe3#"؊r}9i6Z,*TD7b\ZKe%
J<Ъ78o^RS>5,x(ihٰz}heǒέ$MZi:bVOs*˓Vc֛MYնa9܍U݀TE^h<tXsyGZ~wp42V|4!l
J3q</)7%ig8/Ux2꨹
J++aO8'
2>qz};:r?cz@&KBx֒؅My{V^[^߲
Sۅ*F+xSgTLw%UZB-Θ{*M&I^خS}]-7؈!}!ńLS4~QSfU?wRU狅yy~sAuǢV՞]J+I'VStoj4Zh¶%̡}K>l{IMfvOvdžUmu㯳'E_7l.Hd1r	M8әFƀKMlM[ױntn;q\BkYC*|㢆gzrE'HVkd~:mb-RoJ6՟;k	
q|$A.2gk)R՞[7ӳg>&r[akupwPyp1J~Rcp3v"os`|+_rZfO?]\/(lVkߩʷ(M%(XngIYT"7"TC,hO:^VbpyiiP
b͕kJ~MiكVkח
׷Kڒ}A^#}=PC۾{듛;!!.ۦg8T0}l?V7A'7&o&z0I1$awp8~Iϭ/ڦخuEI*,۴B"S7$|̜O2Ԛ
:
GHqм@]K+4`
XHJѵ8cYYkHR0wD4l鰖QւR:춄\:-}j:Dqd'4?̜vai,dBS;eb\$wpc\CnKAz]Cz<l+_>zvo|[
pMxdz)pʡ[+LXDµ[(-阻#yY@|Q:,SPs8q,+.r]JݔTr@{_aۅ~,HԦ$ûE9uǞ/fQۦ>3kW=o\>?zhLCNv!c,SCH'mXKPQkpl&AU}IG-rj@*yrp+VܥJi%=^gR/YO|we
DUn	nϦw
ƬwT3]nvĴ| 9
8DJY~&%6a59bgr,p1n5poyWnO:&ސJ6qa|mr^,g!4(ݣgkgwꡤ0AĆ-fSFf[zkSN}Ѣ!l۷ڣ:s%,^YW+Bnc=	z	xk6MޖvкE8ela_o|QPI-35cwZւwT4R@QވCEMc}}ǰb>ɉbb:?]G(a̸˔Q4
#5ۖ
E1E;6+ޢKv^%vk.kb<$d؅-6Bd}b+

oގƝJ7QzPSybIeIi2~O[IAHfC;ns*3K[Ksu,DŽΛPEItwRWlSI[O>_kK泡D[u+!ٳ?{i9K
wǭb}fS+z-0P7ܯ'|l?|༂hE*Y,~]}t;ؽOwsQfȫhti3fP5Elӂнa3, VŰox
mn2FE{mĦ{*4F|T}37%0ˌ!pgF@OF
p
1R5קz/\,fimީ%l6EkF$	uߘ5ݪ/7,'"^.LC,ҐR?"!X&lF4{Ywd3j?|quI渟%mCY`u)0Բ1t?jOsRxpxVM*YF`DHŜqۓӭPӨ]D%Xr]v{2n8J@C
F#x>xX	cU'6jont)1?[mw	"$-ϙmtv-b{ɂDn\CVF@+om[/qO!yJ	L9B<~GȻ3*EcV=༗=VQADFm+󽧏
h4B!U0#;ngձ@M.W_o~aq]VۑOͯ_)oQ)zm_'
c<=q5-٧iۇ$-"l15Dl|`;\l4U-M3 :D]+ϓv
X~;46UwБj;&>	_yizFþٽo5F|DU׭ŋUgھq}kG(-{dYQso<w`޸g8jnÐFuWɰwuH-X3z&}Ë/
]?#^}mOIgt
&6]1Xm;uŪ'[f԰dxByj7VQho7~}boQv~WfpRAiyewi:[ak۷`Ū]0ۮ
zoƵv^}փ|ds"fvȪZùmGYCfz#Rļ":ʶ|H~g]Y`[X1^s}bɗ[J׊Ue
v9EѾr&Uc#N"RݨVzzYaF@[ӁE|E({4M{f}(fsY7Ag$b_)jO9Coe56GQagW
G|ҵS f憻9*>FCӧOM5sd2/uB7
+m"Ex-/*B#{uj˨8slUc6uq*BX妰]<7eDIR]آ*y'b=m$Mi$~y74Ϫžy:Sr2- Bƀ{e0,/~2l
P]CfUfg{ND5|j߰{\{rp&ms:m=k2w:(j/~Y@b$y\d핫̎t1*ݎ:@f&Lb+96%1@;hUHg%]?孔縇T^V`"X[7nBZc9kfU䩼{(+, IDAT
*?
3)zVwze\$V`v`>r|6B&[3md>.Rme@B'XzѢOayq@
)CHdJz-]'M-Ž&HطWcs`g$%{fZ(Q#դ+ԏrASe	jeƣ!RN^50|R=t`bH]ߒo|ⴰ,)zuK7Q8Zmz +Y6\[o+G{N,wnQ75+܊-sruYY&yx%(
tbŒ9mqug9$c-aX +AAO:&VYuGP)9J6=RnJӧBM-{;T:ǎDCl6;!:I'iX%صdAnBR33i nDZ	
.$E4DU9qpiڣ~׋V׬-D-_~ݹ\zAi*'&n"<}-櫶l\'ȲW"QylP3M4Kҭ_	8U |<cۦzoC$O	zB?+)BYaqSK~*6kF[I>j
:'TgETfva|]兟Pe*;WhaRU@\5tnj\pYMIHaΙ.O+K=8_(]O0bvCI)qHOG3&+۷n'zƏo%LfzIWPbb\Ю^cq"O (cQWkXE.?Qs1SL̢
%ef^)5A>
K.% 7aEc$maE1diT6S-22)܈ZL`,~LvȺW
8Z$f7~?UdfgS"|75q9~c 2q;V,JDv$c/5z\|Ib" Ti+Z*KCffONʀMsx,X٧SwIϟ=]Pzq~U_0ȅ?ehYDu1SjȨ+5Zz
tJ3O6M5T_62A(lچ_TIɱ
FުGa(y+8RV72vGng;d.W5
9UVm7yUk=c=[
b%Knֻ_b[;Vj;VJ^jYՇՆSoGNoa)7ŅxehgG5I"߿C_p8OOS]ΟpxTqR,t=ӭXo@jZTVJ7=놢Ql;`Yx8,Pdsܶk`,G$YaKVd ;5Dnj}h;3K.*v^ʂdrt}=ۛj)m-=<>
37޾}s8J8yE
aNwe~yn(x?Z҅tۚAWVE]8)̖+kI`+3lR=\ף jy=h.	gk^+z]qS9~7)8([,& k.bTƂO*5*l	Ľ#0HjNU]KWjԲԄk7gZjM[7ƾn]7M@7T)by9!0hg@ǻlgY?	%Y@qrt0|"KDj6Y|S+@x:f6`?FI+xY\l<o;ێeY=vx`NB0Q}VՠWUC2-]DH*z
B+2ֳ24lr5P0dTt9us
x!v`lDYH'y0pu% gۉC!ø5Hb=AliFvjU^Hr	*N˹Pr;,M)өՇKkEmC'.xSeYe&a$)0j=guҒ]xVcbGE9A
&GvFgV=<}4kmyRP,k}iqdp>)hH*[r	kx=y+̕[fU;IAvu@rge`puQP{
JIhBKEfo*
9ͭ^0ѐZ}gX}"r7b	L~&wv&Z87a|ƊOͷ1QοlYoMh;;;E
dW=bgOeԳdTއ
׸L|ADX7#@^3b:utqU~a6'U嵳ee[,%GӶcH\uqE6fۘlamg_y(kNaͽźՁŁJC$_抌qfXd1C^1,z2^y2PGw[b@)Dm/gݾ)x0H0-K48stfv~@ԜCԯ(i9tpjԁ8/Hܬ\nb<=4eޱZo44Fg֫l9֨`0M~& 71ah|h{`3??ӖRVd16h:SmDo]ګ[̂cG}/,r$Hii}=nۯ#{/ʔJĦzQJ`ٹYYm@8"Rk]ܲ/=yraW۵,vJ+
ML:Yx!5)aBj%zhI,\I9`)JvX'iqnhϬ~WmJmIKzrX~-
b?]lO58/D\DE\ mua7RW.ي԰ƷRW2JӐUm4?׺P9f%]|	o\xQʪ\Sq`~۶Bʴ3a&	6cjā!&$S`4jx#Nh^Zw8	Ow(M,4ލBPVEUB(J䧧]ګTTffUg4g
m5Ef>&$?{2a:_&a 3Dd9_WYKj}6!lZ.G<" 1&]'@13<;b)~PI
7;& ؄@Uҩ=t{4::o{h< /cX!ۦM9cU>?VVU]R	Uř7Fyrr‡5.m_e&|%zY[_Tna[fE	ŅADIZm坜힪ni3&](s<:#|LlcgGEЗq8BBݬ ˆaߡ8I߰첕d6lz:u(Ϙ
Wa碓d[bR%zW^g (${r,<7N+u&	.H)+L fNAT Qt`3A3H]T[6ۛKBh)v-wG4_^^!{;[y\.X'a|%up`Mvq>WCl
H*+Hw&IQպ'$	0n)#mȺj]~osNYk]4鐅Sv(/h1Hfg-R}(TO:5h,+
Eh(/c),a&:9'[Ada`_aAhˏ;oȺ[ҎqVl$SC?;gmPȽkic]^Yƒ3]^^ȝm6Ű.gs4)Zh~j]pOU'0fe//g~ށ]50[[C㷐 p\ݻ{ǧ'&4wo7"*&6ros~q:_Ζ4[i: -7Pϣbʲ?#BSqI+WCoa.ԨγI"R[\%CYQ)?9\Sc;mKu•!)<_nR.V}oǨ]]oR>DW^Wt@Lj̞=?-%̳:&k0=VYI,:鸯 /U4-,Lړa1}Uu~Zrj/x4y]zr>ٞ'~ۅN!ٷ
`ڜ(,5PW\pý]읉3i39vqaO%OOgō}xm:@U.7P|OuvoLcBs@%1l7uQnjaa(?D=ї!bNYb_+61V'u?3ыwWn߾{o'D8;fH7ygϞ	<,A,Uo
V24CP)]a_y:ؗ+g
gUfXq8PIg(co%!T^ԣZ}㟜=;b{:^^.
drG2KtcfѣW59Ħd1tNL7UG>Hy޽۷n+dip1H5絖:\;lv/ˋFRdEq{8PȵG$EMayb\JzN`5[
圓7$v#:vZ-.."_>w}+op`S K]yM5n7f]}|mHdR%^϶WaF}΢|߭.9gN}صE!liZ\-+.Hkw4o=}vtq|*$)VU.6>b=In$Z$ܺ57ND	H=doENb*qĽJd4&pi,'a`cH~|vI$撖-|\lEDqAqHu#A)bsZgRwrxK~oJY<{>=>d	˳РFq3N&RƼÛF2.2nDN)8+Œh>xgy;>
|xEl5p?}w;M[B
h8N@a#-<V>-_=&omE*uh(]kyJD{OxBOc΢̎ZiP!LuF]9f޾bQlV|O~?PνѩԒ,]|\/Η'rϟ=y0ݿ5Jbm3Hw۰etnCi#st>(HRVY\_*/$f&!E(ñK[#`q~qs@"ͼjπ
+X9rP)ڈ&$vेbp.nZdLr4ɓ6
xo}r~,n`W.=+tIX-#߫h+t։&Hս*JJxlEPͻAfTv;mV%+/{AI~yX2"*r)GQ<=[F:{nywb1qz.A]_+qه?y_ko{WvI,mF(Dp_EQb7T,h3N/6)k׳*`)otmOw10)RTf:޻iג&η}ZI>J(YrgQdyڑ24iadANHh 9z99=)b^Bɾ
"	HS5ݝ޹}K$Ͳptpxc:ݙL0UBL*UYT&+{jX<[=l53$sa /lf

K+p5ެ~
@\GSͥFNzؒ;UXf*K|b(KkμPY!7
 X,&өW_}mI?cҽ([X穸,Ŋ`Uj9lqO97ږjAG("0j%XVEDz]&(b9?8;?2fx
~[A֖BH#[
\U՚F28+csI'K}>z^W#0Oq8L&5bdsܽ=,@5
8>CD(``?:uS"S-
Wj7z..ߔ
-b)^1˓CXүtXt(DMh<a	?D#6ku~0]XTxKi\$ŦL7Vo޺x<m!tۤkMCET_!LRrU׭[wİN/䔛)J)lHO0k:O*y@ P@yLJIMG*43@1<ܼPNʤMm: /ꬫ(r۷"X8BN7e\Va,.f
v^hi>~$˔atMב[Yڭ"Ԥ4‚aJ28ʐvq6[vfUm$Q=Ey[AqG1J6*@
A8mw3Cdەͺ@Ae|-5ŋI2D}O! $IU,/bJNWXUͦh3)0{~./.s9
FCİ<.5cfO=
N߇rؠt˧<>=:wwW8{7ߜd"7e'ʂȔ+nV7\xQp^<+M&WQ$zAeeJAd@$ƕ/ \[o=zjt0t-o&i?яx4D5K9pɳ;e$SޖTEMJz &Y\3#V["GloI׫9Jp4-.٬% P 5a`Ĕ`V@TxgKq^zYL^lp[CʍJdo
X=뗃<ۺI$%9U`.P]H˕L~W_}4;?;PS:[R;=
|G,,!ݡ?爯+)Ҕ7f%_j6AY!so\MiBG(&QZՃ`4O"`Z6dhWݍdoj͗K^Po"UA,;\%'s&"V:
˾:i.Fx=͆ܪ
踥N(5dDFLTcF!y[%?	 FrsSB	^]^
R~Wp$9@6EGuT-ƽEAz]<(F7I;ny,QAtv~|qyIS9#1RRL>襗^
qTXlff#f
'p~|fe
t@(b4ʺⱴnt
V?P"@	4⚬$^}4`ĭa/{YSE @'I;adjeXN dfJ]+n !0y[]_pkoT!L4;KJ|3k/Kp5pJ]IE\Xۈ+T
T1Wreю=e5& !Q
X. %dL~\v`<Shfc#ëM4TU1ۥ`}bzr!%T2W+~7_|VĜ,O0tCD݅NdP`UH	&G+V"VU$~+-֠{|(! G%NNrG.np.T;URN,)]5e,%8Ë!Lu2OhraˋϗvDbNB=]- xq(]BŚRQȵA_FGJ%{>]U(eJnPġ%WgOӝ=32.5j1-x$	|؝CݦUܾq_8\,M-<$b^ROԒW`t%QP5ԯ=5
@D
N;O[٭TOgvM_w"IQanXҶUޏ$	"sݨ!wKj.;FF5	=c>d^_E}xc_|"_{nozŽ(Tuk"b\'{Ȳf$UY/̖X]-SR򾇃$BeuI<8V-d#a^J"q0#
mM7Y!R޻kop2.RɆá8yE:'gb7G'vs~mWH_\_*~o	o/}]&iVOÃHh@lu@^+A[.Z5y8CGwq,.*J/P3$*HhHYEptv|RGNp#I^E(%F	ȰrW#yWA}/O+5$VqL5Q]Ca]RZy-a$CfӬ+[X,-)+@'Ah8@ ˋ^yUE-(*(GH:ŶN,2O<sܬ2/td۴\<;$dn3ukLg u$Ta[@OB_jK0nzg4oK'b^tDRbDVLV0B|P
cZƤ8hYgkq+̗+yRe1H*?)u_:P("8Zu`ը	z\"YFTn)̶8\p5S|<Gh-g*OlUQ`PRAF1WG3+6ڭ:ߠ5w,9H0ıeG(2KRtO^㧟/+6u(瀕VadԜUn5Lӻwߒ XV%\)8W~+cX$¬{IE͍=ɒ2
l٥x!Uq䢴kX^YU{l^yz~~.y9_|ٍ۷,C$Vv?rGAy6Zr
S!aV
T,b
g9Rj&yX)kf?{^6a6ڈʡ]ud08dRtWFRQUd!0mp{@Nb0tXl$'ű@ (GaT7G]GNN>I>"_ym}ZH5n>=&Z̭\!gmү~_k$XppyVԲBaв跜Ⱦ}l>[΢/6KyheݑAziV8ؒ{#(4Ϟ?]䛗R`Ȼ/ԝ޸N)sVAF5Pn۞aޮ'Pun*rVܭÞmOy[PKIeb6N`JG6G0W-ӐkoUAD4=<*AQ81vv KWX$)qs?+&mN={{d\IY!UKeqiY֯
j$`Zr)_~"wn5\gXKj6}[w=s~[nBHz׹t=S2sSl@aQZdg?okJU<&$lfUm

qYutM-SCxEF0npD,0C1,-
PU3ҿ"q-kI֙呧Y]:	܄Mme5ãX'=(C		>$XG!2Ju*2 IDAT1PξK$at2-gbvY59Lk)0,$Xh{¢}]O3߻wPO>NGr0/^GC|VWk楜V]8\
AJ^>}W^yΝ;өDb%u9W8H
L1e;\Eދ4Y=`yCF2K)5\b-͛-?VmLLwlv&U@[jP˰c:de@ۈ8;kjB4Ap޽+ūicݯK{ͷoxWtluۥE.IINRPjݺ60MP4B2_|	ڲ5V|8NXy7_i*%4DgtAe|X;PL\<Qj\h4z7>|R-6ٟUMnЄu%Cp@4>Gb'^~+$tEaG[k!ITF-[MOLHĞX䅑x:f{
-<3܆CO{1
ynGq
M-/Hl1:?$a3RuɛwwYc>;;Uz^p0ޛo[/Rʣl*(*6x:34gmUȶ*6J‘7anG\}k#_O?$~ǟ<)ͫ^L&%vڞY];p9eoHp)'O1ڝklovjXO!d<3Ѿ?+|FƎ6嶧7	
h!Al'eH@TPbF\fgB3^(U54uQggB_r	;;{w~w%mJDJl!{2;\-/o޽+{;oh0}WR
r]uòzZg,*kG{MI?԰$n
	3xJ뭯/~7}OTLwЭ"hEfYM#FP
=|o~Sٳgs=I@5v*^EUǁߴ:c͵
RW[N
Udڜ2W+O`oj#b 'dCVRy+{ f!
E5=qʖɦ9ǴrC8;#piK\}#٭[7Q=|eyzа6>_~/%;O=v|܀zn,>{di ;TK3w<=/&XVZ
R]Mȋmc=}}?f)
q0Wl;ɷ_G^w}sm'S8˵I[!*yvõ͖wuV[D&T`S35A)lc_Ȉ('֐SMa`:ptuKRppl(=
E(Y}[{===O'^xw^|̓ϟV%Yd}vpZlCĉbMqڊeٯcmyl饗T1Z
_:۟𩁰 ˳'>|>_dyh|w488=yVb0Ymqۃ0;H:Q}QTp4w6 $u*v&gC *i@C^/K^ݚKm=fpu\
ط*nMi'KX	Zkȅ
w(8M}_X ʔHÅPge&p9ip(9,7FM
ōH5BbߛE JY_+~\P
ٟK\Gyzvޏ?t#a%?F;?򕈡UzGz)81MyDRgV/ǂ~aٟY!äM#'0:k?xK_wx']??]a\,+mup?w5w4hiѕXd6`p~y!+H^%:Hlˢ3
tKa2d)z%MԂ{Tv1qNtǠ#,ʷE^"b#/f^O\!V'p5rӶ;߉Ű$1\w-}
˹?ܓ֍;"|67U'M%gU]3Qحr(́wO@@BHn!F5c	k~y1OvCM")T3#h74#\u=c-h6c0à,ũOI 2ѴAo+MkUHN2Ճr\?On[r/K/1%TG\xxdT`88DO/K?NZ6wdUM՝B=P]9!1SF7@k:I5!? %a^r7AI4,^Rx|IE9Ė2mW; +&F8Hs9G؆Z(E`d겱t@+Z1,J5mJ\`ՆDa`,
wYK.Aۏ8֍8ݣmQ
(8Vn=F!y$LǓ}yVf)8kv?hKյ˧OKjǏO/gR5vP89NChqM~^T8<5B٥I5؉0kRl5a8o&1K{Jlo"\T.5;p<׋[56p6l<j31DŨ;tJf+wvϟ͖

-v
Z&
#Z\tH
ncǾԗTN,01IA
.oq|^~Jk|¤/	=^֔כ
)rcYJg]#Wb?%O΃;TsϢ@rE UC,RGns\9Ym֒{?8Cwq{'fÓBGeRv)\YC6s:¯UU	+H\ha\Z)s0r}Ԁ}bB9*ɵf)k#=nk
+rԣW}6(G/ZƗn !Y]e^VXWՠd;ؿ}򒳗*3BBdGt"mXSRW^p8sZQ(T$[bB{+7YQ$rp-=z*+0.Ax)^%dCPnt{V"*w%IX>bwV}@-{[ȀjX\ZVIf`YDH~3P8X;PV9	Ԕ=cUB!2X\\e+&/Yd|6;::e]?={~|qYZh7YnS^8FkKRQ̤g+MVGaN
NU(Ww[IcVץ[H2Wݼq'9,f+h˕
Tm[;P.A?~ %L]QP5\JFdd &lFJi½F& 1Td<-#tA$/2]gqJ]]bd0~'Nħ.櫓l-6Vn%n6-0ZGGv4l
?@&%I/B<$EXѣWVK)']ЦM^9EN8s{Q40 4D5d96|TcBZX/ba}/l2EbbqÑmUaumD<À4Elh-Vh4tmė))`/ܳ׀VΫܩ
S2L4j3;ɭ̖T
NUUPEh+ J{dU$ZwNHK&6q4dfw4iߞ  BJFe~\ynϟCID{6x^EM%0:Vziv/liiwݽmߐY޷
k
d_U鷑oiC2Z[uW"OKl&w"Q6\WK!^Istϻk"qY
uDž^yΔt:>&dzn9s؛6/oX%8*=ֽޏ䮦^-IB?~'lv!zDL-y8KqsbXMCƎ	v!yAS;m7*\}OdFKzIԨh^63gzvt$eqD˩O5֏C/ZypV
B7[A
yCRgQ2;c]ruُrT3RIIrZBjzl$
4J0зR8-X9RsIF7,2oC}1P-~\.rD,YWk{~e
KR6ǣUVq;=;o}G<vB.z<~ԏp @2%: ):!dc~@r#Ң|WUED 
`n^JS(#$LvZ	nxVN8w#Z]A%v@z
jQX'4oR$s#P{J\ʽt/iU+l˱qhm-5pls?'3]5dՋ*C_+Y?ðGpXT$wnLv$_ڤsqޑkDzyO&XflY?M^JfEjʅVv˗ə3S5^umuvoHw$.ᆱx2wvtˡ:0Y	3>N7u%e^d=u ^oXܨk3Kc@)ǃNN΀w1fXZt%v|2-$OO;=aptN믞K	L4b\m=:b0zO?>Hu]5zS)\dEswI#%bprFc@`/
Frg>{rX٪A,\1FGۂnbie;fY#:.^߭;`!'a
vҮLcͬ˧=qɀwXdMMջ1aVZޔϽăZ錫*5HgX]'8#{6lO0%I?18}<#;@l3x/%sxMHd
fd`򚫋>/
829z'+ W(5t-C(eA Jn|zWQm;ٱ(d$1Yz)륫E0ҧh]#:oNcZTͫGǑ/?=Z|Ow|*={1.+]_L	S(L6FBp??tA0:Aas!jfVl[Qmq>+#tS(gsY[gNvWmA*AAPu|0ʀjwS*XM	9@颡X$T3Ά%z{c):f{ qoΧoqu5%ɘjLpuLX@lCƀTdZgg5_)Ѹ@q9
pV\HqT֍,P[Vf;B^߼ͼ1Gz/Ap@46;m)-Xa:=zB|G~ai?PF%#:[࣋{ߟggTP=:98?_|GOmL"a?"Rcs<_\\D]"}V[Ύ4,~ۆBa6(RnCEb@8+Kyt#	۶O|.qbl&=ͮ]c@j*vz"->mہ^&ЖfR!XbjVjL%
q1
5J`	Ji	`SR9Ϸ)Й{
=L t,IҪ6ݶ[@K<
ZۥQ?}{GYEL߶s>԰sB+A܄Z\+\h2VvT- L˧j<alA'AV0tJBy2P CwaĮ"4@ׇE7ݣecu/Y'bRN2&uǏy-JAV<1Chl۵l2FdR{'>Ȧg_~J^drt2>=Hn
VhN)?p
aYS˥OAUf2EMHxI0,f1:s3%[J_n'L!{8F)-oK:֣\Ƅ' |a`OKk$Y|V޹k^aapeqe7w,pCzR"pJ k wӧ~!tMDG
yрlvz|?}uM-8(]	&+kz΃{8=,nJ,pDe!UAV%
zpܤB$K҂pB}:2#CbnMo
O8ް~N˕Mp{vn]JT>Ai FBQ=e \}n	}?_ǀ3ԞsK@"aiؘ߱4#Z'+hqR;`~7Rܣێ4M$4~F8' 'Z͎Bx4":/nVjT%̊"a@Jl
ɶˡ-%I4kMD7'qSuUDž*EixSH-й$EP0Byi콲̗4<Žvzړ}7*NY.AN0#qԎYXΝv
&l
`eA!2a
 yU%eQFOgF^,Ush8%%E~}HڨHj1.}xs7pqj}Su#fS4ّxUz:
~M<^{Xb\1B,[tȔf	Pϩ9VGaqM{{I,qޤ<0onQSX& e޽{Nq&$~;V
@$Xڰ*۬G>XЄu<ߦtS!`v ^5f]6e]8v	uuU}ԈX0A"Eq[8g^ǪhR¹׏پ}މN,Je%-I> O<'L$ҕ8#X)ݑ`
$~2t)%wn)129W$^Jb,MpP~kyV.{,Bt?Zm6ʼ2Ff7m5
|,{0@A,`
*ᴚBE~5kgy^3>]b*栢;L0`:$z%IjURx߻uu/tn˫RpŰA(|H&>lSުVI~07BgZgmТqg}Em,.+:VMŊk)M9O&ѿ^Z
"/b1a'~拑?	U (G9Eձff60Vb:l +V>ҩqc1.p^~`rhS}g*n+FWKķi:
S{yoeݍGl_SJiQiOOVLPxQCJ.
6s=n@8IsfӢ){5yPr\lD]GH#
nG(2a;89c;[(}2&soxOwoYfyd.e2)ξE@@EߑUuزkV{8#0)rZnxa A=8HqY{.MCD`0]uCզoǝ{2}Ŷo5Q(:vC r׺c͔cbF䣣#R>rqA
o-Dw6, pmOw-د61hbO*f .=	#y( & 0Ftp
u!/s`}tXnsQ@iJcs01#|TTBI̩zpO:SCL$p	ybK%p1zømdu`g5W) P?o
Uч:7o[]
"5IL̿ݦ(65+\mGh4!A|V"-'^,
c[2	ID,t`T;x՟JSq{YVxYˆ@$
z>
{C:Yxfv0w#1Nj03pgr^H诘X/Hq
J	=zd8z3|pﲹǨ䪅B1:+d:ɋjș73u{ZH̰-Vb,[+/\!GjJH,5jcE(_e%[_b
x!<&or\n3Zds!
}q
qwضP]_%gcNCݸZ-m!0s(
ˍ_6EuN3/I''i[0ؑu
!wлh2vrSeNB6yqNsa06#}cfqԝ*1:KlS!ZȖL8P>;`G+~IS8eNP2fՒkRu麾[w]n7L3~l6tu6b6k*9cp+-+Q##zC1L.2dɹԠGG?-ж;l`,I3,ghvtdjFu!SBƩ0jQK1ef`v<4+F,'C"B%)
ֺc{pLaH4;R=}\AT+ղoxv2~ِXӏq}[UNZ~~BvW:4#~N
["~ LU|\ptǟUZ`o%ՊYFώ&m[%Iͨ;=7U{`ШgW.ԏ8n
k%0l#PzcaE.<+\	O-WTcuCOv_ebM^nΈiFYF{tnTyCH	u=hǨ.]NE鷖u4ReȐc(e];!48}0a+~4)ێkYMk~tz{?oi6-]{o#ݷOGaWNowJ2qi<̆\P[M%mŔDןQXZZ!C
,czMcz>B;i|wZ畟B2?"#J2)ɩ	/ɸPK
|];nI JxJ%,#g?Ə$q
Bf%>>%GEdD@D>=ÖR(<'%>ʘ^`f*2s0?vK8t0@Gt+=t0JXqdGKgY䯇`U2s=0/y^|7Ne#K]WU^¶#eA@Ս"Nʿ|ˁ
C
9~xPܟ:)yss!Rb_mMӟSzNjYxKL/~WϾL2s1y¾oNi$:b8M~!bN?r4= -_`oGչe{=8
A-
#7߮o']FQty(YexOڃ'|E`s(%ea:zAl>kJ|\HOc:PI𵓭2TQ4~z><9}xׯ^i&uV
S+;MgS(CNJLVˍ_
݃zIy|tduWk?}n}U˯///w(~%RTSRV5]DgG:]TdQ=HV`}iri^T5h΋爘4dU1,i!P+q>XT+JQ\q is"^cDV_@ޯ3zt8@;HAx+휤'=$_Vs]VbpܘP}[Nܕ儰,8Ra#!OSZie)h@d}ZCYL5n7YYЍlƫvn(o)V/^(_ &`︞DnG%RgyxoVELZh6;I|cAFJ.Ÿɇ hS]fMV4_qf
)t@H&/TB)tAp (Dc{\o~qOJn:hw=5|}PdO;؟lq`
A_s:N!b]E	bU.;xdB)([ܖw+jr<}? d[JiTQHxRtvvvBƓۛ;X˥/y|MW?|/tus{Y7rv}Smmwg/ݛD*^Peg!Ԫ*FK2JgT֍rl$LFm$WIwڤi͵
y]iɲb	OknwխU(<21Mxp
lƩnKxsA+G
/(Qx{/
;pUy:	lHX|>8XSRo"z]N-&^R˛ɻ_FT9U9Ԅ=P dm]\\͏($Zm2
QO?*]XT=xMdf/	+6ەPy1Ľ<
Fc8@C8{zA@fvjWR,-7P*EI[nWZA\9jdMZ.[o_}q6!y
|gW\p$FiU-:G8.4"Q\UA0DZI`+糥dz_TV'GUZSҞL'Q~u?{Gڟ^vۚ'NNyzXYhtzzo21q%[L]{o79?y$:7N'~QFOM&T,$׼y)9:IF1
Q
4s-ܘvxsm	x3Envn6t
[η[?߱zMn6*:-odcy۳H--i̲(hRn}JҰ`dB5CN'RurRSxȌ,$TzBlJZ"(cc(pxr~H+;Oy
VBA&?pzjպ
FgOq@ IDATO9vJ !oz@s_bt.kޮ=8^*LXxw?x䇋9}Un{e)ZION5opV_A*FSRT[?oT˴^m8:y[~enE5D랽x~te甬25=L\bDs=$yEGWc[U4]Qgyÿj ~&wcΦ";/tr*ѴGLB>rO79ؓż>7(ͧ*zL9,6wjyBü\폇 %36dh:]{O(,:Vwi^
쉠L5:"'qJ&Y)Xp" xw[zoANG*3
TC&*c]xa#k<6Z*2^rtXjG-
ŻUZ6+ꐓ
ekn]񽳢	pR]C><2|EXJ3Ow}@w;Ca&pޒ;[2Z-~qȁRXxȻ#$d>tk5yMraF+}z~~񻣣mZnԧ̻*:62Ŧjߑ1^7]JA,A3=͝yykȧ3 3E|z2ϗe~O]eWmjn̒	Ȩ]N7s^]:,YJp5Zp
>ͨ6KGUNlj	.q
{K"Ub8lX[Ae8
bcXA%מu*'ʛXIapi;.FIT
%tl̚#N_{i_r]Wu>	~-_n9\	11b!2֧_=PRvU+!YUʁWr5pA°Zj8}"Xq@VT3̍^M#æDLZ˖-|t>Ȍ ThVlB{Lh&_ Еnâ
!Kަɴz(a13P ReVv\upӦvkJx_=1N&/(ndN6YfU)Nf'27é?{yPWQ	b8a**"U`Ps0h	Galؾ&sFI>}r[l>
eǣ<(''0ˊza$F$;-J]Mf)ì3&OyvKys_x%ETTu0a0"hpfP>s"X6"vLH{lIֽreWQػA>dĠc2z<A*;8gv+1"'(d/>,(cс_2uH6e Qt-P,,ȻĜٽ3/gn\,{w
iÑ6P-[ԁJ-MO`X[sf-	zx9Ou%diã4OŲL>`a?֡o6Z$nS^ݤ!R!=eأڸ;N+W[?w/O?l2P~w*,:9,n/=.])ط̽Y+2@lEa(K MjE.\#p(
TCPq̈N;q9,Z;ސ<;2,lLR:q-n'!3=GPvdpz}\Qx#^69.bPz!y]'ڬ]Ze2v)Zԫu땭-(d 2ߑ[r*
""F#pYmɶM_,4PA[$pYC˷b̓ÒvKpC4N;Wtvou"ٌC=%X!ȓd2dQJ_+YR_DfTp'}xq
`vNBVYw,[n:ʆ>yz	bLGU^M7g[j/JyX@EAt{s^vm
28
r|38z&&,}xCz'V҇&ϱt>I2N.oeEErlyzsu$8S*vV_ܽ|W/\_.MJ錎1W3L^IJfW9'Q[pbHm@Vqsh%<`}ꞷ2/Ʌvf[[C0HBIC#^!<_9gN6 QRm/5TՖWHM-&+Y-[JoOFGmId./խ8XVrdL9
ܓ1Hi*VeS:]o\޿wCvW7ns^__GIxrft:OO?tr}D_תsQb;8W;UBK)ȖKOVk%C7Uu'^X?bE	7ef/Barsh@wz̔[V!.

,wu	d^إP2)NǣQxŽ'N>x-9xݷHh=ޙ3-&ʴiśmPnjVViǁ7uSno\=t#S6܉][9wlfn*ycpۮމJޢ'Z@*eSɘoVo]Ek[b-}O/.Nw\)U)o~Gf>;Wo7=haWT
0T'5UpQA2(߂OD&gWjs%X?#nQئ1 44$ڎ-mdm.>TЙ3`SL0Jzu:mgTTyj032O]Uç'*$	2J!8m5(ƨ`uڵBU@WQL~S(w}׫q؍)"z]@OJ+ԖlfRq_qy@ʘ)ݡK0xŗWMWzhrrv2ɄaeYnWJ\S"=}>.*n:p@΍'?:r"\뇑q@eUu pGY&f;)F%c"CŢђ4PqR&t{$>:p2),hǧ8q7ݯ0~n%kV(];Y5aјuK;xN4}B&(**J#MUO&]̢i.od+-TLB2^$cG$.@4[5ܻN
M5,Eן}F>~iO''ggQx`=cf)@iz\Ч+RWќ.񔞀13]N%)TM_ԿqȏY:`mZ:"	|27TpO1JAdi4O`ޙa֢"`]/ՉNhTD17;ҭhRlWY	"4𣁅<)jWͦzMYUf5ʵ{]uMEIF[߼Y_~kOqx4--9(٪ذY-E&n"| U6$}ܖ'{9.Ȥ=\SܩQ^HKLWonOн}w??d1Lft5ɘKVRjOYhPjG7YJ>_|%q
H,3]ZCGD+f#/fo c4Zϯ7;
jCZ!2/lazƋjPh kO>*ϴڐVq]#ڤ*k6{aպ(!~3O҈*]'QO"MsM
3ʢF`:Fa|>B0iw|WaǧT'n1	WQY>-ŜM.雟Sb?ooϞݴ*BY2=w6OW.r
*(
I<]ӗ.
b@Z:5fcIDATs4tТǓGK)nB1`1A	9n05v]tQt%8dXL]f*(̪r睬#&lL.9)#0gmq4>jP5QEi7Yͺ]e(xjD!֖Rq@by+^E8QBհe&鹾]qUĎ{8<2W"!ï(Eu{4:G?Ͼ]o:+~?BG^UQK&cz (b<,'aYSTȥ(W߁vn#Vt3o3;VM,5r OYFm#~4,Tz:nf81TF(OPރZx
2-X#hi,b_CT/CvnZPY]ݮQwԖyV]镵O!x#?)7XVeYV*붷oVwWqM'(v;-IRqPn#fM/-^7\@v[I1q޷i@P6TI[YF=I4gg邒PrWaR
@%O~[RFm`+UjMIaяШUãẫz[clHql"Xc?딺`i|'cy_-RA$gS[yndTʫZS*?k]e6MџUE_&QIe(jr_u5klۮ.oD*f
^˂
"azA*e!0 aQ80%Uc<"U)F'P	\"޿.J[Td̦t2wk@O>CShxå.׃}@Y:z(@|1?AH50y
_Pt_]nneivJGVE'T]Boq%ݮ_~3ݦěLQ- dH%s0,	
oE %Cgv<[P
bǴ#VL-!hfRӧO6/fQ?&Y(
*t~scwۦo	^qٜ..N:??SA9(+RQ<-REFt,e%cv3-(
_xFj(+NGł|sQ%(6⫌hN	t
]8Z)|iƵInc
y5U>.L%fA1!5
bsneŠeudO|J+BL3
oV}9{qDJ"e㠁X'~vhϟ?2ӧwyvI7
8f?Ͽ㧏6MK|u+EVq[2,qmzu-+^B膲,+G,+@DI@q!뛛HHd*8T`4kixSWy3/BzPrBucP.Y4&\+[o|U(#e$(QSh!jԝ-Kp5ʏ"y"'η8/x*oЫslfa5o:]L	^d]YqoǿO>Y.}{|g/.O\HG#J]NNoxwY_nYM(۟4}oӏ}|gQ&ɤmC	"º y)JwW2T7>YM6}\
g܏_d+]+v\^~^/<5];;IkVdQ穆^Rr*F\[)=Sͷ(~pq~4$#\~,Wێ^RIۥM-қ,Q]Me`oRДTԸRM3P*zFL$<G/^,#dIx^t/Ţy%\ۚt!h1
ytƂ>TN5x؃o?6ϪϿtW_/OyscWjBk*SdJn90/:фVixYL9w|.,8"'׸ݮtaQdEsrJ7,K([tHM@\ޠnmdg[4

pb"̦'j5&L5!R䫺LjB4);ze5n8;e?lW
bpAb~CˮrJ
᮪3B9wtݥ
BpbsIENDB`TIT2ASkazka in F minor, Op. 26 No. 3TPE1!Schlair, AmitaiTALBPiano SocietyTDRC2007COMMXXXRecorded 2007-02-02 on Yamaha baby grand in Wien Hall of Columbia University with Edirol R-09 (internal mic). schmonz-publishableTRCK1TCONClassical@WaGkZBx&YkH[O5E`˹-nSw `yfk—	Rjͨw-HRc6&d0qv'[殷03k$Cs˷[ss>cvEWUsͦm6X*&F!ָ_T/#41ɫ%Y{u{/mr%_5#0jPU)q}E=,f᭿9wE-Z2jCZ4M?,j@ܒ9#iU N[P/i"{M3=C۸k4Z?LfG7gǹ}8,+~紣mfy76zËwG&-N}(W5RK*&rF!<=ɾ Ǿ5J&VoWNe6xqTE^FVfO>)X{@*^,Œ
S]%D&sCH#W)U5?㒡tkJ?_J\TδT@B2fE-c[}I`˱Ĝhnq㈁EqS-	ԑ%adZk9UYx߿s?ܕ[A I([#hLRp/+2q2Hx5--nUb^JSՊz^MH:>晍9ٺ5j\w,9#iIQVn?VP<[Ժ_u•q>7O
-O5SVΗj1 L@CGkJkHic]E`˹գ(vZT>EWu>-h1_n;k덹-m"8FV)~H)𺒔\IIw1fm[ᙢh6P@$Faoz**xxn*ﴝn**zwum\.(ިl$Ӷ9#h15փjtja*X+ͪK(yOԧ6EȰ^,bxD~-Ff}>fKpT[FvTlU'~h},SI#iAGBliPBӱ['i!)O~7#
Cilv.}X0YC's*I
017Giv?y=f<|mJNF d",R1\F{5Mn+gg}[Bl%RL3'JCc{IonskSc2HqqQ0ĉЋ>JnI$܋qrkX/s
8~ꔕjR9Z0"vu[?k^uVd*WNq=֯r˵9+xd{x{]\*bMG^{niݒYMx9iV~V]ɥի@HL6*w+.}<2[va鱄%<ݕ8T:r(>+m*-40fs[Q4M@Ϊ>P2u9c&\A=e+#'}tʖaLFڂ'yuѸ"
C4Bbh@A/'SYUKx-Kџ&$_|I35y^$3a<ܒiUU 
H$C澹;=ZΑ#j>в9 F.5Bj'؏aeet˂,[ֳ6J{]guCw\I#[EjlbRRU.TVkhˏ+gԢ*-mI/M4:x-x}啢}-2&ZSfL)I_Zt~@T#G2mEIc	[O;L`˙ '}uI5!u,\;D
,#+7"N%BMT.*}YLd%Jq@hWkZ8݁5jR|\ޗ+2i{DK_U۬Օ1-:)(m,0V(TtGh]e`S>z;yu\*	얛߳{/E	^àr!XS)IHe"ݓ0Mb*08֢Шs9]nVV Xou)#L$ŠK?P[UnX3a8ߕHݜĝ	51
|=%qhDDsݱ1J,bārċezKHQ$~#bhmpE779#Bݧ
tU%P6z[.Nr~[ԑ3M$ď54 f[&\99G9'C)@
FkBpYc[P9`˩ǣut8,ڋB'~~5'1˿Byd&)Ċ}!/Z]do~yW22<7׻eӋ*cikBrrfk3<&KվŻ׮l]c9lmFH(m$ht`{Yg`F3mӱ`[9cݿ([QɞE\u`"
EEȋT?^n{FnuؙLNvvֶgtcm ҄v
a]TzCZ5q2LPeЇZ9c=Ab#
ĺGC8Rڽ"~k39e2$'nUj:QIpĥ|r)(~\ynq?bw<{V"j8"[έ\yKR
eOKȩrLփ
IͤY$Ɔ%x|@mANk2rc]P5;͙g5!lvkin[
59̾ʄJVC`'Ty+%|]rh2[fp*`A<(Ta'R!Sd0IX)݆B)OH?JPyHe
z
1KflLhZpw܈RCPkb]u]U~:[F)A;IDkɊyH@O/H SiA=)r=2Ki:8rhe	yoFwZ"לNGeH$9^*sLBG
ĀY{@nYWVvŹ56v,9tiÚGD+Qg/ӆ*UoxkJ5jksxI|J|é4e.r.S'Xω{\,q0a?c/uF軹Ai,nr*sRL\RJ5Q4pR(.y8dL<:}"AnL	1B,A@$"Jk3luGc[M9`˩'=uU7gq|]ܷs(`Q.&Jlh7LIH$|rYOMf}!Ա5kn.S:o|ݩ(L9"3WSw:*'rk}Q8M-d9#U
LHN3:"`?B$L4v\%Uw\͞EcZfr0)z=Jj1jzUʠ8/LZܦ.Zec0aA:Beq4mRIi(,$m?׍_RV:SҎG2-lSi|DYU٧$~4v>]7F߂YFlbZë'7uw>ì[չY
k@Fk8Jo*c&[Ceyu!o\u^"-6I6$F!3Oj"U᷌}nCUul^P rXVq;=
v9(܇["CMx-7&e S{#XU+	ŀq$0b&QylBx.,*f﬚*&p2,)vRݗe"5,}opJkq/dw4VM(
.z/;+(
oZy\ӑ۶I#
#l<kU%)Nyr>W 1u[=ƈWS{c
!8RD6LhTϬML\q
7.u_K+mp1D/"M
>O1l;lVHu
V)UgHckHdٳE9VuP>gv[/O'P*g5hb%ۺʭmceYJ*[":HKj*ksQ{r""Ȓ5M,ԈTu[wul땶S'}ԶzC~ݯiO7u)
Xh1BZF2q{
A{@ā!,8Bᚨ AV1nPG%aP9 DhN}6lΝS(TĈ*Bh}H4H䍒ٍ~MCupFa-
\r
e
c2<5\c+y/R
qh7r"KFz/U[UHfژ@P+]9#@өFk2tYa]Ae+h5o8~Q?P/YӸ
CFk>jwӖF6HD؛!7Sx
eF-0O꺊sG-),[4ɕK*7{Ūe#
NJ:wì'ۖ
7$9#d+Prukf>q:49$$Ric+>3j\24H9PHúTiL z@Oc@FQ6jm$С5rg )+C+3 M'9U#!yP~*2k_U	X1Vj\Kb\k&w<ӜÈbXv|d1
y'o[yr"(H]R}kgBQd@"%"ao

f; }o4˺U%콟v>mۗ׵KH(N*E҄4H@;J#:l5f5ثtn7Qk @ݎ>k2qc[Aa+מ(uvz.}m=:O}-9##hV	O߭/{uYC\w{U5&by߻v㴄:pb}`#[deګVj
r]sfS199-J5 DT㊸8(
e#h_"G'
Qq0ܒh^
kFyA7f!zbZ}Q׾+O.wɭ/NjZm:=K&WV84!(w-6dЖTf&!RYdH$'e
*Ԃm41quvH1&]۠DBRP]#r٩Yؠb^\ۣmp[lz^\˜YkBհNI-K$@XBZ8KlF`a(bt&~4{ck&0f PEh*L*AC;TS9^01@6
88bsZc&[OETV-,`	
8h"x\ۧjYfLJc'e:q9xsMl}
_ZnI,H;2R}&.縲Yuq&OHO@/a4Oe;
-'uqaҎrJoW噛2hN}2sKR]ˮ"+"	qT4fvJv2v+ 3X&Ltԯ0YWq]>x{MZ(ʬ4if`ԑɺ3Yfikiwiki-3.20160121/t/tinypodcast/fancy.mdwn0000644000000000000000000000007712650125230015232 0ustar  [[!inline pages="pianopost or attempted_multiple_enclosures"]]
ikiwiki-3.20160121/t/tinypodcast/attempted_multiple_enclosures.mdwn0000644000000000000000000000021412650125230022267 0ustar  [[!meta enclosure="piano.mp3" enclosure="scroll.3gp"]]
[[!meta enclosure="walter.ogg"]]

this article has content _and_ only one enclosure!
ikiwiki-3.20160121/t/tinyblog/0000755000000000000000000000000012650125230012525 5ustar  ikiwiki-3.20160121/t/tinyblog/post.mdwn0000644000000000000000000000001212650125230014372 0ustar  only post
ikiwiki-3.20160121/t/tinyblog/index.mdwn0000644000000000000000000000004112650125230014516 0ustar  [[!inline pages="post" rss=yes]]
ikiwiki-3.20160121/t/textile-double-escape-bug.t0000755000000000000000000000204012650125230016021 0ustar  #!/usr/bin/perl

use warnings;
use strict;

use Test::More tests => 4;
use utf8;

BEGIN {
	use_ok('IkiWiki');
	use_ok('IkiWiki::Plugin::mdwn');
	use_ok('IkiWiki::Plugin::textile');
};

subtest 'Text::Textile apparently double-escapes HTML entities in hrefs' => sub {
	my $text = q{Gödel, Escher, Bach};
	my $text_ok = qr{G(?:ö|ö|ö|&#x[fF]6;)del, Escher, Bach};
	my $href = q{https://en.wikipedia.org/wiki/Gödel,_Escher,_Bach};
	my $href_ok = qr{https://en\.wikipedia\.org/wiki/G(?:ö|ö|ö|&#x[fF]6|%[cC]3%[bB]6)del,_Escher,_Bach};
	my $good = qr{};

	chomp(my $mdwn_html = IkiWiki::Plugin::mdwn::htmlize(
		content => qq{[$text]($href)},
	));
	like($mdwn_html, $good);

	chomp(my $txtl_html = IkiWiki::Plugin::textile::htmlize(
		content => qq{"$text":$href},
	));
	TODO: {
	local $TODO = "Text::Textile double-escapes the href";
	like($txtl_html, $good);
	unlike($txtl_html, qr{

Gödel, Escher, Bach

}i); } }; ikiwiki-3.20160121/t/test3.mdwn0000644000000000000000000000001512650125230012623 0ustar

ikiwiki-3.20160121/t/test2.mdwn0000644000000000000000000000004512650125230012625 0ustar
  • ş <--
ikiwiki-3.20160121/t/test1.mdwn0000644000000000000000000000004612650125230012625 0ustar ![o](../images/o.jpg "ó") óóóóó ikiwiki-3.20160121/t/templates_documented.t0000755000000000000000000000057712650125230015304 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More; plan(skip_all => 'running installed') if $ENV{INSTALLED_TESTS}; $/=undef; open(IN, "doc/templates.mdwn") || die "doc/templates.mdwn: $!"; my $page=; close IN; foreach my $file (glob("templates/*.tmpl")) { $file=~s/templates\///; ok($page =~ /\Q$file\E/, "$file documented on doc/templates.mdwn"); } done_testing(); ikiwiki-3.20160121/t/templatebody.t0000755000000000000000000000666112650125230013570 0ustar #!/usr/bin/perl package IkiWiki; use warnings; use strict; use Test::More tests => 18; BEGIN { use_ok("IkiWiki"); } BEGIN { use_ok("IkiWiki::Render"); } BEGIN { use_ok("IkiWiki::Plugin::templatebody"); } BEGIN { use_ok("IkiWiki::Plugin::mdwn"); } BEGIN { use_ok("IkiWiki::Plugin::tag"); } BEGIN { use_ok("IkiWiki::Plugin::template"); } sub assert_pagespec_matches { my $page = shift; my $spec = shift; my @params = @_; @params = (location => 'index') unless @params; my $res = pagespec_match($page, $spec, @params); if ($res) { pass($res); } else { fail($res); } } sub assert_pagespec_doesnt_match { my $page = shift; my $spec = shift; my @params = @_; @params = (location => 'index') unless @params; my $res = pagespec_match($page, $spec, @params); if (ref $res && $res->isa("IkiWiki::ErrorReason")) { fail($res); } elsif ($res) { fail($res); } else { pass($res); } } ok(! 
system("rm -rf t/tmp; mkdir t/tmp t/tmp/src t/tmp/dst")); $config{verbose} = 1; $config{srcdir} = 't/tmp/src'; $config{underlaydir} = 't/tmp/src'; $config{destdir} = 't/tmp/dst'; $config{underlaydirbase} = '.'; $config{templatedir} = 'templates'; $config{usedirs} = 1; $config{htmlext} = 'html'; $config{wiki_file_chars} = "-[:alnum:]+/.:_"; $config{default_pageext} = "mdwn"; $config{wiki_file_prune_regexps} = [qr/^\./]; is(checkconfig(), 1); %oldrenderedfiles=%pagectime=(); %pagesources=%pagemtime=%oldlinks=%links=%depends=%typedlinks=%oldtypedlinks= %destsources=%renderedfiles=%pagecase=%pagestate=(); $pagesources{index} = "index.mdwn"; $pagemtime{index} = $pagectime{index} = 1000000; writefile("index.mdwn", "t/tmp/src", <,

[[!tag greeting]] ENDBODY]] This template says hello to someone. [[!tag documentation]] EOF ); $pagesources{"templates/oldtmpl"} = "templates/oldtmpl.mdwn"; $pagemtime{index} = $pagectime{index} = 1000000; writefile("templates/oldtmpl.mdwn", "t/tmp/src", <,

EOF ); my %content; foreach my $page (keys %pagesources) { my $content = readfile("t/tmp/src/$pagesources{$page}"); $content = IkiWiki::filter($page, $page, $content); $content = IkiWiki::preprocess($page, $page, $content); $content{$page} = $content; } # Templates are expanded like($content{index}, qr{

hello, world

}); like($content{index}, qr{

greetings, earthlings

}); assert_pagespec_matches('index', 'tagged(greeting)'); # The documentation from the templatebody-using page is not expanded unlike($content{index}, qr{This template says hello to someone}); assert_pagespec_doesnt_match('index', 'tagged(documentation)'); # In the templatebody-using page, the documentation is expanded like($content{'templates/deftmpl'}, qr{This template says hello to someone}); assert_pagespec_matches('templates/deftmpl', 'tagged(documentation)'); # In the templatebody-using page, the template is *not* expanded unlike($content{'templates/deftmpl'}, qr{

hello, world

}); unlike($content{'templates/deftmpl'}, qr{

greetings, earthlings

}); assert_pagespec_doesnt_match('templates/deftmpl', 'tagged(greeting)'); 1; ikiwiki-3.20160121/t/template_syntax.t0000755000000000000000000000061312650125230014307 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More; plan(skip_all => 'running installed') if $ENV{INSTALLED_TESTS}; my @templates=(glob("templates/*.tmpl"), glob("doc/templates/*.mdwn")); plan(tests => 2*@templates); use HTML::Template; foreach my $template (@templates) { my $obj=eval {HTML::Template->new(filename => $template)}; ok(! $@, $template." $@"); ok($obj, $template); } ikiwiki-3.20160121/t/tag.t0000755000000000000000000000545312650125230011650 0ustar #!/usr/bin/perl package IkiWiki; use warnings; use strict; use Test::More tests => 24; BEGIN { use_ok("IkiWiki"); } BEGIN { use_ok("IkiWiki::Render"); } BEGIN { use_ok("IkiWiki::Plugin::mdwn"); } BEGIN { use_ok("IkiWiki::Plugin::tag"); } ok(! system("rm -rf t/tmp; mkdir t/tmp")); $config{srcdir} = 't/tmp'; $config{underlaydir} = 't/tmp'; $config{templatedir} = 'templates'; $config{usedirs} = 1; $config{htmlext} = 'html'; $config{wiki_file_chars} = "-[:alnum:]+/.:_"; $config{userdir} = "users"; $config{tagbase} = "tags"; $config{tag_autocreate} = 1; $config{tag_autocreate_commit} = 0; $config{default_pageext} = "mdwn"; $config{wiki_file_prune_regexps} = [qr/^\./]; $config{underlaydirbase} = '.'; is(checkconfig(), 1); %oldrenderedfiles=%pagectime=(); %pagesources=%pagemtime=%oldlinks=%links=%depends=%typedlinks=%oldtypedlinks= %destsources=%renderedfiles=%pagecase=%pagestate=(); foreach my $page (qw(tags/numbers tags/letters one two alpha beta)) { $pagesources{$page} = "$page.mdwn"; $pagemtime{$page} = $pagectime{$page} = 1000000; writefile("$page.mdwn", "t/tmp", "your ad here"); } $links{one}=[qw(tags/numbers alpha tags/letters)]; $links{two}=[qw(tags/numbers)]; $links{alpha}=[qw(tags/letters one)]; $links{beta}=[qw(tags/letters)]; $typedlinks{one}={tag => {"tags/numbers" => 1 }}; $typedlinks{two}={tag => {"tags/numbers" => 1 }}; 
$typedlinks{alpha}={tag => {"tags/letters" => 1 }}; $typedlinks{beta}={tag => {"tags/letters" => 1 }}; ok(pagespec_match("one", "tagged(numbers)")); ok(!pagespec_match("two", "tagged(alpha)")); ok(pagespec_match("one", "link(tags/numbers)")); ok(pagespec_match("one", "link(alpha)")); # emulate preprocessing [[!tag numbers primes lucky]] on page "seven", causing # the "numbers" and "primes" tag pages to be auto-created IkiWiki::Plugin::tag::preprocess_tag(page => "seven", numbers => undef, primes => undef, lucky => undef); is($autofiles{"tags/lucky.mdwn"}{plugin}, "tag"); is($autofiles{"tags/numbers.mdwn"}{plugin}, "tag"); is($autofiles{"tags/primes.mdwn"}{plugin}, "tag"); is_deeply([sort keys %autofiles], [qw(tags/lucky.mdwn tags/numbers.mdwn tags/primes.mdwn)]); ok(!-e "t/tmp/tags/lucky.mdwn"); my (%pages, @del); IkiWiki::gen_autofile("tags/lucky.mdwn", \%pages, \@del); ok(! -s "t/tmp/tags/lucky.mdwn"); ok(-s "t/tmp/.ikiwiki/transient/tags/lucky.mdwn"); is_deeply(\%pages, {"t/tmp/tags/lucky" => 1}); is_deeply(\@del, []); # generating an autofile that already exists does nothing %pages = @del = (); IkiWiki::gen_autofile("tags/numbers.mdwn", \%pages, \@del); is_deeply(\%pages, {}); is_deeply(\@del, []); # generating an autofile that we just deleted does nothing %pages = (); @del = ('tags/primes.mdwn'); IkiWiki::gen_autofile("tags/primes.mdwn", \%pages, \@del); is_deeply(\%pages, {}); is_deeply(\@del, ['tags/primes.mdwn']); # cleanup ok(! 
system("rm -rf t/tmp")); 1; ikiwiki-3.20160121/t/syslog.t0000755000000000000000000000077312650125230012415 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 5; use utf8; BEGIN { use_ok("IkiWiki"); } $IkiWiki::config{verbose} = 1; $IkiWiki::config{syslog} = 1; $IkiWiki::config{wikiname} = 'ASCII'; is(debug('test'), '', 'plain ASCII syslog'); $IkiWiki::config{wikiname} = 'not ⒶSCII and has %s in it'; is(debug('𝗧ĕṡҭ'), '', 'UTF8 syslog'); my $orig = $IkiWiki::config{wikiname}; is(debug('test'), '', 'check for idempotency'); is($IkiWiki::config{wikiname}, $orig, 'unchanged config'); ikiwiki-3.20160121/t/syntax.t0000755000000000000000000000127012650125230012414 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More; plan(skip_all => 'running installed') if $ENV{INSTALLED_TESTS}; my @progs="ikiwiki.in"; my @libs="IkiWiki.pm"; # monotone, external, amazon_s3, po, and cvs # skipped since they need perl modules push @libs, map { chomp; $_ } `find IkiWiki -type f -name \\*.pm | grep -v monotone.pm | grep -v external.pm | grep -v amazon_s3.pm | grep -v po.pm | grep -v cvs.pm`; push @libs, 'IkiWiki/Plugin/skeleton.pm.example'; plan(tests => (@progs + @libs)); foreach my $file (@progs) { ok(system("perl -c $file >/dev/null 2>&1") eq 0, $file); } foreach my $file (@libs) { ok(system("perl -c $file >/dev/null 2>&1") eq 0, $file); } ikiwiki-3.20160121/t/svn.t0000755000000000000000000000371412650125230011701 0ustar #!/usr/bin/perl use warnings; use strict; my $dir; BEGIN { $dir="/tmp/ikiwiki-test-svn.$$"; my $svn=`which svn`; chomp $svn; my $svnadmin=`which svnadmin`; chomp $svnadmin; if (! -x $svn || ! -x $svnadmin) { eval q{ use Test::More skip_all => "svn or svnadmin not available" } } if (! 
mkdir($dir)) { die $@; } } use Test::More tests => 12; BEGIN { use_ok("IkiWiki"); } %config=IkiWiki::defaultconfig(); $config{rcs} = "svn"; $config{srcdir} = "$dir/src"; $config{svnrepo} = "$dir/repo"; $config{svnpath} = "trunk"; IkiWiki::loadplugins(); IkiWiki::checkconfig(); my $svnrepo = "$dir/repo"; system "svnadmin create $svnrepo >/dev/null"; system "svn mkdir file://$svnrepo/trunk -m add >/dev/null"; system "svn co file://$svnrepo/trunk $config{srcdir} >/dev/null"; # Web commit my $test1 = readfile("t/test1.mdwn"); writefile('test1.mdwn', $config{srcdir}, $test1); IkiWiki::rcs_add("test1.mdwn"); IkiWiki::rcs_commit( file => "test1.mdwn", message => "Added the first page", token => "moo", ); my @changes; @changes = IkiWiki::rcs_recentchanges(3); is($#changes, 0); is($changes[0]{message}[0]{"line"}, "Added the first page"); is($changes[0]{pages}[0]{"page"}, "test1"); # Manual commit my $message = "Added the second page"; my $test2 = readfile("t/test2.mdwn"); writefile('test2.mdwn', $config{srcdir}, $test2); system "svn add $config{srcdir}/test2.mdwn >/dev/null"; system "svn commit $config{srcdir}/test2.mdwn -m \"$message\" >/dev/null"; @changes = IkiWiki::rcs_recentchanges(3); is($#changes, 1); is($changes[0]{message}[0]{"line"}, $message); is($changes[0]{pages}[0]{"page"}, "test2"); is($changes[1]{pages}[0]{"page"}, "test1"); # extra slashes in the path shouldn't break things $config{svnpath} = "/trunk//"; IkiWiki::checkconfig(); @changes = IkiWiki::rcs_recentchanges(3); is($#changes, 1); is($changes[0]{message}[0]{"line"}, $message); is($changes[0]{pages}[0]{"page"}, "test2"); is($changes[1]{pages}[0]{"page"}, "test1"); system "rm -rf $dir"; ikiwiki-3.20160121/t/rst.t0000755000000000000000000000123712650125230011701 0ustar #!/usr/bin/perl use warnings; use strict; BEGIN { if (system("python -c 'import docutils.core'") != 0) { eval 'use Test::More skip_all => "docutils not available"'; } } use Test::More tests => 3; BEGIN { use_ok("IkiWiki"); } 
%config=IkiWiki::defaultconfig(); $config{srcdir}=$config{destdir}="/dev/null"; $config{libdir}="."; $config{add_plugins}=[qw(rst)]; IkiWiki::loadplugins(); IkiWiki::checkconfig(); like(IkiWiki::htmlize("foo", "foo", "rst", "foo\n"), qr{\s*

foo

\s*}); # regression test for [[bugs/rst fails on file containing only a number]] my $html = IkiWiki::htmlize("foo", "foo", "rst", "11"); $html =~ s/<[^>]*>//g; like($html, qr{\s*11\s*}); ikiwiki-3.20160121/t/rssurls.t0000755000000000000000000000230612650125230012604 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 13; BEGIN { use_ok("IkiWiki::Plugin::inline"); } # Test the absolute_urls function, used to fix up relative urls for rss # feeds. sub test { my $input=shift; my $baseurl=shift; my $expected=shift; $expected=~s/URL/$baseurl/g; is(IkiWiki::absolute_urls($input, $baseurl), $expected); # try it with single quoting -- it's ok if the result comes back # double or single-quoted $input=~s/"/'/g; my $expected_alt=$expected; $expected_alt=~s/"/'/g; my $ret=IkiWiki::absolute_urls($input, $baseurl); ok(($ret eq $expected) || ($ret eq $expected_alt), "$ret vs $expected"); } sub unchanged { test($_[0], $_[1], $_[0]); } my $url="http://example.com/blog/foo/"; unchanged("foo", $url); unchanged('', $url, ); test('', $url, ''); test('', $url, ''); test('', $url, ''); test('', $url, ''); # off until bug #603736 is fixed #test('this is the name of my wiki/}s) { $bits{tophref} = $1; } if ($content =~ qr{]+href="([^"]+)\?do=prefs"}) { $bits{cgihref} = $1; } return %bits; } sub write_old_file { my $name = shift; my $content = shift; writefile($name, "t/tmp/in", $content); ok(utime(333333333, 333333333, "t/tmp/in/$name")); } sub write_setup_file { my (%args) = @_; my $urlline = defined $args{url} ? "url: $args{url}" : ""; my $w3mmodeline = defined $args{w3mmode} ? "w3mmode: $args{w3mmode}" : ""; my $reverseproxyline = defined $args{reverse_proxy} ? "reverse_proxy: $args{reverse_proxy}" : ""; writefile("test.setup", "t/tmp", < on static HTML unlike($content, qr{A: a}); like($content, qr{
  • B: b
  • }); like($content, qr{
  • E: e
  • }); } sub run_cgi { my (%args) = @_; my ($in, $out); my $is_preview = delete $args{is_preview}; my $is_https = delete $args{is_https}; my %defaults = ( SCRIPT_NAME => '/cgi-bin/ikiwiki.cgi', HTTP_HOST => 'example.com', ); if (defined $is_preview) { $defaults{REQUEST_METHOD} = 'POST'; $in = 'do=edit&page=a/b/c&Preview'; $defaults{CONTENT_LENGTH} = length $in; } else { $defaults{REQUEST_METHOD} = 'GET'; $defaults{QUERY_STRING} = 'do=prefs'; } if (defined $is_https) { $defaults{SERVER_PORT} = '443'; $defaults{HTTPS} = 'on'; } else { $defaults{SERVER_PORT} = '80'; } my %envvars = ( %defaults, %args, ); run(["./t/tmp/ikiwiki.cgi"], \$in, \$out, init => sub { map { $ENV{$_} = $envvars{$_} } keys(%envvars); }); return $out; } sub test_startup { ok(! system("rm -rf t/tmp")); ok(! system("mkdir t/tmp")); write_old_file("a.mdwn", "A"); write_old_file("a/b.mdwn", "B"); write_old_file("a/b/c.mdwn", "* A: [[a]]\n". "* B: [[b]]\n". "* E: [[a/d/e]]\n"); write_old_file("a/d.mdwn", "D"); write_old_file("a/d/e.mdwn", "E"); } sub test_site1_perfectly_ordinary_ikiwiki { write_setup_file( url => "http://example.com/wiki/", cgiurl => "http://example.com/cgi-bin/ikiwiki.cgi", ); thoroughly_rebuild(); check_cgi_mode_bits(); # url and cgiurl are on the same host so the cgiurl is host-relative check_generated_content(qr{]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"}); my %bits = parse_cgi_content(run_cgi()); like($bits{basehref}, qr{^(?:(?:http:)?//example\.com)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:http:)?//example.com)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.)/$}); like($bits{cgihref}, qr{^(?:(?:http:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); # when accessed via HTTPS, links are secure %bits = parse_cgi_content(run_cgi(is_https => 1)); like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.)/$}); like($bits{cgihref}, 
qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); # when accessed via a different hostname, links stay on that host %bits = parse_cgi_content(run_cgi(HTTP_HOST => 'staging.example.net')); like($bits{basehref}, qr{^(?:(?:http:)?//staging\.example\.net)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:http:)?//staging.example.net)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.)/$}); like($bits{cgihref}, qr{^(?:(?:http:)?//staging.example.net)?/cgi-bin/ikiwiki.cgi$}); # previewing a page %bits = parse_cgi_content(run_cgi(is_preview => 1)); like($bits{basehref}, qr{^(?:(?:http:)?//example\.com)?/wiki/a/b/c/$}); like($bits{stylehref}, qr{^(?:(?:http:)?//example.com)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$}); like($bits{cgihref}, qr{^(?:(?:http:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); } sub test_site2_static_content_and_cgi_on_different_servers { write_setup_file( url => "http://static.example.com/", cgiurl => "http://cgi.example.com/ikiwiki.cgi", ); thoroughly_rebuild(); check_cgi_mode_bits(); # url and cgiurl are not on the same host so the cgiurl has to be # protocol-relative or absolute check_generated_content(qr{]+href="(?:http:)?//cgi.example.com/ikiwiki.cgi\?do=prefs"}); my %bits = parse_cgi_content(run_cgi(SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com')); like($bits{basehref}, qr{^(?:(?:http:)?//static.example.com)?/$}); like($bits{stylehref}, qr{^(?:(?:http:)?//static.example.com)?/style.css$}); like($bits{tophref}, qr{^(?:http:)?//static.example.com/$}); like($bits{cgihref}, qr{^(?:(?:http:)?//cgi.example.com)?/ikiwiki.cgi$}); # when accessed via HTTPS, links are secure %bits = parse_cgi_content(run_cgi(is_https => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com')); like($bits{basehref}, qr{^(?:https:)?//static\.example\.com/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//static.example.com)?/style.css$}); like($bits{tophref}, qr{^(?:https:)?//static.example.com/$}); 
like($bits{cgihref}, qr{^(?:(?:https:)?//cgi.example.com)?/ikiwiki.cgi$}); # when accessed via a different hostname, links to the CGI (only) should # stay on that host? %bits = parse_cgi_content(run_cgi(is_preview => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'staging.example.net')); like($bits{basehref}, qr{^(?:http:)?//static\.example\.com/a/b/c/$}); like($bits{stylehref}, qr{^(?:(?:http:)?//static.example.com|\.\./\.\./\.\.)/style.css$}); like($bits{tophref}, qr{^(?:(?:http:)?//static.example.com|\.\./\.\./\.\.)/$}); like($bits{cgihref}, qr{^(?:(?:http:)?//(?:staging\.example\.net|cgi\.example\.com))?/ikiwiki.cgi$}); TODO: { local $TODO = "use self-referential CGI URL?"; like($bits{cgihref}, qr{^(?:(?:http:)?//staging.example.net)?/ikiwiki.cgi$}); } } sub test_site3_we_specifically_want_everything_to_be_secure { write_setup_file( url => "https://example.com/wiki/", cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi", ); thoroughly_rebuild(); check_cgi_mode_bits(); # url and cgiurl are on the same host so the cgiurl is host-relative check_generated_content(qr{]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"}); # when accessed via HTTPS, links are secure my %bits = parse_cgi_content(run_cgi(is_https => 1)); like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); # when not accessed via HTTPS, links should still be secure # (but if this happens, that's a sign of web server misconfiguration) %bits = parse_cgi_content(run_cgi()); like($bits{tophref}, qr{^(?:/wiki|\.)/$}); TODO: { local $TODO = "treat https in configured url, cgiurl as required?"; is($bits{basehref}, "https://example.com/wiki/"); like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$}); } like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); # when accessed 
via a different hostname, links stay on that host %bits = parse_cgi_content(run_cgi(is_https => 1, HTTP_HOST => 'staging.example.net')); like($bits{basehref}, qr{^(?:(?:https:)?//staging\.example\.net)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//staging.example.net)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//staging.example.net)?/cgi-bin/ikiwiki.cgi$}); # previewing a page %bits = parse_cgi_content(run_cgi(is_preview => 1, is_https => 1)); like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/a/b/c/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); } sub test_site4_cgi_is_secure_static_content_doesnt_have_to_be { # (NetBSD wiki) write_setup_file( url => "http://example.com/wiki/", cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi", ); thoroughly_rebuild(); check_cgi_mode_bits(); # url and cgiurl are on the same host but different schemes check_generated_content(qr{]+href="https://example.com/cgi-bin/ikiwiki.cgi\?do=prefs"}); # when accessed via HTTPS, links are secure (to avoid mixed-content) my %bits = parse_cgi_content(run_cgi(is_https => 1)); like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); # FIXME: when not accessed via HTTPS, should the static content be # forced to https anyway? 
For now we accept either %bits = parse_cgi_content(run_cgi()); like($bits{basehref}, qr{^(?:(?:https?)?://example\.com)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:https?:)?//example.com)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:(?:https?://example.com)?/wiki|\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); # when accessed via a different hostname, links stay on that host %bits = parse_cgi_content(run_cgi(is_https => 1, HTTP_HOST => 'staging.example.net')); # because the static and dynamic stuff is on the same server, we assume that # both are also on the staging server like($bits{basehref}, qr{^(?:(?:https:)?//staging\.example\.net)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//staging.example.net)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:(?:(?:https:)?//staging.example.net)?/wiki|\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//(?:staging\.example\.net|example\.com))?/cgi-bin/ikiwiki.cgi$}); TODO: { local $TODO = "this should really point back to itself but currently points to example.com"; like($bits{cgihref}, qr{^(?:(?:https:)?//staging.example.net)?/cgi-bin/ikiwiki.cgi$}); } # previewing a page %bits = parse_cgi_content(run_cgi(is_preview => 1, is_https => 1)); like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/a/b/c/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$}); like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); } sub test_site5_w3mmode { # as documented in [[w3mmode]] write_setup_file( url => undef, cgiurl => "ikiwiki.cgi", w3mmode => 1, ); thoroughly_rebuild(); check_cgi_mode_bits(); # FIXME: does /$LIB/ikiwiki-w3m.cgi work under w3m? 
check_generated_content(qr{]+href="(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi\?do=prefs"}); my %bits = parse_cgi_content(run_cgi(PATH_INFO => '/ikiwiki.cgi', SCRIPT_NAME => '/cgi-bin/ikiwiki-w3m.cgi')); my $pwd = getcwd(); like($bits{tophref}, qr{^(?:\Q$pwd\E/t/tmp/out|\.)/$}); like($bits{cgihref}, qr{^(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi$}); like($bits{basehref}, qr{^(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out/$}); like($bits{stylehref}, qr{^(?:(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out|\.)/style.css$}); } sub test_site6_behind_reverse_proxy { write_setup_file( url => "https://example.com/wiki/", cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi", reverse_proxy => 1, ); thoroughly_rebuild(); check_cgi_mode_bits(); # url and cgiurl are on the same host so the cgiurl is host-relative check_generated_content(qr{]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"}); # because we are behind a reverse-proxy we must assume that # we're being accessed by the configured cgiurl my %bits = parse_cgi_content(run_cgi(HTTP_HOST => 'localhost')); like($bits{tophref}, qr{^(?:/wiki|\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$}); # previewing a page %bits = parse_cgi_content(run_cgi(is_preview => 1, HTTP_HOST => 'localhost')); like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$}); like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$}); like($bits{basehref}, qr{^(?:(?:https)?://example\.com)?/wiki/a/b/c/$}); like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$}); } test_startup(); test_site1_perfectly_ordinary_ikiwiki(); test_site2_static_content_and_cgi_on_different_servers(); test_site3_we_specifically_want_everything_to_be_secure(); test_site4_cgi_is_secure_static_content_doesnt_have_to_be(); test_site5_w3mmode(); test_site6_behind_reverse_proxy(); done_testing(); 
ikiwiki-3.20160121/t/readfile.t0000755000000000000000000000044712650125230012646 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 3; use Encode; BEGIN { use_ok("IkiWiki"); } # should read files as utf8 ok(Encode::is_utf8(readfile("t/test1.mdwn"), 1)); is(readfile("t/test1.mdwn"), Encode::decode_utf8('![o](../images/o.jpg "ó")'."\n".'óóóóó'."\n")); ikiwiki-3.20160121/t/prune.t0000755000000000000000000000114112650125230012214 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 6; use File::Path qw(make_path remove_tree); BEGIN { use_ok("IkiWiki"); } BEGIN { use_ok("IkiWiki::Render"); } %config=IkiWiki::defaultconfig(); remove_tree("t/tmp"); make_path("t/tmp/srcdir/a/b/c"); make_path("t/tmp/srcdir/d/e/f"); writefile("a/b/c/d.mdwn", "t/tmp/srcdir", "foo"); writefile("d/e/f/g.mdwn", "t/tmp/srcdir", "foo"); IkiWiki::prune("t/tmp/srcdir/d/e/f/g.mdwn"); ok(-d "t/tmp/srcdir"); ok(! -e "t/tmp/srcdir/d"); IkiWiki::prune("t/tmp/srcdir/a/b/c/d.mdwn", "t/tmp/srcdir"); ok(-d "t/tmp/srcdir"); ok(! 
-e "t/tmp/srcdir/a"); ikiwiki-3.20160121/t/preprocess.t0000755000000000000000000001005712650125230013256 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 34; BEGIN { use_ok("IkiWiki"); } $IkiWiki::hooks{preprocess}{foo}{call}=sub { my @bits; while (@_) { my $key=shift; my $value=shift; next if $key eq 'page' || $key eq 'destpage' || $key eq 'preview'; if (length $value) { push @bits, "$key => $value"; } else { push @bits, $key; } } return "foo(".join(", ", @bits).")"; }; is(IkiWiki::preprocess("foo", "foo", "[[foo]]", 0, 0), "[[foo]]", "not wikilink"); is(IkiWiki::preprocess("foo", "foo", "[[foo ]]", 0, 0), "foo()", "simple"); is(IkiWiki::preprocess("foo", "foo", "[[!foo ]]", 0, 0), "foo()", "prefixed"); is(IkiWiki::preprocess("foo", "foo", "[[!foo]]", 0, 0), "[[!foo]]", "prefixed, no space"); is(IkiWiki::preprocess("foo", "foo", "[[foo a=1]]", 0, 0), "foo(a => 1)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a="1"]]}, 0, 0), "foo(a => 1)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a="""1"""]]}, 0, 0), "foo(a => 1)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a=""]]}, 0, 0), "foo(a)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a="" b="1"]]}, 0, 0), "foo(a, b => 1)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a=""""""]]}, 0, 0), "foo(a)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a="""""" b="1"]]}, 0, 0), "foo(a, b => 1)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a="""""" b="""1"""]]}, 0, 0), "foo(a, b => 1)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a="""""" b=""""""]]}, 0, 0), "foo(a, b)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a="" b=""""""]]}, 0, 0), "foo(a, b)"); is(IkiWiki::preprocess("foo", "foo", q{[[foo a="" b="""1"""]]}, 0, 0), "foo(a, b => 1)"); is(IkiWiki::preprocess("foo", "foo", "[[foo a=\"1 2 3 4\"]]", 0, 0), "foo(a => 1 2 3 4)"); is(IkiWiki::preprocess("foo", "foo", "[[foo ]] then [[foo a=2]]", 0, 0), "foo() then foo(a => 2)"); is(IkiWiki::preprocess("foo", "foo", "[[foo b c \"d and 
e=f\"]]", 0, 0), "foo(b, c, d and e=f)"); is(IkiWiki::preprocess("foo", "foo", "[[foo a=1 b c=1]]", 0, 0), "foo(a => 1, b, c => 1)"); is(IkiWiki::preprocess("foo", "foo", "[[foo a=1 b c=1 \t\t]]", 0, 0), "foo(a => 1, b, c => 1)", "whitespace"); is(IkiWiki::preprocess("foo", "foo", "[[foo a=1\nb \nc=1]]", 0, 0), "foo(a => 1, b, c => 1)", "multiline directive"); is(IkiWiki::preprocess("foo", "foo", "[[foo a=1 a=2 a=3]]", 0, 0), "foo(a => 1, a => 2, a => 3)", "dup item"); is(IkiWiki::preprocess("foo", "foo", '[[foo a="[[bracketed]]" b=1]]', 0, 0), "foo(a => [[bracketed]], b => 1)"); my $multiline="here is my \"first\" !! [[multiline ]] !! string!"; is(IkiWiki::preprocess("foo", "foo", '[[foo a="""'.$multiline.'"""]]', 0, 0), "foo(a => $multiline)"); is(IkiWiki::preprocess("foo", "foo", '[[foo """'.$multiline.'"""]]', 0, 0), "foo($multiline)"); is(IkiWiki::preprocess("foo", "foo", '[[foo a="""'.$multiline.'""" b="foo"]]', 0, 0), "foo(a => $multiline, b => foo)"); is(IkiWiki::preprocess("foo", "foo", '[[foo a="""'."\n".$multiline."\n".'""" b="foo"]]', 0, 0), "foo(a => $multiline, b => foo)", "leading/trailing newline stripped"); my $long='[[foo a="""'.("a" x 100000).''; is(IkiWiki::preprocess("foo", "foo", $long, 0, 0), $long, "unterminated triple-quoted string inside unterminated directive(should not warn about over-recursion)"); is(IkiWiki::preprocess("foo", "foo", $long."]]", 0, 0), $long."]]", "unterminated triple-quoted string is not treated as a bare word"); is(IkiWiki::preprocess("foo", "foo", "[[!foo a=< $multiline)", "nested strings via heredoc (for key)"); is(IkiWiki::preprocess("foo", "foo", "[[!foo < $multiline)", "nested multiline strings"); } ikiwiki-3.20160121/t/podcast.t0000755000000000000000000001446312650125230012533 0ustar #!/usr/bin/perl use warnings; use strict; BEGIN { eval q{use XML::Feed; use HTML::Parser; use HTML::LinkExtor; use File::MimeInfo}; if ($@) { eval q{use Test::More skip_all => "XML::Feed and/or HTML::Parser or File::MimeInfo not 
available"}; } else { eval q{use Test::More tests => 137}; } } use Cwd; use File::Basename; my $installed = $ENV{INSTALLED_TESTS}; my @base_command; if ($installed) { ok(1, "running installed"); @base_command = qw(ikiwiki); } else { ok(! system("make -s ikiwiki.out")); @base_command = qw(perl -I. ./ikiwiki.out --underlaydir=underlays/basewiki --set underlaydirbase=underlays --templatedir=templates); } my $tmp = 't/tmp'; my $statedir = 't/tinypodcast/.ikiwiki'; sub podcast { my $podcast_style = shift; my $baseurl = 'http://example.com'; my @command = (@base_command, qw(--plugin inline --rss --atom)); push @command, "--url=$baseurl", qw(t/tinypodcast), "$tmp/out"; ok(! system("mkdir $tmp"), q{setup}); ok(! system(@command), q{build}); my %media_types = ( 'simplepost' => undef, 'piano.mp3' => 'audio/mpeg', 'scroll.3gp' => 'video/3gpp', 'walter.ogg' => 'audio/ogg', ); for my $format (qw(atom rss)) { my $feed = XML::Feed->parse("$tmp/out/$podcast_style/index.$format"); is($feed->title, $podcast_style, qq{$format feed title}); is($feed->link, "$baseurl/$podcast_style/", qq{$format feed link}); is($feed->description, 'wiki', qq{$format feed description}); if ('atom' eq $format) { is($feed->author, $feed->description, qq{$format feed author}); is($feed->id, $feed->link, qq{$format feed id}); is($feed->generator, "ikiwiki", qq{$format feed generator}); } for my $entry ($feed->entries) { my $title = $entry->title; my $url = $entry->id; my $body = $entry->content->body; my $enclosure = $entry->enclosure; is($entry->link, $url, qq{$format $title link}); isnt($entry->issued, undef, qq{$format $title issued date}); isnt($entry->modified, undef, qq{$format $title modified date}); if (defined $media_types{$title}) { is($url, "$baseurl/$title", qq{$format $title id}); is($body, undef, qq{$format $title no body text}); is($enclosure->url, $url, qq{$format $title enclosure url}); is($enclosure->type, $media_types{$title}, qq{$format $title enclosure type}); cmp_ok($enclosure->length, 
'>', 0, qq{$format $title enclosure length}); } else { # XXX hack hack hack my $expected_id = "$baseurl/$title/"; $expected_id =~ s/\ /_/g; is($url, $expected_id, qq{$format $title id}); isnt($body, undef, qq{$format $title body text}); if ('fancy' eq $podcast_style) { isnt($enclosure, undef, qq{$format $title enclosure}); my $filename = basename($enclosure->url); is($enclosure->type, $media_types{$filename}, qq{$format $title enclosure type}); cmp_ok($enclosure->length, '>', 0, qq{$format $title enclosure length}); } else { is($enclosure, undef, qq{$format $title no enclosure}); } } } } ok(! system("rm -rf $tmp $statedir"), q{teardown}); } sub single_page_html { my @command = @base_command; push @command, qw(t/tinypodcast), "$tmp/out"; ok(! system("mkdir $tmp"), q{setup}); ok(! system(@command), q{build}); my $html = "$tmp/out/pianopost/index.html"; like(_extract_html_content($html, 'content'), qr/has content and/m, q{html body text}); like(_extract_html_content($html, 'enclosure'), qr/Download/m, q{html enclosure}); my ($href) = _extract_html_links($html, 'piano'); is($href, '/piano.mp3', q{html enclosure sans --url is site-absolute}); $html = "$tmp/out/attempted_multiple_enclosures/index.html"; like(_extract_html_content($html, 'content'), qr/has content and/m, q{html body text}); like(_extract_html_content($html, 'enclosure'), qr/Download/m, q{html enclosure}); ($href) = _extract_html_links($html, 'walter'); is($href, '/walter.ogg', q{html enclosure sans --url is site-absolute}); my $baseurl = 'http://example.com'; ok(! system(@command, "--url=$baseurl", q{--rebuild})); $html = "$tmp/out/pianopost/index.html"; ($href) = _extract_html_links($html, 'piano'); is($href, "$baseurl/piano.mp3", q{html enclosure with --url is fully absolute}); $html = "$tmp/out/attempted_multiple_enclosures/index.html"; ($href) = _extract_html_links($html, 'walter'); is($href, "$baseurl/walter.ogg", q{html enclosure with --url is fully absolute}); ok(! 
system("rm -rf $tmp $statedir"), q{teardown}); } sub inlined_pages_html { my @command = (@base_command, qw(--plugin inline)); push @command, qw(t/tinypodcast), "$tmp/out"; ok(! system("mkdir $tmp"), q{setup}); ok(! system(@command), q{build}); my $html = "$tmp/out/fancy/index.html"; my $contents = _extract_html_content($html, 'content'); like($contents, qr/has content and an/m, q{html body text from pianopost}); like($contents, qr/has content and only one/m, q{html body text from attempted_multiple_enclosures}); my $enclosures = _extract_html_content($html, 'inlineenclosure'); like($enclosures, qr/Download/m, q{html enclosure}); my ($href) = _extract_html_links($html, 'piano.mp3'); is($href, '/piano.mp3', q{html enclosure from pianopost sans --url}); ($href) = _extract_html_links($html, 'walter.ogg'); is($href, '/walter.ogg', q{html enclosure from attempted_multiple_enclosures sans --url}); ok(! system("rm -rf $tmp $statedir"), q{teardown}); } sub _extract_html_content { my ($file, $desired_id, $desired_tag) = @_; $desired_tag = 'div' unless defined $desired_tag; my $p = HTML::Parser->new(api_version => 3); my $content = ''; $p->handler(start => sub { my ($tag, $self, $attr) = @_; return if $tag ne $desired_tag; return unless exists $attr->{id} && $attr->{id} eq $desired_id; $self->handler(text => sub { my ($dtext) = @_; $content .= $dtext; }, "dtext"); }, "tagname,self,attr"); $p->parse_file($file) || die $!; return $content; } sub _extract_html_links { my ($file, $desired_value) = @_; my @hrefs = (); my $p = HTML::LinkExtor->new(sub { my ($tag, %attr) = @_; return if $tag ne 'a'; return unless $attr{href} =~ qr/$desired_value/; push(@hrefs, values %attr); }, getcwd() . '/' . 
$file); $p->parse_file($file); return @hrefs; } podcast('simple'); single_page_html(); inlined_pages_html(); podcast('fancy'); ikiwiki-3.20160121/t/po.t0000755000000000000000000003034712650125230011513 0ustar #!/usr/bin/perl # -*- cperl-indent-level: 8; -*- use warnings; use strict; use File::Temp qw{tempdir}; BEGIN { unless (eval { require Locale::Po4a::Chooser }) { eval q{ use Test::More skip_all => "Locale::Po4a::Chooser::new is not available" } } unless (eval { require Locale::Po4a::Po }) { eval q{ use Test::More skip_all => "Locale::Po4a::Po::new is not available" } } } use Test::More tests => 114; BEGIN { use_ok("IkiWiki"); } my $msgprefix; my $dir = tempdir("ikiwiki-test-po.XXXXXXXXXX", DIR => File::Spec->tmpdir, CLEANUP => 1); ### Init %config=IkiWiki::defaultconfig(); $config{srcdir} = "$dir/src"; $config{destdir} = "$dir/dst"; $config{destdir} = "$dir/dst"; $config{underlaydirbase} = "/dev/null"; $config{underlaydir} = "/dev/null"; $config{url} = "http://example.com"; $config{cgiurl} = "http://example.com/ikiwiki.cgi"; $config{discussion} = 0; $config{po_master_language} = { code => 'en', name => 'English' }; $config{po_slave_languages} = { es => 'Castellano', fr => "Français" }; $config{po_translatable_pages}='index or test1 or test2 or translatable'; $config{po_link_to}='negotiated'; IkiWiki::loadplugins(); ok(IkiWiki::loadplugin('meta'), "meta plugin loaded"); ok(IkiWiki::loadplugin('po'), "po plugin loaded"); IkiWiki::checkconfig(); ### seed %pagesources and %pagecase $pagesources{'index'}='index.mdwn'; $pagesources{'index.fr'}='index.fr.po'; $pagesources{'index.es'}='index.es.po'; $pagesources{'test1'}='test1.mdwn'; $pagesources{'test1.es'}='test1.es.po'; $pagesources{'test1.fr'}='test1.fr.po'; $pagesources{'test2'}='test2.mdwn'; $pagesources{'test2.es'}='test2.es.po'; $pagesources{'test2.fr'}='test2.fr.po'; $pagesources{'test3'}='test3.mdwn'; $pagesources{'test3.es'}='test3.es.mdwn'; $pagesources{'translatable'}='translatable.mdwn'; 
$pagesources{'translatable.fr'}='translatable.fr.po'; $pagesources{'translatable.es'}='translatable.es.po'; $pagesources{'nontranslatable'}='nontranslatable.mdwn'; foreach my $page (keys %pagesources) { $IkiWiki::pagecase{lc $page}=$page; } ### populate srcdir writefile('index.mdwn', $config{srcdir}, "[[!meta title=\"index title\"]]\n[[translatable]] [[nontranslatable]]"); writefile('test1.mdwn', $config{srcdir}, "[[!meta title=\"test1 title\"]]\ntest1 content"); writefile('test2.mdwn', $config{srcdir}, 'test2 content'); writefile('test3.mdwn', $config{srcdir}, 'test3 content'); writefile('translatable.mdwn', $config{srcdir}, '[[nontranslatable]]'); writefile('nontranslatable.mdwn', $config{srcdir}, '[[/]] [[translatable]]'); ### istranslatable/istranslation # we run these tests twice because memoization attempts made them # succeed once every two tries... foreach (1, 2) { ok(IkiWiki::Plugin::po::istranslatable('index'), "index is translatable"); ok(IkiWiki::Plugin::po::istranslatable('/index'), "/index is translatable"); ok(! IkiWiki::Plugin::po::istranslatable('index.fr'), "index.fr is not translatable"); ok(! IkiWiki::Plugin::po::istranslatable('index.es'), "index.es is not translatable"); ok(! IkiWiki::Plugin::po::istranslatable('/index.fr'), "/index.fr is not translatable"); ok(! IkiWiki::Plugin::po::istranslation('index'), "index is not a translation"); ok(IkiWiki::Plugin::po::istranslation('index.fr'), "index.fr is a translation"); ok(IkiWiki::Plugin::po::istranslation('index.es'), "index.es is a translation"); ok(IkiWiki::Plugin::po::istranslation('/index.fr'), "/index.fr is a translation"); ok(IkiWiki::Plugin::po::istranslatable('test1'), "test1 is translatable"); ok(IkiWiki::Plugin::po::istranslation('test1.es'), "test1.es is a translation"); ok(IkiWiki::Plugin::po::istranslation('test1.fr'), "test1.fr is a translation"); ok(IkiWiki::Plugin::po::istranslatable('test2'), "test2 is translatable"); ok(! 
IkiWiki::Plugin::po::istranslation('test2'), "test2 is not a translation"); ok(! IkiWiki::Plugin::po::istranslatable('test3'), "test3 is not translatable"); ok(! IkiWiki::Plugin::po::istranslation('test3'), "test3 is not a translation"); } ### pofiles my @pofiles = IkiWiki::Plugin::po::pofiles(srcfile("index.mdwn")); ok( @pofiles, "pofiles is defined"); ok( @pofiles == 2, "pofiles has correct size"); is_deeply(\@pofiles, ["$config{srcdir}/index.es.po", "$config{srcdir}/index.fr.po"], "pofiles content is correct"); ### links require IkiWiki::Render; sub refresh_n_scan(@) { my @masterfiles_rel=@_; foreach my $masterfile_rel (@masterfiles_rel) { my $masterfile=srcfile($masterfile_rel); IkiWiki::scan($masterfile_rel); next unless IkiWiki::Plugin::po::istranslatable(pagename($masterfile_rel)); my @pofiles=IkiWiki::Plugin::po::pofiles($masterfile); IkiWiki::Plugin::po::refreshpot($masterfile); IkiWiki::Plugin::po::refreshpofiles($masterfile, @pofiles); map IkiWiki::scan(IkiWiki::abs2rel($_, $config{srcdir})), @pofiles; } } $config{po_link_to}='negotiated'; $msgprefix="links (po_link_to=negotiated)"; refresh_n_scan('index.mdwn', 'translatable.mdwn', 'nontranslatable.mdwn'); is_deeply(\@{$links{'index'}}, ['translatable', 'nontranslatable'], "$msgprefix index"); is_deeply(\@{$links{'index.es'}}, ['translatable.es', 'nontranslatable'], "$msgprefix index.es"); is_deeply(\@{$links{'index.fr'}}, ['translatable.fr', 'nontranslatable'], "$msgprefix index.fr"); is_deeply(\@{$links{'translatable'}}, ['nontranslatable'], "$msgprefix translatable"); is_deeply(\@{$links{'translatable.es'}}, ['nontranslatable'], "$msgprefix translatable.es"); is_deeply(\@{$links{'translatable.fr'}}, ['nontranslatable'], "$msgprefix translatable.fr"); is_deeply([sort @{$links{'nontranslatable'}}], [sort('/', 'translatable', 'translatable.fr', 'translatable.es')], "$msgprefix nontranslatable"); $config{po_link_to}='current'; $msgprefix="links (po_link_to=current)"; refresh_n_scan('index.mdwn', 
'translatable.mdwn', 'nontranslatable.mdwn'); is_deeply(\@{$links{'index'}}, ['translatable', 'nontranslatable'], "$msgprefix index"); is_deeply(\@{$links{'index.es'}}, [ (map bestlink('index.es', $_), ('translatable.es', 'nontranslatable'))], "$msgprefix index.es"); is_deeply(\@{$links{'index.fr'}}, [ (map bestlink('index.fr', $_), ('translatable.fr', 'nontranslatable'))], "$msgprefix index.fr"); is_deeply(\@{$links{'translatable'}}, [bestlink('translatable', 'nontranslatable')], "$msgprefix translatable"); is_deeply(\@{$links{'translatable.es'}}, ['nontranslatable'], "$msgprefix translatable.es"); is_deeply(\@{$links{'translatable.fr'}}, ['nontranslatable'], "$msgprefix translatable.fr"); is_deeply([sort @{$links{'nontranslatable'}}], [sort('/', 'translatable', 'translatable.fr', 'translatable.es')], "$msgprefix nontranslatable"); ### targetpage $config{usedirs}=0; $msgprefix="targetpage (usedirs=0)"; is(targetpage('test1', 'html'), 'test1.en.html', "$msgprefix test1"); is(targetpage('test1.fr', 'html'), 'test1.fr.html', "$msgprefix test1.fr"); $config{usedirs}=1; $msgprefix="targetpage (usedirs=1)"; is(targetpage('index', 'html'), 'index.en.html', "$msgprefix index"); is(targetpage('index.fr', 'html'), 'index.fr.html', "$msgprefix index.fr"); is(targetpage('test1', 'html'), 'test1/index.en.html', "$msgprefix test1"); is(targetpage('test1.fr', 'html'), 'test1/index.fr.html', "$msgprefix test1.fr"); is(targetpage('test3', 'html'), 'test3/index.html', "$msgprefix test3 (non-translatable page)"); is(targetpage('test3.es', 'html'), 'test3.es/index.html', "$msgprefix test3.es (non-translatable page)"); ### urlto -> index $config{po_link_to}='current'; $msgprefix="urlto (po_link_to=current)"; is(urlto('', 'index'), './index.en.html', "$msgprefix index -> ''"); is(urlto('', 'nontranslatable'), '../index.en.html', "$msgprefix nontranslatable -> ''"); is(urlto('', 'translatable.fr'), '../index.fr.html', "$msgprefix translatable.fr -> ''"); # when asking for a 
semi-absolute or absolute URL, we can't know what the # current language is, so for translatable pages we use the master language is(urlto('nontranslatable'), '/nontranslatable/', "$msgprefix 1-arg -> nontranslatable"); is(urlto('translatable'), '/translatable/index.en.html', "$msgprefix 1-arg -> translatable"); is(urlto('nontranslatable', undef, 1), 'http://example.com/nontranslatable/', "$msgprefix 1-arg -> nontranslatable"); is(urlto('index', undef, 1), 'http://example.com/index.en.html', "$msgprefix 1-arg -> index"); is(urlto('', undef, 1), 'http://example.com/index.en.html', "$msgprefix 1-arg -> ''"); # FIXME: should these three produce the negotiatable URL instead of the master # language? is(urlto(''), '/index.en.html', "$msgprefix 1-arg -> ''"); is(urlto('index'), '/index.en.html', "$msgprefix 1-arg -> index"); is(urlto('translatable', undef, 1), 'http://example.com/translatable/index.en.html', "$msgprefix 1-arg -> translatable"); $config{po_link_to}='negotiated'; $msgprefix="urlto (po_link_to=negotiated)"; is(urlto('', 'index'), './', "$msgprefix index -> ''"); is(urlto('', 'nontranslatable'), '../', "$msgprefix nontranslatable -> ''"); is(urlto('', 'translatable.fr'), '../', "$msgprefix translatable.fr -> ''"); is(urlto('nontranslatable'), '/nontranslatable/', "$msgprefix 1-arg -> nontranslatable"); is(urlto('translatable'), '/translatable/', "$msgprefix 1-arg -> translatable"); is(urlto(''), '/', "$msgprefix 1-arg -> ''"); is(urlto('index'), '/', "$msgprefix 1-arg -> index"); is(urlto('nontranslatable', undef, 1), 'http://example.com/nontranslatable/', "$msgprefix 1-arg -> nontranslatable"); is(urlto('translatable', undef, 1), 'http://example.com/translatable/', "$msgprefix 1-arg -> translatable"); is(urlto('index', undef, 1), 'http://example.com/', "$msgprefix 1-arg -> index"); is(urlto('', undef, 1), 'http://example.com/', "$msgprefix 1-arg -> ''"); ### bestlink $config{po_link_to}='current'; $msgprefix="bestlink (po_link_to=current)"; 
is(bestlink('test1.fr', 'test2'), 'test2.fr', "$msgprefix test1.fr -> test2"); is(bestlink('test1.fr', 'test2.es'), 'test2.es', "$msgprefix test1.fr -> test2.es"); $config{po_link_to}='negotiated'; $msgprefix="bestlink (po_link_to=negotiated)"; is(bestlink('test1.fr', 'test2'), 'test2.fr', "$msgprefix test1.fr -> test2"); is(bestlink('test1.fr', 'test2.es'), 'test2.es', "$msgprefix test1.fr -> test2.es"); ### beautify_urlpath $config{po_link_to}='default'; $msgprefix="beautify_urlpath (po_link_to=default)"; is(IkiWiki::beautify_urlpath('test1/index.en.html'), './test1/index.en.html', "$msgprefix test1/index.en.html"); is(IkiWiki::beautify_urlpath('test1/index.fr.html'), './test1/index.fr.html', "$msgprefix test1/index.fr.html"); $config{po_link_to}='negotiated'; $msgprefix="beautify_urlpath (po_link_to=negotiated)"; is(IkiWiki::beautify_urlpath('test1/index.html'), './test1/', "$msgprefix test1/index.html"); is(IkiWiki::beautify_urlpath('test1/index.en.html'), './test1/', "$msgprefix test1/index.en.html"); is(IkiWiki::beautify_urlpath('test1/index.fr.html'), './test1/', "$msgprefix test1/index.fr.html"); $config{po_link_to}='current'; $msgprefix="beautify_urlpath (po_link_to=current)"; is(IkiWiki::beautify_urlpath('test1/index.en.html'), './test1/index.en.html', "$msgprefix test1/index.en.html"); is(IkiWiki::beautify_urlpath('test1/index.fr.html'), './test1/index.fr.html', "$msgprefix test1/index.fr.html"); ### re-scan refresh_n_scan('index.mdwn'); is($pagestate{'index'}{meta}{title}, 'index title'); is($pagestate{'index.es'}{meta}{title}, 'index title'); is($pagestate{'index.fr'}{meta}{title}, 'index title'); refresh_n_scan('test1.mdwn'); is($pagestate{'test1'}{meta}{title}, 'test1 title'); is($pagestate{'test1.es'}{meta}{title}, 'test1 title'); is($pagestate{'test1.fr'}{meta}{title}, 'test1 title'); ### istranslatedto ok(IkiWiki::Plugin::po::istranslatedto('index', 'es')); ok(IkiWiki::Plugin::po::istranslatedto('index', 'fr')); ok(! 
IkiWiki::Plugin::po::istranslatedto('index', 'cz')); ok(IkiWiki::Plugin::po::istranslatedto('test1', 'es')); ok(IkiWiki::Plugin::po::istranslatedto('test1', 'fr')); ok(! IkiWiki::Plugin::po::istranslatedto('test1', 'cz')); ok(! IkiWiki::Plugin::po::istranslatedto('nontranslatable', 'es')); ok(! IkiWiki::Plugin::po::istranslatedto('nontranslatable', 'cz')); ok(! IkiWiki::Plugin::po::istranslatedto('test1.es', 'fr')); ok(! IkiWiki::Plugin::po::istranslatedto('test1.fr', 'es')); ### islanguagecode ok(IkiWiki::Plugin::po::islanguagecode('en')); ok(IkiWiki::Plugin::po::islanguagecode('es')); ok(IkiWiki::Plugin::po::islanguagecode('arn')); ok(! IkiWiki::Plugin::po::islanguagecode('es_')); ok(! IkiWiki::Plugin::po::islanguagecode('_en')); ikiwiki-3.20160121/t/permalink.t0000755000000000000000000000150612650125230013052 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More; my $installed = $ENV{INSTALLED_TESTS}; my @command; if ($installed) { @command = qw(ikiwiki); } else { ok(! system("make -s ikiwiki.out")); @command = qw(perl -I. ./ikiwiki.out --underlaydir=underlays/basewiki --set underlaydirbase=underlays --templatedir=templates); } ok(! system("rm -rf t/tmp")); ok(! system("mkdir t/tmp")); ok(! system(@command, qw(--plugin inline --url=http://example.com --cgiurl=http://example.com/ikiwiki.cgi --rss --atom t/tinyblog t/tmp/out))); # This guid should never, ever change, for any reason whatsoever! my $guid="http://example.com/post/"; ok(length `egrep '$guid' t/tmp/out/index.rss`); ok(length `egrep '$guid' t/tmp/out/index.atom`); ok(! 
system("rm -rf t/tmp t/tinyblog/.ikiwiki")); done_testing(); ikiwiki-3.20160121/t/parentlinks/0000755000000000000000000000000012650125230013230 5ustar ikiwiki-3.20160121/t/parentlinks/templates/0000755000000000000000000000000012650125230015226 5ustar ikiwiki-3.20160121/t/parentlinks/templates/parentlinks.tmpl0000644000000000000000000000017512650125230020461 0ustar ikiwiki-3.20160121/t/parentlinks.t0000755000000000000000000000411312650125230013417 0ustar #!/usr/bin/perl # -*- cperl-indent-level: 8; -*- # Testcases for the Ikiwiki parentlinks plugin. use warnings; use strict; use Test::More; my %expected; BEGIN { use_ok("IkiWiki"); } # Init %config=IkiWiki::defaultconfig(); $config{srcdir}=$config{destdir}="/dev/null"; $config{underlaydir}="underlays/basewiki"; $config{templatedir}="t/parentlinks/templates"; IkiWiki::loadplugins(); IkiWiki::checkconfig(); # Test data $expected{'parentlinks'} = { "ikiwiki" => [], "ikiwiki/pagespec" => [ {depth => 0, height => 2, }, {depth => 1, height => 1, }, ], "ikiwiki/pagespec/attachment" => [ {depth => 0, height => 3, depth_0 => 1, height_3 => 1}, {depth => 1, height => 2, }, {depth => 2, height => 1, }, ], }; # Test function sub test_loop($$) { my $loop=shift; my $expected=shift; my $template; my %params; ok($template=template('parentlinks.tmpl'), "template created"); ok($params{template}=$template, "params populated"); while ((my $page, my $exp) = each %{$expected}) { my @path=(split("/", $page)); my $pagedepth=@path; my $msgprefix="$page $loop"; # manually run the plugin hook $params{page}=$page; $template->clear_params(); IkiWiki::Plugin::parentlinks::pagetemplate(%params); my $res=$template->param($loop); is(scalar(@$res), $pagedepth, "$msgprefix: path length"); # logic & arithmetic validation tests for (my $i=0; $i<$pagedepth; $i++) { my $r=$res->[$i]; is($r->{height}, $pagedepth - $r->{depth}, "$msgprefix\[$i\]: height = pagedepth - depth"); ok($r->{depth} ge 0, "$msgprefix\[$i\]: depth>=0"); ok($r->{height} ge 0, 
"$msgprefix\[$i\]: height>=0"); } # comparison tests, iff the test-suite has been written if (scalar(@$exp) eq $pagedepth) { for (my $i=0; $i<$pagedepth; $i++) { my $e=$exp->[$i]; my $r=$res->[$i]; map { is($r->{$_}, $e->{$_}, "$msgprefix\[$i\]: $_"); } keys %$e; } } # else { # diag("Testsuite is incomplete for ($page,$loop); cannot run comparison tests."); # } } } # Main test_loop('parentlinks', $expected{'parentlinks'}); done_testing(); ikiwiki-3.20160121/t/pagetitle.t0000755000000000000000000000063212650125230013045 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 7; BEGIN { use_ok("IkiWiki"); } is(pagetitle("foo_bar"), "foo bar"); is(pagetitle("foo_bar_baz"), "foo bar baz"); is(pagetitle("foo_bar__33__baz"), "foo bar!baz"); is(pagetitle("foo_bar__1234__baz"), "foo barӒbaz"); is(pagetitle("foo_bar___33___baz"), "foo bar ! baz"); is(pagetitle("foo_bar___95___baz"), "foo bar _ baz"); ikiwiki-3.20160121/t/pagespec_match_result.t0000755000000000000000000000446012650125230015433 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 138; BEGIN { use_ok("IkiWiki"); } # Note that new objects have to be constructed freshly for each test, since # object states are mutated as they are combined. sub S { IkiWiki::SuccessReason->new("match", @_) } sub F { IkiWiki::FailReason->new("no match", @_) } sub E { IkiWiki::ErrorReason->new("error in matching", @_) } ok(S() eq "match"); ok(F() eq "no match"); ok(E() eq "error in matching"); ok(S()); ok(! F()); ok(! E()); ok(!(! S())); ok(!(!(! F))); ok(!(!(! E))); ok(S() | F()); ok(F() | S()); ok(!(F() | E())); ok(!(!S() | F() | E())); ok(S() & S() & S()); ok(!(S() & E())); ok(!(S() & F())); ok(!(S() & F() & E())); ok(S() & (F() | F() | S())); # influence merging tests foreach my $test ( ['$s | $f' => 1], # OR merges ['! $s | ! 
$f' => 1], # OR merges with negated terms too ['!(!(!$s)) | $f' => 1],# OR merges with multiple negation too ['$s | $f | E()' => 1], # OR merges, even though E() has no influences ['$s | E() | $f' => 1], # ditto ['E() | $s | $f' => 1], # ditto ['!$s | !$f | E()' => 1],# negated terms also do not block merges ['!$s | E() | $f' => 1],# ditto ['E() | $s | !$f' => 1],# ditto ['$s & $f' => 1], # AND merges if both items have influences ['!$s & $f' => 1], # AND merges negated terms too ['$s & !$f' => 1], # AND merges negated terms too ['$s & $f & E()' => 0], # AND fails to merge since E() has no influences ['$s & E() & $f' => 0], # ditto ['E() & $s & $f' => 0], # ditto ) { my $op=$test->[0]; my $influence=$test->[1]; my $s=S(foo => 1, bar => 1); is($s->influences->{foo}, 1); is($s->influences->{bar}, 1); my $f=F(bar => 2, baz => 1); is($f->influences->{bar}, 2); is($f->influences->{baz}, 1); my $c = eval $op; ok(ref $c); if ($influence) { is($c->influences->{foo}, 1, "foo ($op)"); is($c->influences->{bar}, (1 | 2), "bar ($op)"); is($c->influences->{baz}, 1, "baz ($op)"); } else { ok(! %{$c->influences}, "no influence for ($op)"); } } my $s=S(foo => 0, bar => 1); $s->influences(baz => 1); ok(! $s->influences->{foo}, "removed 0 influence"); ok(! $s->influences->{bar}, "removed 1 influence"); ok($s->influences->{baz}, "set influence"); ok($s->influences_static); $s=S(foo => 0, bar => 1); $s->influences(baz => 1, "" => 1); ok(! 
$s->influences_static); ikiwiki-3.20160121/t/pagespec_match_list.t0000755000000000000000000001673112650125230015074 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 126; BEGIN { use_ok("IkiWiki"); } %config=IkiWiki::defaultconfig(); $config{srcdir}=$config{destdir}="/dev/null"; IkiWiki::checkconfig(); { package IkiWiki::SortSpec; sub cmp_raw_path { $a cmp $b } } %pagesources=( foo => "foo.mdwn", foo2 => "foo2.mdwn", foo3 => "foo3.mdwn", bar => "bar.mdwn", "post/1" => "post/1.mdwn", "post/2" => "post/2.mdwn", "post/3" => "post/3.mdwn", ); $IkiWiki::pagectime{foo} = 2; $IkiWiki::pagectime{foo2} = 2; $IkiWiki::pagectime{foo3} = 1; $IkiWiki::pagectime{foo4} = 1; $IkiWiki::pagectime{foo5} = 1; $IkiWiki::pagectime{bar} = 3; $IkiWiki::pagectime{"post/1"} = 6; $IkiWiki::pagectime{"post/2"} = 6; $IkiWiki::pagectime{"post/3"} = 6; $links{foo}=[qw{post/1 post/2}]; $links{foo2}=[qw{bar}]; $links{foo3}=[qw{bar}]; is_deeply([pagespec_match_list("foo", "bar")], ["bar"]); is_deeply([sort(pagespec_match_list("foo", "* and !post/*"))], ["bar", "foo", "foo2", "foo3"]); is_deeply([sort(pagespec_match_list("foo", "post/*"))], ["post/1", "post/2", "post/3"]); is_deeply([pagespec_match_list("foo", "post/*", sort => "title")], ["post/1", "post/2", "post/3"]); is_deeply([pagespec_match_list("foo", "post/*", sort => "title", reverse => 1)], ["post/3", "post/2", "post/1"]); is_deeply([pagespec_match_list("foo", "post/*", sort => "title", num => 2)], ["post/1", "post/2"]); is_deeply([pagespec_match_list("foo", "post/*", sort => "title", num => 50)], ["post/1", "post/2", "post/3"]); is_deeply([pagespec_match_list("foo", "post/*", sort => "title", num => 50, reverse => 1)], ["post/3", "post/2", "post/1"]); is_deeply([pagespec_match_list("foo", "post/*", sort => "title", filter => sub { $_[0] =~ /3/}) ], ["post/1", "post/2"]); is_deeply([pagespec_match_list("foo", "*", sort => "raw_path", num => 2)], ["bar", "foo"]); is_deeply([pagespec_match_list("foo", "foo* or bar*", 
sort => "-age title")], # oldest first, break ties by title ["foo3", "foo", "foo2", "bar"]); my $r=eval { pagespec_match_list("foo", "beep") }; ok(eval { pagespec_match_list("foo", "beep") } == 0); ok(! $@, "does not fail with error when unable to match anything"); eval { pagespec_match_list("foo", "this is not a legal pagespec!") }; ok($@, "fails with error when pagespec bad"); # A pagespec that requires page metadata should add influences # as an explicit dependency. In the case of a link, a links dependency. foreach my $spec ("* and link(bar)", "* or link(bar)") { pagespec_match_list("foo2", $spec, deptype => deptype("presence")); ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE); ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS))); ok($IkiWiki::depends_simple{foo2}{foo2} == $IkiWiki::DEPEND_LINKS); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); pagespec_match_list("foo3", $spec, deptype => deptype("links")); ok($IkiWiki::depends{foo3}{$spec} & $IkiWiki::DEPEND_LINKS); ok(! ($IkiWiki::depends{foo3}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_PRESENCE))); ok($IkiWiki::depends_simple{foo3}{foo3} == $IkiWiki::DEPEND_LINKS); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); } # A link pagespec is influenced by the pages that currently contain the link. # It is not influced by pages that do not currently contain the link, # because if those pages were changed to contain it, regular dependency # handling would be triggered. foreach my $spec ("* and link(bar)", "link(bar)", "no_such_page or link(bar)") { pagespec_match_list("foo2", $spec); ok($IkiWiki::depends_simple{foo2}{foo2} == $IkiWiki::DEPEND_LINKS); ok(! 
exists $IkiWiki::depends_simple{foo2}{foo}, $spec); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); } # Oppositely, a pagespec that tests for pages that do not have a link # is not influenced by pages that currently contain the link, but # is instead influenced by pages that currently do not (but that # could be changed to have it). foreach my $spec ("* and !link(bar)", "* and !(!(!link(bar)))") { pagespec_match_list("foo2", $spec); ok(! exists $IkiWiki::depends_simple{foo2}{foo2}); ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS, $spec); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); } # a pagespec with backlinks() will add as an influence the page with the links foreach my $spec ("bar or (backlink(foo) and !*.png)", "backlink(foo)", "!backlink(foo)") { pagespec_match_list("foo2", $spec, deptype => deptype("presence")); ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE); ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS))); ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS); ok(! exists $IkiWiki::depends_simple{foo2}{foo2}); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); pagespec_match_list("foo2", $spec, deptype => deptype("links")); ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_LINKS); ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_PRESENCE | $IkiWiki::DEPEND_CONTENT))); ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS); ok(! exists $IkiWiki::depends_simple{foo2}{foo2}); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); pagespec_match_list("foo2", $spec, deptype => deptype("presence", "links")); ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE); ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_LINKS); ok(! ($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_CONTENT)); ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS); ok(! 
exists $IkiWiki::depends_simple{foo2}{foo2}); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); pagespec_match_list("foo2", $spec); ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_CONTENT); ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_PRESENCE | $IkiWiki::DEPEND_LINKS))); ok($IkiWiki::depends_simple{foo2}{foo} == $IkiWiki::DEPEND_LINKS); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); } # Hard fails due to a glob, etc, will block influences of other anded terms. foreach my $spec ("nosuchpage and link(bar)", "link(bar) and nosuchpage", "link(bar) and */Discussion", "*/Discussion and link(bar)", "!foo2 and link(bar)", "link(bar) and !foo2") { pagespec_match_list("foo2", $spec, deptype => deptype("presence")); ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE); ok(! ($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS))); ok(! exists $IkiWiki::depends_simple{foo2}{foo2}, "no influence from $spec"); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); } # A hard fail will not block influences of other ored terms. foreach my $spec ("nosuchpage or link(bar)", "link(bar) or nosuchpage", "link(bar) or */Discussion", "*/Discussion or link(bar)", "!foo2 or link(bar)", "link(bar) or !foo2", "link(bar) or (!foo2 and !foo1)") { pagespec_match_list("foo2", $spec, deptype => deptype("presence")); ok($IkiWiki::depends{foo2}{$spec} & $IkiWiki::DEPEND_PRESENCE); ok(! 
($IkiWiki::depends{foo2}{$spec} & ($IkiWiki::DEPEND_CONTENT | $IkiWiki::DEPEND_LINKS))); ok($IkiWiki::depends_simple{foo2}{foo2} == $IkiWiki::DEPEND_LINKS); %IkiWiki::depends_simple=(); %IkiWiki::depends=(); } my @ps; foreach my $p (100..500) { $IkiWiki::pagectime{"p/$p"} = $p; $pagesources{"p/$p"} = "p/$p.mdwn"; unshift @ps, "p/$p"; } is_deeply([pagespec_match_list("foo", "p/*", sort => "age")], [@ps]); is_deeply([pagespec_match_list("foo", "p/*", sort => "age", num => 20)], [@ps[0..19]]); ikiwiki-3.20160121/t/pagespec_match.t0000755000000000000000000001573112650125230014040 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 87; BEGIN { use_ok("IkiWiki"); } ok(pagespec_match("foo", "*")); ok(!pagespec_match("foo", "")); ok(pagespec_match("foo", "!bar")); ok(pagespec_match("page", "?ag?")); ok(! pagespec_match("page", "?a?g?")); ok(pagespec_match("foo.png", "*.*")); ok(! pagespec_match("foo", "*.*")); ok(pagespec_match("foo", "foo or bar"), "simple list"); ok(pagespec_match("bar", "foo or bar"), "simple list 2"); ok(pagespec_match("foo", "f?? and !foz")); ok(! pagespec_match("foo", "f?? and !foo")); ok(! pagespec_match("foo", "* and !foo")); ok(! pagespec_match("foo", "foo and !foo")); ok(! pagespec_match("foo.png", "* and !*.*")); ok(pagespec_match("foo", "(bar or ((meep and foo) or (baz or foo) or beep))")); ok(pagespec_match("foo", "( bar or ( (meep and foo) or (baz or foo) or beep ) )"), "multiline complex pagespec"); ok(! pagespec_match("a/foo", "foo", location => "a/b"), "nonrelative fail"); ok(! pagespec_match("foo", "./*", location => "a/b"), "relative fail"); ok(pagespec_match("a/foo", "./*", location => "a/b"), "relative"); ok(pagespec_match("a/b/foo", "./*", location => "a/b"), "relative 2"); ok(pagespec_match("a/foo", "./*", "a/b"), "relative oldstyle call"); ok(pagespec_match("foo", "./*", location => "a"), "relative toplevel"); ok(pagespec_match("foo/bar", "*", location => "baz"), "absolute"); ok(! 
pagespec_match("foo", "foo and bar"), "foo and bar"); ok(pagespec_match("{f}oo", "{*}*"), "curly match"); ok(! pagespec_match("foo", "{*}*"), "curly !match"); ok(pagespec_match("somepage", "user(frodo)", user => "frodo")); ok(pagespec_match("somepage", "user(frodo)", user => "Frodo")); ok(! pagespec_match("somepage", "user(frodo)", user => "Sam")); ok(pagespec_match("somepage", "user(*o)", user => "Bilbo")); ok(pagespec_match("somepage", "user(*o)", user => "frodo")); ok(! pagespec_match("somepage", "user(*o)", user => "Sam")); ok(pagespec_match("somepage", "user(http://*.myopenid.com/)", user => "http://foo.myopenid.com/")); ok(pagespec_match("somepage", "user(*://*)", user => "http://foo.myopenid.com/")); # The link and backlink stuff needs this. $config{userdir}=""; $links{foo}=[qw{bar baz}]; $links{bar}=[]; $links{baz}=[]; $links{meh}=[]; $links{"bugs/foo"}=[qw{bugs/done}]; $links{"bugs/done"}=[]; $links{"bugs/bar"}=[qw{done}]; $links{"done"}=[]; $links{"done"}=[]; $links{"examples/softwaresite/bugs/fails_to_frobnicate"}=[qw{done}]; $links{"examples/softwaresite/bugs/done"}=[]; $links{"ook"}=[qw{/blog/tags/foo}]; foreach my $p (keys %links) { $pagesources{$p}="$p.mdwn"; } $pagesources{"foo.png"}="foo.png"; $pagesources{"foo"}="foo.mdwn"; $IkiWiki::hooks{htmlize}{mdwn}={}; ok(pagespec_match("foo", "foo"), "simple"); ok(! pagespec_match("foo", "bar"), "simple fail"); ok(pagespec_match("foo", "foo"), "simple glob"); ok(pagespec_match("foo", "f*"), "simple glob fail"); ok(pagespec_match("foo", "page(foo)"), "page()"); print pagespec_match("foo", "page(foo)")."\n"; ok(! pagespec_match("foo", "page(bar)"), "page() fail"); ok(! pagespec_match("foo.png", "page(foo.png)"), "page() fails on non-page"); ok(! 
pagespec_match("foo.png", "page(foo*)"), "page() fails on non-page glob"); ok(pagespec_match("foo", "page(foo)"), "page() glob"); ok(pagespec_match("foo", "page(f*)"), "page() glob fail"); ok(pagespec_match("foo", "link(bar)"), "link"); ok(pagespec_match("foo", "link(.)", location => "bar"), "link with ."); ok(! pagespec_match("foo", "link(.)"), "link with . but missing location"); ok(pagespec_match("foo", "link(ba?)"), "glob link"); ok(! pagespec_match("foo", "link(quux)"), "failed link"); ok(! pagespec_match("foo", "link(qu*)"), "failed glob link"); ok(pagespec_match("bugs/foo", "link(done)", location => "bugs/done"), "link match to bestlink"); ok(! pagespec_match("examples/softwaresite/bugs/done", "link(done)", location => "bugs/done"), "link match to bestlink"); ok(pagespec_match("examples/softwaresite/bugs/fails_to_frobnicate", "link(./done)", location => "examples/softwaresite/bugs/done"), "link relative"); ok(! pagespec_match("foo", "link(./bar)", location => "foo/bar"), "link relative fail"); ok(pagespec_match("bar", "backlink(foo)"), "backlink"); ok(! pagespec_match("quux", "backlink(foo)"), "failed backlink"); ok(! pagespec_match("bar", ""), "empty pagespec should match nothing"); ok(! pagespec_match("bar", " "), "blank pagespec should match nothing"); ok(pagespec_match("ook", "link(blog/tags/foo)"), "link internal absolute success"); ok(pagespec_match("ook", "link(/blog/tags/foo)"), "link explicit absolute success"); ok(pagespec_match("meh", "!link(done)"), "negated failing match is a success"); $ENV{TZ}="GMT"; $IkiWiki::pagectime{foo}=1154532692; # Wed Aug 2 11:26 EDT 2006 $IkiWiki::pagectime{bar}=1154532695; # after ok(pagespec_match("foo", "created_before(bar)")); ok(! pagespec_match("foo", "created_after(bar)")); ok(! pagespec_match("bar", "created_before(foo)")); ok(pagespec_match("bar", "created_after(foo)")); ok(pagespec_match("foo", "creation_year(2006)"), "year"); ok(! 
pagespec_match("foo", "creation_year(2005)"), "other year"); ok(pagespec_match("foo", "creation_month(8)"), "month"); ok(! pagespec_match("foo", "creation_month(9)"), "other month"); ok(pagespec_match("foo", "creation_day(2)"), "day"); ok(! pagespec_match("foo", "creation_day(3)"), "other day"); ok(! pagespec_match("foo", "no_such_function(foo)"), "foo"); my $ret=pagespec_match("foo", "(invalid"); ok(! $ret, "syntax error"); ok($ret =~ /syntax error/, "error message"); $ret=pagespec_match("foo", "bar or foo"); ok($ret, "simple match"); is($ret, "foo matches foo", "stringified return"); my $i=pagespec_match("foo", "link(bar)")->influences; is(join(",", keys %$i), 'foo', "link is influenced by the page with the link"); $i=pagespec_match("bar", "backlink(foo)")->influences; is(join(",", keys %$i), 'foo', "backlink is influenced by the page with the link"); $i=pagespec_match("bar", "backlink(foo)")->influences; is(join(",", keys %$i), 'foo', "backlink is influenced by the page with the link"); $i=pagespec_match("bar", "created_before(foo)")->influences; is(join(",", keys %$i), 'foo', "created_before is influenced by the comparison page"); $i=pagespec_match("bar", "created_after(foo)")->influences; is(join(",", keys %$i), 'foo', "created_after is influenced by the comparison page"); $i=pagespec_match("foo", "link(baz) and created_after(bar)")->influences; is(join(",", sort keys %$i), 'bar,foo', "influences add up over AND"); $i=pagespec_match("foo", "link(baz) and created_after(bar)")->influences; is(join(",", sort keys %$i), 'bar,foo', "influences add up over OR"); $i=pagespec_match("foo", "!link(baz) and !created_after(bar)")->influences; is(join(",", sort keys %$i), 'bar,foo', "influences unaffected by negation"); $i=pagespec_match("foo", "!link(baz) and !created_after(bar)")->influences; is(join(",", sort keys %$i), 'bar,foo', "influences unaffected by negation"); $i=pagespec_match("meh", "!link(done)")->influences; is(join(",", sort keys %$i), 'meh', "a negated, 
failing link test is successful, so the page is a link influence"); ikiwiki-3.20160121/t/pagename.t0000755000000000000000000000214612650125230012646 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More tests => 19; BEGIN { use_ok("IkiWiki"); } # define mdwn as an extension $IkiWiki::hooks{htmlize}{mdwn}={}; is(pagetype("foo.mdwn"), "mdwn"); is(pagename("foo.mdwn"), "foo"); is(pagetype("foo/bar.mdwn"), "mdwn"); is(pagename("foo/bar.mdwn"), "foo/bar"); # bare files get the full filename as page name, undef type is(pagetype("foo.png"), undef); is(pagename("foo.png"), "foo.png"); is(pagetype("foo/bar.png"), undef); is(pagename("foo/bar.png"), "foo/bar.png"); is(pagetype("foo"), undef); is(pagename("foo"), "foo"); # keepextension preserves the extension in the page name $IkiWiki::hooks{htmlize}{txt}={keepextension => 1}; is(pagename("foo.txt"), "foo.txt"); is(pagetype("foo.txt"), "txt"); is(pagename("foo/bar.txt"), "foo/bar.txt"); is(pagetype("foo/bar.txt"), "txt"); # noextension makes extensionless files be treated as first-class pages $IkiWiki::hooks{htmlize}{Makefile}={noextension =>1}; is(pagetype("Makefile"), "Makefile"); is(pagename("Makefile"), "Makefile"); is(pagetype("foo/Makefile"), "Makefile"); is(pagename("foo/Makefile"), "foo/Makefile"); ikiwiki-3.20160121/t/openiduser.t0000755000000000000000000000325712650125230013252 0ustar #!/usr/bin/perl use warnings; use strict; BEGIN { eval q{ use Net::OpenID::VerifiedIdentity; }; if ($@) { eval q{use Test::More skip_all => "Net::OpenID::VerifiedIdentity not available"}; } else { eval q{use Test::More tests => 11}; } use_ok("IkiWiki"); } # Some typical examples: # This test, when run by Test::Harness using perl -w, exposes a warning in # Net::OpenID::VerifiedIdentity. Normally that warning is not displayed, as # that module does not use warnings. To avoid cluttering the test output, # disable the -w switch temporarily. 
$^W=0; is(IkiWiki::openiduser('http://josephturian.blogspot.com'), 'josephturian [blogspot.com]'); $^W=1; is(IkiWiki::openiduser('http://yam655.livejournal.com/'), 'yam655 [livejournal.com]'); is(IkiWiki::openiduser('http://id.mayfirst.org/jamie/'), 'jamie [id.mayfirst.org]'); # yahoo has an anchor in the url is(IkiWiki::openiduser('https://me.yahoo.com/joeyhess#35f22'), 'joeyhess [me.yahoo.com]'); # google urls are horrendous, but the worst bit is after a ?, so can be dropped is(IkiWiki::openiduser('https://www.google.com/accounts/o8/id?id=AItOawm-ebiIfxbKD3KNa-Cu9LvvD9edMLW7BAo'), 'id [www.google.com/accounts/o8]'); # and some less typical ones taken from the ikiwiki commit history is(IkiWiki::openiduser('http://thm.id.fedoraproject.org/'), 'thm [id.fedoraproject.org]'); is(IkiWiki::openiduser('http://dtrt.org/'), 'dtrt.org'); is(IkiWiki::openiduser('http://alcopop.org/me/openid/'), 'openid [alcopop.org/me]'); is(IkiWiki::openiduser('http://id.launchpad.net/882/bielawski1'), 'bielawski1 [id.launchpad.net/882]'); is(IkiWiki::openiduser('http://technorati.com/people/technorati/drajt'), 'drajt [technorati.com/people/technorati]'); ikiwiki-3.20160121/t/meta.t0000755000000000000000000000750212650125230012020 0ustar #!/usr/bin/perl use warnings; use strict; use Test::More; use IkiWiki; my $tmp = 't/tmp'; my $srcdir = "$tmp/in"; my $destdir = "$tmp/out"; my $installed = $ENV{INSTALLED_TESTS}; my @command; if ($installed) { @command = qw(ikiwiki); } else { ok(! system("make -s ikiwiki.out")); @command = qw(perl -I. ./ikiwiki.out --underlaydir=underlays/basewiki --set underlaydirbase=underlays --templatedir=templates); } push @command, qw(--plugin meta --disable-plugin htmlscrubber); push @command, $srcdir, $destdir; sub write_build_read_compare { my ($pagename, $input, $expected_output) = @_; ok(! system("mkdir -p $srcdir"), q{setup}); writefile("$pagename.mdwn", $srcdir, $input); ok(! 
system(@command), q{build}); like(readfile("$destdir/$pagename/index.html"), $expected_output); ok(! system("rm -rf $tmp"), q{teardown}); } write_build_read_compare( 'title', q{[[!meta title="a page about bar"]]}, qr{a page about bar}, ); write_build_read_compare( 'description', q{[[!meta description="a page about bar"]]}, qr{}, ); write_build_read_compare( 'guid', q{[[!meta guid="12345"]]}, qr{}, ); write_build_read_compare( 'license', q{[[!meta license="you get to keep both pieces"]]}, qr{
    }, ); write_build_read_compare( 'copyright', q{[[!meta copyright="12345"]]}, qr{ [[!tag patch]] > Unfortunately, the inlinepage content passes through markdown, and markdown > gets confused by these nested div's and puts p's around one of them, generating > broken html. If you can come up with a way to put in the div that passes > the test suite, or a fix to markdown, I will accept it, but the above patch > fails the test suite. --[[Joey]] >> Just a note... This discrepancy doesn't exist in [pandoc](http://code.google.com/p/pandoc/) as >> demonstrated in the relevant [page](http://code.google.com/p/pandoc/wiki/PandocVsMarkdownPl). >> Pandoc is a _real parser_ for markdown (contrasting the regexp based implementation of >> markdown.pl). I've almost finished the Debian packaging. John is working on a `--strict` mode >> which will hopefully make pandoc a drop-in replacement for markdown. I'll upload pandoc after >> his work has finished. Whether it could be used in IkiWiki is an open question, but having >> alternatives is always a good thing and perhaps, the fact that pandoc can make markdown->LaTeX >> conversion may lead to new possibilities. --[[Roktas]] >>> I confirm that this ([[!debbug 405058]]) has just been fixed in markdown >>> [`1.0.2b7`](http://packages.debian.org/experimental/web/markdown) (BTW, thanks to your bug >>> report Joey). FYI, I've observed some performance drop with `1.0.2b7` compared to `1.0.1`, >>> especially noticable with big files. This was also confirmed by someone else, for example, >>> see this [thread](http://six.pairlist.net/pipermail/markdown-discuss/2006-August/000152.html) >>> --[[Roktas]] >>>> 1.0.2b7 is slower, but ok, and parses much better. I'm waiting for it >>>> to at least get into debian testing before I make ikiwiki depend on it >>>> though. 
--[[Joey]] >> This Markdown issue seems to have been worked around by the optimization >> in which \[[!inline]] is replaced with a placeholder, and the >> placeholder is later replaced by the HTML. Meanwhile, this patch >> has been obsoleted by applying a similar one (wrapping things in a div >> with class inlinefooter). That was the last remaining unapplied patch >> on this page, so I think this whole page can be considered [[done]]. >> --[[smcv]] ---- I'd like a class attribute on the `` tag surrounding wikilinks that refer to non-existent pages, in Ikiwiki.pm:htmllink, so that such broken links can be styled more dramatically with CSS. --Jamey > added --[[Joey]] ikiwiki-3.20160121/doc/todo/monochrome_theme.mdwn0000644000000000000000000000617412650125230016374 0ustar [[!template id=gitbranch branch=jmtd/monochrome_theme author="[[Jon]]" ]][As requested](http://jmtd.net/log/goodreads/), please find a new theme named 'monochrome' in listed git repo/branch. [Here's the screenshot of what it looks like](https://github.com/jmtd/ikiwiki/blob/30af2437cd41d394930864e93b3c2319d1ec2b06/doc/themes/monochrome.png). — [[Jon]] Perhaps controversially, I think that this would be a good basis for a default theme for the ikiwiki website. (I suspect more work is needed, I have not tested the theme against every plugin which provides theme-able bits and pieces, nor with e.g. HTML5 mode turned on, etc. etc.) Whilst the anti-theme is the best default for an ikiwiki instance (although an argument could be made against that, too!), the site needs to try to advertise some of the potential of ikiwiki to visitors, and serve as an example of what can be done. I'd appreciate thoughts of frequent ikiwiki contributors on this proposal ☺ — [[Jon]] > I appreciate you putting that branch together. I was ready to merge it, > but `themes/monochrome/style.css` seems to contain a lot of redundant > things that are in ikiwiki's normal style.css. 
This is especially > redundant since ikiwiki's style.css gets prepended to the theme's stylesheet > at build time! Can you remove those redundant bits please? (PITA I know, > but it will make maintaining this much easier.) --[[Joey]] >> Sure I'll sort that out. Sorry, I didn't realise the prepending was an automatic process. I did it manually. It should be quick for me to fix. — [[Jon]] >>> Fixed. I rebased the branch; hopefully that won't cause your script issues. — [[Jon]] >>>> I've merged your branch. >>>> >>>> Looking more closely at the css, I do have a few questions: >>>> >>>> * Is the google-provided font really necessary? I consider that a sort >>>> of web bug, I would prefer users of ikiwiki not need to worry that >>>> their referer information is being sent to some third party. >>>> I'd also prefer for ikiwiki sites to always be functional when >>>> viewed offline. >>>> * The external link markup needs the local url to be put into >>>> local.css to work right, correct? I wonder if this is too much of a >>>> complication to ask of users. It seems to be it could either be left >>>> out of the theme, or perhaps ikiwiki could be made to expand >>>> something in the css to the site's url at build time. >>>> >>>> --[[Joey]] >>>>>Thanks for merging! >>>>> >>>>> * the font is not necessary. I will check, it might be license-compatible >>>>> and thus could be bundled. As things stand, if people have no 'net connection >>>>> or the font fails to load, the theme still "works". Good point RE the referral >>>>> situation. >>>>> >>>>> * The external link markup works without customizing the CSS, but if something >>>>> generates a non-relative link within the content area of a page, it will be >>>>> styled as an external link. By default, nothing does this in ikiwiki afaik, >>>>> so the impact is pretty small. (except perhaps if someone specifies an absolute >>>>> `cgiurl` path?) The additional customization is belt-and-braces. 
>>>>> — [[Jon]] ikiwiki-3.20160121/doc/todo/modify_page_filename_in_plugin.mdwn0000644000000000000000000000207112650125230021223 0ustar I'm writing a plugin to wikify c/c++ code. By default ikiwiki generates xxx.html for a file called xxx.c. The problem is that I occasionally have xxx.c and xxx.h in the same directory and there's a filename collision. My solution is to allow plugins to provide a hook that sets the pagename. --[[/users/bstpierre]] > You might also find the solution to [[bugs/multiple_pages_with_same_name]] helps you. That patch is already applied. -- [[Will]] --- /usr/share/perl5/IkiWiki.pm.ORIG 2008-10-03 14:12:50.000000000 -0400 +++ /usr/share/perl5/IkiWiki.pm 2008-10-07 11:57:26.000000000 -0400 @@ -196,11 +196,32 @@ sub pagename ($) { my $file=shift; my $type=pagetype($file); + + if(defined $type && + exists $hooks{pagename} && + exists $hooks{pagename}{$type}) { + + return $hooks{pagename}{$type}{call}($file); + + } else { + my $page=$file; $page=~s/\Q.$type\E*$// if defined $type; return $page; + } } sub htmlpage ($) { ikiwiki-3.20160121/doc/todo/missingparents.pm.mdwn0000644000000000000000000001777512650125230016536 0ustar This is another blogging support thing, and it relies on [[pagespec_relative_to_a_target]] (but only to figure out whether a given page has a child). Basically, you give it a page called missingparents.mdwn, something like this:
    [[!missingparents pages="posts/* and !posts/*/*" generate="""[[!template id=year text="$page"]]"""]]
    [[!missingparents pages="posts/*/* and !posts/*/*/*" generate="""[[!template id=month text="$page"]]"""]]
    [[!missingparents pages="posts/*/*/* and !posts/*/*/*/*" generate="""[[!template id=day text="$page"]]"""]]
    
    And it scans the whole wiki for pages that match the pagespecs but are missing parents. If any are found, they are generated automatically using the text in the "generate" parameter (except $page is substituted for the page title). *These generated pages aren't kept in version control*, but of course they're ordinary wiki pages and can be edited by the web form or otherwise added, at which point the missingparents plugin lets go of them. (TODO: CGI.pm needs to know to rcs_add these pages if they are edited, and it doesn't.) If all of the children of a missingparent page goes away, the missingparent itself is unlinked automatically, and all missingparents are deleted on wiki rebuild. To implement this, I needed to tell ikiwiki that pages were being added and removed in a non-standard way, and so created functions newpage and delpage in the IkiWiki namespace to do these things. delpage is modeled on the Render.pm code that deletes pages, so I re-used it in Render.pm. I also needed a way to add files to be deleted on a refresh(), so I added a needsdelete hook, parallel in form to needsbuild. This patch, or one like it, would enable better blogging support, by adding the ability to hierarchically organize blog posts and automatically generate structural pages for year, month, or day. Please apply. --Ethan > This looks a lot like [[plugins/autoindex]], except limited to a subset > of pages, and with different templates according to the page it's used > on. Perhaps it could become several enhancements for autoindex? --[[smcv]] ----
    Index: IkiWiki/Render.pm
    ===================================================================
    --- IkiWiki/Render.pm	(revision 3926)
    +++ IkiWiki/Render.pm	(working copy)
    @@ -322,17 +322,7 @@
     		if (! $exists{$page}) {
     			debug(sprintf(gettext("removing old page %s"), $page));
     			push @del, $pagesources{$page};
    -			$links{$page}=[];
    -			$renderedfiles{$page}=[];
    -			$pagemtime{$page}=0;
    -			prune($config{destdir}."/".$_)
    -				foreach @{$oldrenderedfiles{$page}};
    -			delete $pagesources{$page};
    -			foreach (keys %destsources) {
    -				if ($destsources{$_} eq $page) {
    -					delete $destsources{$_};
    -				}
    -			}
    +			delpage($page);
     		}
     	}
     
    @@ -377,6 +367,10 @@
     		}
     	}
     
    +	if (@del) {
    +		run_hooks(needsdelete => sub { shift->(\@del) });
    +	}
    +
     	if (%rendered || @del) {
     		# rebuild dependant pages
     		foreach my $f (@files) {
    Index: IkiWiki/Plugin/missingparents.pm
    ===================================================================
    --- IkiWiki/Plugin/missingparents.pm	(revision 0)
    +++ IkiWiki/Plugin/missingparents.pm	(revision 0)
    @@ -0,0 +1,142 @@
    +#!/usr/bin/perl
    +# missingparents plugin: detect missing parents of pages and create them
    +package IkiWiki::Plugin::missingparents;
    +
    +use warnings;
    +use strict;
    +use IkiWiki 2.00;
    +use IkiWiki::Plugin::relative;
    +
    +my %ownfiles;
    +my @pagespecs;
    +
    +sub import {
    +	hook(type => "checkconfig", id => "missingparents", call => \&checkconfig);
    +	hook(type => "needsdelete", id => "missingparents", call => \&needsdelete);
    +	hook(type => "needsbuild", id => "missingparents", call => \&needsbuild);
    +	hook(type => "savestate", id => "missingparents", call => \&savestate);
    +	hook(type => "preprocess", id => "missingparents", call => \&preprocess_missingparents);
    +}
    +
    +sub checkconfig () {
    +	IkiWiki::preprocess("missingparents", "missingparents",
    +		readfile(srcfile("missingparents.mdwn")));
    +	loadstate();
    +	if ($config{rebuild}){
    +		foreach my $file (keys %ownfiles) {
    +			unlink $config{srcdir}.'/'.$file;
    +		}
    +	}
    +}
    +
    +sub preprocess_missingparents (@) {
    +	my %params=@_;
    +
    +	if (! defined $params{pages} || ! defined $params{generate}) {
    +		return "[[!missingparents ".gettext("missing pages or generate parameter")."]]";
    +	}
    +
    +	push @pagespecs, \%params;
    +
    +	#translators: This is used to display what missingparents are defined.
    +	#translators: First parameter is a pagespec, the second
    +	#translators: is text for pages that match that pagespec.
    +	return sprintf(gettext("missingparents in %s will be %s"), 
    +		       '`'.$params{pages}.'`', '`\\'.$params{generate}.'`');
    +}
    +
    +my $state_loaded=0;
    +sub loadstate() {
    +	my $filename = "$config{wikistatedir}/missingparents";
    +	if (-e $filename) {
    +		open (IN, $filename) ||
    +		      die "$filename: $!";
    +		while () {
    +			chomp;
    +			$ownfiles{$_} = 1;
    +		}
    +
    +		close IN;
    +
    +		$state_loaded=1;
    +	}
    +}
    +
    +sub savestate() {
    +	my $filename = "$config{wikistatedir}/missingparents.new";
    +	my $cleanup = sub { unlink ($filename) };
    +	open (OUT, ">$filename") || error("open $filename: $!", $cleanup);
    +	foreach my $data (keys %ownfiles) {
    +		print OUT "$data\n" if $ownfiles{$data};
    +	}
    +	rename($filename, "$config{wikistatedir}/missingparents") ||
    +		error("rename $filename: $!", $cleanup);
    +}
    +
    +sub needsdelete (@) {
    +	my $files=shift;
    +	
    +	my @mydel;
    +	my $pruned = 1;
    +	do {
    +		$pruned = 0;
    +		foreach my $file (keys %ownfiles) {
    +			my $page = pagename($file);
    +			if (! IkiWiki::PageSpec::match_has_child($page, "")) {
    +				# No children -- get rid of it
    +				push @mydel, $page;
    +				delete $ownfiles{$file};
    +				IkiWiki::delpage($page);
    +				unlink $config{srcdir}."/".$file;
    +				$pruned = 1;
    +			}
    +		}
    +	} while($pruned);
    +	foreach my $page (@mydel){
    +		push @{$files}, $page;
    +	}
    +}
    +
    +sub check_matches($) {
    +	my $page = shift;
    +	return if $IkiWiki::pagesources{$page};
    +
    +	foreach my $miss (@pagespecs) {
    +		next unless pagespec_match($page, $miss->{pages});
    +		my $text = $miss->{generate};
    +		$text =~ s/\$page/$page/;
    +		my $output = $page.".mdwn";
    +		writefile($output, "$config{srcdir}/", $text);
    +		IkiWiki::newpage($output, $page);
    +		return $output;
    +	}
    +	return "";
    +}
    +
    +sub needsbuild ($) {
    +	my $files=shift;
    +	my @new;
    +
    +	foreach my $file (@{$files}) {
    +		if ($ownfiles{$file}) {
    +			# someone edited our file, making it the
    +			# user's problem
    +			delete $ownfiles{$file};
    +			next;
    +		}
    +		my $page = pagename $file;
    +		my $newfile = "";
    +		foreach my $parent (split '/', $page) {
    +			$newfile .= $parent;
    +			my $output = check_matches($newfile);
    +			push @new, $output if $output;
    +			$newfile .= "/";
    +		}
    +	}
    +	foreach my $file (@new) {
    +		$ownfiles{$file} = 1;
    +		push @{$files}, $file;
    +	}
    +}
    +
    +1
    Index: IkiWiki.pm
    ===================================================================
    --- IkiWiki.pm	(revision 3926)
    +++ IkiWiki.pm	(working copy)
    @@ -16,7 +16,7 @@
     use Exporter q{import};
     our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
                      bestlink htmllink readfile writefile pagetype srcfile pagename
    -                 displaytime will_render gettext urlto targetpage
    +                 displaytime will_render gettext urlto targetpage newpage delpage
                      %config %links %renderedfiles %pagesources %destsources);
     our $VERSION = 2.00; # plugin interface version, next is ikiwiki version
     our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
    @@ -330,6 +336,30 @@
     		error("failed renaming $newfile to $destdir/$file: $!", $cleanup);
     }
     
    +sub newpage($$) {
    +	my $file=shift;
    +	my $page=shift;
    +
    +	$pagemtime{$page} = $pagectime{$page} = time;
    +	$pagesources{$page} = $file;
    +	$pagecase{lc $page} = $page;
    +}
    +
    +sub delpage($) {
    +	my $page=shift;
    +	$links{$page}=[];
    +	$renderedfiles{$page}=[];
    +	$pagemtime{$page}=0;
    +	prune($config{destdir}."/".$_)
    +	    foreach @{$oldrenderedfiles{$page}};
    +	delete $pagesources{$page};
    +	foreach (keys %destsources) {
    +		if ($destsources{$_} eq $page) {
    +			delete $destsources{$_};
    +			}
    +		}
    +}
    +
     my %cleared;
     sub will_render ($$;$) {
     	my $page=shift;
    
    [[!tag patch patch/core]] ikiwiki-3.20160121/doc/todo/mirrorlist_with_per-mirror_usedirs_settings.mdwn0000644000000000000000000001051012650125230024126 0ustar I've got a wiki that is built at two places: * a static copy, aimed at being viewed without any web server, using a web browser's `file:///` urls => usedirs is disabled to get nice and working links * an online copy, with usedirs enabled in order to benefit from the language negotiation using the po plugin I need to use mirrorlist on the static copy, so that one can easily reach the online, possibly updated, pages. But as documented, "pages are assumed to exist in the same location under the specified url on each mirror", so the generated urls are wrong. My `mirrorlist` branch contains a patch that allows one to configure usedirs per-mirror. Note: the old configuration format is still supported, so this should not break existing wikis. OT: as a bonus, this branch contains a patch to support {hashes,arrays} of {hashes,arrays} in `$config`, which I missed a bit when writing the po plugin, and decided this time it was really needed to implement this feature. --[[intrigeri]] > Ping. --[[intrigeri]] [[!tag patch]] >> (I'm not an ikiwiki committer, opinions may vary.) >> >>> In my opinion, you're an ikiwiki committer! --[[Joey]] >> >> This would be easier to review if there weren't a million merges from >> master; perhaps either leave a branch as-is, or rebase it, or merge >> only at "significant" times like after a release? >> >> I believe Joey's main objection to complex $config entries is that >> it's not at all clear what [[plugins/websetup]] would do with them. >> Would something like this make a reasonable alternative? 
>> >> $config{mirrorlist} = ["nousedirs|file:///home/intrigeri/wiki", >> "usedirs|http://example.com/wiki", "http://example.net"]; >> >> From how I understand tainting, this: >> >> $untainted{$_} = possibly_foolish_untaint($tainted->{$_}) >> >> probably needs to untaint the key too: >> >> my $key = possibly_foolish_untaint($_); >> $untainted{$key} = possibly_foolish_untaint($tainted->{key}); >> >> --[[smcv]] >>> You are fully right about the complex `$config` entries. I'll >>> convert this to use what you are suggesting, i.e. what we ended up >>> choosing for the `po_slave_languages` setting. >>> >>> About the merges in this branch: Joey told me once he did not care >>> about this; moreover the `--no-merges` git log option makes it >>> easy to filter these out. I'll try merging tagged releases only in >>> the future, though. >>> >>> --[[intrigeri]] >>>> FWIW, I don't care about merge commits etc because I review >>>> `git diff ...intrigeri/mirrorlist` -- and if I want to dig deeper >>>> into the why of some code, I'll probably checkout the branch and >>>> use git blame. >>>> >>>> I agree with what smcv said, my other concern though is that >>>> this is such an edge case, that supporting it just adds clutter. >>>> Have to wonder if it wouldn't perhaps be better to do something >>>> using the goto plugin and cgiurl, so that the mirror doesn't have >>>> to know about the configuration of the other mirror. --[[Joey]] >>>>> I have implemented something using the cgi + goto in my (history >>>>> rewrite warning) mirrorlist branch. Please review, please pull. >>>>> --[[intrigeri]] >>>>>> Ping? I've merged 3.20110321 in my `mirrorlist` branch and >>>>>> checked it still works properly. --[[intrigeri]] >>>>>>> Joey: ping? I've rebased my `mirrorlist` branch on top of >>>>>>> 3.20120419, and checked it still works properly. I really >>>>>>> would like to see this functionality merged in time >>>>>>> for Wheezy. 
--[[intrigeri]] >>>>> concerning goto/cgiurl, what about having that as the default in >>>>> mirrorlist, but keeping ``nousedirs|file:///home/intrigeri/wiki`` and >>>>> ``usedirs|http://example.com/wiki`` valid for cgi-less cases? >>>>> that would keep typical installation with a clutter-less configuration, >>>>> and support more individual setups too. >>>>> --[[chrysn]] >>>>>> I would not mind. On the other hand Joey was concerned about >>>>>> cluttering the code to support edge cases, which I fully >>>>>> understand. The case you (chrysn) are describing being even >>>>>> more specific than the one I was initially talking of, I think >>>>>> this should not block the merge of the branch I have been >>>>>> proposing. Support for the usecase you are suggesting can >>>>>> always be added later if needed. --[[intrigeri]] >>>>>>> Well, that came out nice and clean. [[done]] --[[Joey]] ikiwiki-3.20160121/doc/todo/minor_adjustment_to_setup_documentation_for_recentchanges_feeds.mdwn0000644000000000000000000000221012650125230030171 0ustar Expand a comment so you know which bit to uncomment if you want to turn on feeds for recentchanges. diff --git a/doc/ikiwiki.setup b/doc/ikiwiki.setup index 99c81cf..7ca7687 100644 --- a/doc/ikiwiki.setup +++ b/doc/ikiwiki.setup @@ -91,9 +91,9 @@ use IkiWiki::Setup::Standard { #}, ], - # Default to generating rss feeds for blogs? + # Default to generating rss feeds for blogs/recentchanges? #rss => 1, - # Default to generating atom feeds for blogs? + # Default to generating atom feeds for blogs/recentchanges? #atom => 1, # Allow generating feeds even if not generated by default? #allowrss => 1, [[!tag patch]] > Hmm, recentchanges is just a blog. Of course the word "blog" is perhaps > being used in too broad a sense here, since it tends to imply personal > opinions, commentary, not-a-journalist, sitting-in-ones-underwear-typing, > and lots of other fairly silly stuff. But I don't know of a better word > w/o all these connotations. 
I've reworded it to not use the term "blog".. > [[done]] --[[Joey]] ikiwiki-3.20160121/doc/todo/metadata.mdwn0000644000000000000000000000106612650125230014617 0ustar There should be a way to add metadata to a page. Probably a plugin could do this, for example: \[[!meta foo="bar"]] Uses for this include: * Setting a page title that's not tied to the filename. * Any metadata that's generally useful on html pages. * Maybe as an alternate way to tag a page, like linking to the tag, except it doesn't have to show up in the page text. * Recording page licenses. [[!meta link=done]] [[!meta title="supporting metadata..."]] [[!meta author="Joey Hess"]] [[!meta link="foo.css" rel="stylesheet" type="text/css"]] [[todo/done]] ikiwiki-3.20160121/doc/todo/meta_rcsid.mdwn0000644000000000000000000000351212650125230015147 0ustar The following patch adds an 'rcsid' parameter to the [[!taglink plugins/Meta]] plugin, to allow inclusion of CVS/SVN-style keywords (like '$Id$', etc.) from the source file in the page template. > So the idea is you'd write something like: > > \[[!meta rcsid="$Id$"]] > > And this would be put at the bottom of the page or somewhere like that by > the template? > > I wonder if it wouldn't be just as clear to say: > > $Id$ > > And then use a stylesheet to display it as desired. > --[[Joey]] >> That's possibly true; my reasoning was that I wanted it to be more independent >> of the page content, and independent of any stylesheet. 
--- meta.pm.orig 2007-10-10 19:57:04.000000000 +0100 +++ meta.pm 2007-10-10 20:07:37.000000000 +0100 @@ -13,6 +13,7 @@ my %authorurl; my %license; my %copyright; +my %rcsid; sub import { hook(type => "preprocess", id => "meta", call => \&preprocess, scan => 1); @@ -110,6 +111,9 @@ $meta{$page}.="\n"; $copyright{$page}=$value; } + elsif ($key eq 'rcsid') { + $rcsid{$page}=$value; + } else { $meta{$page}.=scrub("\n"); @@ -142,6 +146,8 @@ if exists $author{$page} && $template->query(name => "author"); $template->param(authorurl => $authorurl{$page}) if exists $authorurl{$page} && $template->query(name => "authorurl"); + $template->param(rcsid => $rcsid{$page}) + if exists $rcsid{$page} && $template->query(name => "rcsid"); if ($page ne $destpage && ((exists $license{$page} && ! exists $license{$destpage}) || [[patch]] ikiwiki-3.20160121/doc/todo/mercurial/0000755000000000000000000000000012650125230014130 5ustar ikiwiki-3.20160121/doc/todo/mercurial/discussion.mdwn0000644000000000000000000000120712650125230017202 0ustar How does the lack of a post-commit hook for mercurial affect my ikiwiki installation? I want to use ikiwiki with one of the distributed scm systems and mercurial appears have the best balance of mature ikiwiki support and windows support. > Without a post-commit hook, changes committed to the wiki (either via > mercurial or via the web) will not automatically cause ikiwiki to run to > rebuild the changed pages. The parent page has an example of how to > configure mercurial to run ikiwiki as a post-commit hook. Someone just > needs to test this (including my suggested change) and then we could > document it in the setup page. --[[Joey]] ikiwiki-3.20160121/doc/todo/mercurial.mdwn0000644000000000000000000001541412650125230015024 0ustar * Is the code sufficiently robust? It just warns when mercurial fails. * When rcs_commit is called with a $user that is an openid, it will be passed through to mercurial -u. Will mercurial choke on this? * Nope. 
Mercurial doesn't expect any particular format for the username, though "Name " is standard. --[[bma]] * The way `-u $user` is passed to `hg commit`, there's no way to tell if a given commit came in over the web or was done directly. So rcs_recentchanges hardcodes 'committype => "mercurial"'. See the monotone backend for an example of one that does this right. * The rcs_commit implementation seems not to notice if the file has been changed since a web edit started. Unlike all the other frontends, which use the rcstoken to detect if the web commit started editing an earlier version of the file, and if so, merge the two sets of changes together. It seems that with the current mercurial commit code, it will always blindly overwrite the current file with the web edited version, losing any other changes. * `rcs_commit_staged`, `rcs_rename`, `rcs_remove`, and `rcs_diff` are not implemented for mercurial, and so attachments, remove and rename plugins and recentchangesdiff cannot be used with it. (These should be fairly easy to add..) Posthook: in `$srcdir/.hg/hgrc`, I have the following [hooks] incoming.update = hg up update.ikiwiki = ikiwiki --setup /path/to/ikiwiki.setup --refresh This should update the working directory and run ikiwiki every time a change is recorded (someone who knows mercurial better than I do may be able to suggest a better way, but this works for me.) > Try running it with --post-commit instead of --refresh. That should > work better, handling both the case where the edit was made via the web > and then committed, and the case where a commit was made directly. > It can deadlock if the post-commit hook runs with --refresh in the > former case. --[[Joey]] The problem with --post-commit is that if you delete some pages in $SRC, ikiwiki --setup setupfile --post-commit will not delete them in $DEST. --[[users/weakish]] > You should really be using a setup file that has `mercurial_wrapper` > set, and running the wrapper generated by that from your hook. 
> That will work. I think that the `--setup --post-commit` on the command > line is currently broken and does the same expensive rebuild process as --setup > alone (which doesn't delete files from $DEST either). Will fix that. > (fixed) > --[[Joey]] >> Mercurial doesn't support put hooks in .hg/hooks/* (like git). In Mercurial, the only way to run >> your own hooks is specifying them in the hgrc file. (Or write a new extension.) >> I guess use a very long command will work. >> (e.g. ikiwiki --post-commit --a-lot-of-switches --set var=value $SRC $DEST) >> (Fortunately ikiwiki supports --set var=value so without --setup works.) >> >> Alternative is always editing via cgi or pushing. Never work on the $SRC/repo directly. >> --[[users/weakish]] >>> I don't see anything preventing you from using a setup file with >>> `mercurial_wrapper => ".hg/ikiwiki-hook",` and then modifying the hgrc >>> to run that wrapper. --[[Joey]] >> Thanks for pointing out this. I have some stupid misunderstanding on the >> usage of mercurial_wrapper before. The wrapper works nicely! --[[weakish]] I add the following to .hg/hgrc:(I use changegroup since I don't think we need refresh per changeset, please point out if I am wrong.) [hooks] changegroup = hg update >&2 && ikiwiki --setup path.to.setup.file --refresh post-commit = path.to.the.mercurial.wrapper ----- I have no idea when the deadlock will happen. --[[users/weakish]] > For the deadlock to occur, a edit has to be made via the web. > > Ikiwiki, > running as a CGI, takes a lock on the wiki, and commits the edit, > continuing to run in the background, with the lock still held. > When the edit is committed, the hg hook runs, running `ikwiki --refresh`. > Nearly the first thing that process does it try to lock the wiki.. > which is already locked. 
This lock is taken in a blocking manner, > thus the deadlock -- the cgi is waiting for the commit to finish before > dropping the lock, and the commit is blocked waiting for the lock to be > released. > > --post-commit avoids this problem by checking if the cgi is running > and avoiding doing anything in that case. (While still handing the > refresh if the commit was not made by the CGI.) > So in that case, the commit finishes w/o ikiwiki doing anything, > and the ikiwiki CGI handles the wiki refresh. > --[[Joey]] *** I have a few notes on mercurial usage after trying it out for a while: 1. I have been using ikiwiki's `--post-commit` option without apparent problems. I'm the only current user of my wiki, though. 1. The `ikiwiki.setup` file included in ikiwiki works with mercurial's `hgserve`, which is not the preferred solution. Mercurial's `hgwebdir.cgi` is more flexible and doesn't require running a server. I have this in my .setup file: # Mercurial stuff. rcs => "mercurial", historyurl => "http://localhost/cgi-bin/hgwebdir.cgi/ikiwiki/log/tip/\[[file]]", diffurl => "http://localhost/cgi-bin/hgwebdir.cgi/ikiwiki/diff/tip/\[[file]]", 1. I have noticed that running `ikiwiki` after a change to the wiki adds files to a directory called `recentchanges` under `$srcdir`. I don't understand why such files are needed; worse, they are not added to mercurial's list of tracked files, so they polute the output of `hg log`. Is this a bug? Should mercurial's commit hook be modified to add these files before the commit? --buo > No, those files should not be added to revision control. --[[Joey]] >> OK. I see two problems: >> 1. If I clone my wiki, I won't get an exact copy of it: I will lose the recentchanges history. This could be an acceptable limitation but IMO this should be documented. >>> The history is stored in mercurial. How will it be lost? >> 2. The output of `hg status` is polluted. 
This could be solved trivially by adding a line containing `recentchanges` to `.hgignore`. Another alternative would be to store the `recentchanges` directory inside `$srdcir/.ikiwiki`. >> I think the ideal solution would be to build `$destdir/recentchanges/*` directly from the output of `hg log`. --[[buo]] >>>> That would be 100 times as slow, so I chose not to do that. --[[Joey]] >>>> Since this is confusing people, allow me to clarify: Ikiwiki's >>>> recentchanges generation pulls log information directly out of the VCS as >>>> needed. It caches it in recentchanges/* in the `scrdir`. These cache >>>> files need not be preserved, should never be checked into VCS, and if >>>> you want to you can configure your VCSignore file to ignore them, >>>> just as you can configure it to ignore the `.ikiwiki` directory in the >>>> `scrdir`. --[[Joey]] ikiwiki-3.20160121/doc/todo/mdwn_preview/0000755000000000000000000000000012650125230014653 5ustar ikiwiki-3.20160121/doc/todo/mdwn_preview/discussion.mdwn0000644000000000000000000000010112650125230017715 0ustar +1, not sure where this feature is going. I'm keen to seen this! ikiwiki-3.20160121/doc/todo/mdwn_preview.mdwn0000644000000000000000000003217112650125230015546 0ustar ikiwiki needs a wysiwyg markdown editor. While there have been tries using WMD etc, they are not fully satisfactory, and also the license of everything around WMD is [[unclear|plugins/wmd/discussion]]. [Hallo](https://github.com/bergie/hallo) is the closest to a solution I've seen. The user can edit the page by clicking on the html part they want to change and typing. Selecting text pops up a toolbar to modify it. [Demo of Hallo with live WYSIWYG markdown editing](http://bergie.github.com/hallo/markdown.html) This demo uses showdown, and I still don't know what the license of showdown is. However, the showdown part seems to only be to handle the live conversion from the markdown source in the edit field to the html. 
The (edited) html to markdown conversion is accomplished by Hallo. So, ikiwiki could use this in a page edit UI that does not show the markdown at all. The user would edit the live page, entirely in wysiwyg mode, and on saving hallo's generated markdown would be saved. Probably there would need to be a button to bring up the current markdown editor too, but without showdown, changes in it would not immediatly preview, so it'd make sense to disable hallo when the editor is visible. Issue: Ikiwiki directives can generate html. We would not want that html to be editable by halo and converted back to markdown. Also, the directives need to appear in the html so users can edit them. This seems to call for a special page rendering mode for editing, in which directives are either not expanded, or are expanded but the generated html wrapped in some tag that makes hallo refuse to edit it (which would probably require that feature be added to hallo, currently it acts on all blocks with `class=editable`), or otherwise allows it to be stripped out at save time. --[[Joey]] ### old discussion The [StackOverflow](http://stackoverflow.com/) site uses markdown for markup. It has a fancy javascript thing for showing a real-time preview of what the user is editing. It would be nice if ikiwiki could support this, too. The thing they use on StackOverflow is supposed to be free software, so it should be easy to add to ikiwiki. > See [[wikiwyg]]. Note that I do not have a copy of the code for that, or > it'd be in ikiwiki already. --[[Joey]] >> I just had a brief look at the [[wikiwyg]] page and the link to the plugin was >> broken. The StackOverflow site uses the [WMD](http://wmd-editor.com/) editor, >> which seems to be related to the [ShowDown](http://attacklab.net/showdown/) >> javascript port of Markdown. Interestingly, [WMD source](http://wmd.googlecode.com/) >> is now available under an MIT license, though it is supposedly undergoing heavy >> refactoring. 
It looks like there was previous discussion ( [[todo/Add_showdown_GUI_input__47__edit]] ) >> about a showdown plugin. Maybe a WMD plugin would be worthwhile. I might >> look into it if I have time on the weekend. -- [[Will]] [[!tag wishlist]] >>> Below is a simple plugin/[[patch]] to make use of the WMD editor. >>>> Now added to ikiwiki, thanks! --[[Joey]] >>> Turns out it isn't hard at all to >>> get a basic version going (which doesn't handle directives at all, nor does it swtich itself off when you're >>> editing something other than Markdown source). I've >>> removed the done tag so this is visible as a patch. -- [[Will]] >>>> Hmm, it would be good if it turned off for !mdwn. Although this could >>>> be difficult for a new page, since there is a dropdown selector to >>>> choose the markup language then. But it should be doable for editing an >>>> existing page. >>>>> I agree. I'm working on this for for both new pages and existing pages. >>>>> It shouldn't be hard once I get WMD going through the javascript API. >>>>> At the moment that is inexplicably failing, and I haven't had time to have a good look at why. >>>>> I may not get a chance to look at this again for a few weeks. >>>> Can I get a license statement (ie, GPL-2+) ffrom you for the plugin? >>>> --[[Joey]] >>>>> Certainly. You're free to use the code I posted below under the GPL-2+ license. You'll note >>>>> however that I haven't said anything about the WMD code itself. The WMD web page says: >>>>>> "I'm refactoring the code, and will be releasing WMD under the MIT license soon. For now you can download the most recent release (wmd-1.0.1.zip) and use it freely." >>>>> It might be best to contact to for an explicit license on that if you want to include it. >>>>> -- [[Will]] > So, I wonder if I should add a copy of the WMD source to ikiwiki, or rely > on the user or distribution providing it. It does not seem to be packaged > for Debian yet. 
Hmm, I also can't find any copyright or license info in > the zip file. --[[Joey]] >> This is a good question. My thought is that it will probably not be packaged any time soon, >> so you're better off adding it to IkiWiki. I'd contact the author of WMD and ask them. They >> may have more insight. -- [[Will]] Note that the WMD plugin does **not** handle directives. For this reason the normal `preview` button remains. Some CSS to clean up the display of the live WMD preview would be good. > Can you elucidate the CSS comment -- or will it be obvious what you mean > when I try it? Is it what's needed for the live preview? --[[Joey]] >> In the version of the plugin below, a new `div` is added just below the form. WMD >> populates this div with the HTML it generates from the Markdown source. This is not very >> pretty at the moment - it appears in the same place as the preview used to, but with no >> header or anything. Any standard IkiWiki preview will appear below the WMD live preview. >> I recommend having a look at >> for what a little CSS could achieve. -- [[Will]] > Hmm, now that I've tried it, I notice that it does live preview by > default, below the edit window. Which is nice, but then if I hit the > preview button, I get two previews.. which is confusing. (Also, minor, > but: the live preview is missing the "Page Preview:" header.) --[[Joey]] > I wonder how annoying it would be to add some kind of simplistic wikilink > support to wmd's preview? And/or a wikilink button? While not supporting > directies is fine, not supporting wikilinks in a wiki seems a bit > lacking. It may also entice novide users to not use wikilinks and instead > use the hyperlinks that wmd does support. --[[Joey]] > Bug: When I preview, all the text in the edit field seems to be > converted from mdwn to html. I think that wmd is converting the mdwn > into html when the form is posted, so it would also save like that. 
> I assume that is designed for websites that do not use markdown > internally. Doesn't it have a setting to leave it as markdown? >> Found setting, fixed. --[[Joey]] >>> As I noted above, I've been working on the non-markdown page issue. >>> Below is my a new javascript file that I'm using, and below that a patch >>> to enable it. This patch makes the normal usage prettier - you get >>> a side panel with the live preview in it. It also adds a new config >>> option, `wmd_use101api`, which turns on code that tries to use the >>> wmd api. At the moment this code doesn't seem to work - moreover the >>> code that uses the new API dies early, so any code after that point is >>> completely untested. I will not >>> get a chance to look at this again soon though, so I thought I'd post >>> my progress so far. -- [[Will]] Place the following file in `underlays/wmd/wmd-ikiwiki.js`. ---- // This is some code to interface the WMD interface 1.0.1 with IkiWiki // The WMD interface is planned to change, so this file will likely need // updating in future. 
if (useWMDinterface) { wmd_options = { autostart: false, output: "Markdown" }; var instance = null; hook("onload", initwmd); } else { var typeSelector = document.getElementById("type"); var currentType = getType(typeSelector); if (currentType == "mdwn") { wmd_options = { output: "Markdown" }; document.getElementById("wmd-preview-container").style.display = 'none'; } else { wmd_options = { autostart: false }; document.getElementById("wmd-preview-container").style.display = 'block'; } } function initwmd() { if (!Attacklab || !Attacklab.wmd) { alert("WMD hasn't finished loading!"); return; } var typeSelector = document.getElementById("type"); var currentType = getType(typeSelector); if (currentType == "mdwn") { window.setTimeout(enableWMD,10); } typeSelector.onchange=function() { var docType=getType(this); if (docType=="mdwn") { enableWMD(); } else { disableWMD(); } } } function getType(typeSelector) { if (typeSelector.nodeName.toLowerCase() == 'input') { return typeSelector.getAttribute('value'); } else if (typeSelector.nodeName.toLowerCase() == 'select') { return typeSelector.value; // return typeSelector.options[typeSelector.selectedIndex].innerText; } return ""; } function enableWMD() { var editContent = document.getElementById("editcontent"); var previewDiv = document.getElementById("wmd-preview"); var previewDivContainer = document.getElementById("wmd-preview-container"); previewDivContainer.style.display = 'block'; // editContent.style.width = previewDivContainer.style.width; /***** build the preview manager *****/ var panes = {input:editContent, preview:previewDiv, output:null}; var previewManager = new Attacklab.wmd.previewManager(panes); /***** build the editor and tell it to refresh the preview after commands *****/ var editor = new Attacklab.wmd.editor(editContent,previewManager.refresh); // save everything so we can destroy it all later instance = {ta:editContent, div:previewDiv, ed:editor, pm:previewManager}; } function disableWMD() { 
document.getElementById("wmd-preview-container").style.display = 'none'; if (instance != null) { instance.pm.destroy(); instance.ed.destroy(); // inst.ta.style.width='100%' } instance = null; } ---- diff --git a/IkiWiki/Plugin/wmd.pm b/IkiWiki/Plugin/wmd.pm index 9ddd237..743a0b8 100644 --- a/IkiWiki/Plugin/wmd.pm +++ b/IkiWiki/Plugin/wmd.pm @@ -17,6 +17,13 @@ sub getsetup () { return plugin => { safe => 1, + rebuild => 1, + }, + wmd_use101api => { + type => "boolean", + description => "Use the advanced, but unstable, WMD api for markdown preview.", + safe => 0, + rebuild => 0, }, } @@ -24,29 +31,25 @@ sub formbuilder_setup (@) { my %params=@_; my $form=$params{form}; - return if ! defined $form->field("do"); + return unless defined $form->field("do"); return unless $form->field("do") eq "edit" || - $form->field("do") eq "create" || - $form->field("do") eq "comment"; - - $form->tmpl_param("wmd_preview", "
    \n". - include_javascript(undef, 1)); -} - -sub include_javascript ($;$) { - my $page=shift; - my $absolute=shift; - - my $wmdjs=urlto("wmd/wmd.js", $page, $absolute); - return <<"EOF" - - -EOF + $form->field("do") eq "create" || + $form->field("do") eq "comment"; + + my $useAPI = $config{wmd_use101api}?'true':'false'; + my $ikiwikijs = urlto("ikiwiki.js", undef, 1); + my $wmdIkiwikijs = urlto("wmd-ikiwiki.js", undef, 1); + my $wmdjs = urlto("wmd.js", undef, 1); + + my $previewScripts = <<"EOS"; + + + + +EOS + + $form->tmpl_param("wmd_preview", $previewScripts); } 1 diff --git a/doc/style.css b/doc/style.css index a6e6734..36c2b13 --- a/doc/style.css +++ b/doc/style.css @@ -76,9 +76,16 @@ div.tags { float: right; } +/* #editcontent { width: 100%; } +*/ + +#wmd-preview-container { + width: 49%; + float: right; +} img { border-style: none; diff --git a/templates/editpage.tmpl b/templates/editpage.tmpl index b1cf015..1d2f080 100644 --- a/templates/editpage.tmpl +++ b/templates/editpage.tmpl @@ -15,6 +15,14 @@ Page type: + +
    +
    +Live preview: +
    +
    +
    +

    Optional comment about this change:
    ikiwiki-3.20160121/doc/todo/mdwn_itex.mdwn0000644000000000000000000000141412650125230015032 0ustar [[!template id=gitbranch branch=wtk/mdwn_itex author="[[wtk]]"]] summary ======= Extend the [[plugins/mdwn]] plugin to support [itex][] using Jacques Distler's [itex2MML][]. notes ===== This is an updated form of [[users/JasonBlevins]]' plugin. You can see the plugin [in action][example] on my blog. The blog post lists a few additional changes you may need to make to use the plugin, including changing your page template to a MathML-friendly doctype and disabling plugins like [[plugins/htmlscrubber]] and [[plugins/htmltidy]] which would otherwise strip out the generated MathML. [itex]: http://golem.ph.utexas.edu/~distler/blog/itex2MMLcommands.html [itex2MML]: http://golem.ph.utexas.edu/~distler/blog/itex2MML.html [example]: http://blog.tremily.us/posts/mdwn_itex/ ikiwiki-3.20160121/doc/todo/mbox.mdwn0000644000000000000000000000174512650125230014010 0ustar I'd like to be able to drop an unmodified RFC2822 email message into ikiwiki, and get it formatted to HTML. Something like this: > We're discussing doing just that (well, whole mailboxes, really) over in > [[comment_by_mail]] --[[Joey]] >> The >> [[plugins/contrib/mailbox]] >> plugin is roughly feature complete at this point. It can read mbox, maildir, and >> MH folders, does threading, and deals with MIME (now with >> pagespec based sanity checking). No doubt lots of things could be >> be improved, and it hasn't been tested a great deal. Formatting of the body could be attempted >> as well. -- [[DavidBremner]] >>> One hitch I noticed was that it is not currently possible to treat a maildir >>> or an MH directory as a page (i.e. just call it foo.mh and have it transformed >>> to page foo). I'm not sure if this is possible and worthwhile to fix. >> It is certainly workable >>> to use a \[[!mailbox ]] directive. 
-- [[DavidBremner]] [[done]] ikiwiki-3.20160121/doc/todo/maximum_page_length.mdwn0000644000000000000000000000113312650125230017044 0ustar It would be useful if ikiwiki could reject adding file with names longer than some limit, like 255 bytes. I've had some git-annex users complain that some files ended up longer than that, which won't work on some OS's and filesystems. 255 might be a good default value, even. This would apply to pages created following broken wikilinks and using blogpost forms, as well as to uploaded attachments, and perhaps aggregated pages. In the wikilink case, I guess ikiwiki would need to reject it as too long; in the other cases it could simply pick a shorter version of the proposed filename. --[[Joey]] ikiwiki-3.20160121/doc/todo/matching_different_kinds_of_links.mdwn0000644000000000000000000002202612650125230021732 0ustar [[!tag wishlist]] As noted in [[todo/tag_pagespec_function]], there is a "misbehavior" of a `tagged()` pagespec: it matches even pages which have plain links to the tag page. And in general, it would be quite useful to be able to distinguish different kinds of links: one more kind, in addition to "tag", is "bug dependency" noted in [[todo/structured_page_data#another_kind_of_links]] and [[todo/tracking_bugs_with_dependencies#another_kind_of_links]]. It could distinguish the links by the `rel=` attribute. ([[Tags already receive a special rel-class|todo/rel_attribute_for_links]].) This means there is a general need for a syntax to specify user-defined rel-classes on wikilink (then bug deps would simply use their special rel-class, either directly, or through a special directive like `\[[!depends ]]`), and to refer to them in pagespecs (in forward and backward direction). Besides pagespecs, the `rel=` attribute could be used for styles. --Ivan Z. 
> FWIW, the `add_link` function introduced in a recent > release adds an abstraction that could be used to get > part of the way there to storing data about different types of > links. That function could easily be extended to take an optional > third parameter specifying the link type. > > Then there's the question of how to store and access the data. `%links` > does not offer a good way to add additional information about links. > Now, we could toss `%links` entirely and switch to an accessor function, > but let's think about not doing that.. > > The data that seems to be needed is basically a deep hash, so > one could check `$linktype{$page}{tag}{$link}` to see if > the page contains a link of the given type. (Note that pages could > contain links that were duplicates except for their types.) > > There would be some data duplication, unfortuantly, but if `%linktype` > is not populated for regular wikilinks, it would at least be limited to > tags and other unusual link types, so not too bad. > > `%linktype` could be stored in `%pagestate`.. if so > the actual use might look like `$pagestate{$page}{linktype}{tag}{$link}`. > That could be implemented by the tag plugin right now > with no core changes. (BTW, then I originally wrote tag, pagestate > was not available, which is why I didn't make it differentiate from > normal links.) Might be better to go ahead and add the variable to > core though. --[[Joey]] >> I've implemented this with the data structure you suggested, except that >> I called it `%typedlinks` instead of `%linktype` (it seemed to make more >> sense that way). I also ported `tag` to it, and added a `tagged_is_strict` >> config option. See below! --[[smcv]] I saw somewhere else here some suggestions for the wiki-syntax for specifying the relation name of a link. One more suggestion---[the syntax used in Semantic MediaWiki](http://en.wikipedia.org/wiki/Semantic_MediaWiki#Basic_usage), like this:
    ... the capital city is \[[Has capital::Berlin]] ...
    
    So a part of the effect of [[`\[[!taglink TAG\]\]`|plugins/tag]] could be represented as something like `\[[tag::TAG]]` or (more understandable relation name in what concerns the direction) `\[[tagged::TAG]]`. I don't have any opinion on this syntax (whether it's good or not)...--Ivan Z. ------- >> [[!template id=gitbranch author="[[Simon_McVittie|smcv]]" branch=smcv/ready/link-types]] >> [[!tag patch]] ## Documentation for smcv's branch ### added to [[ikiwiki/pagespec]] * "`typedlink(type glob)`" - matches pages that link to a given page (or glob) with a given link type. Plugins can create links with a specific type: for instance, the tag plugin creates links of type `tag`. ### added to [[plugins/tag]] If the `tagged_is_strict` config option is set, `tagged()` will only match tags explicitly set with [[ikiwiki/directive/tag]] or [[ikiwiki/directive/taglink]]; if not (the default), it will also match any other [[WikiLinks|ikiwiki/WikiLink]] to the tag page. ### added to [[plugins/write]] #### `%typedlinks` The `%typedlinks` hash records links of specific types. Do not modify this hash directly; call `add_link()`. The keys are page names, and the values are hash references. In each page's hash reference, the keys are link types defined by plugins, and the values are hash references with link targets as keys, and 1 as a dummy value, something like this: $typedlinks{"foo"} = { tag => { short_word => 1, metasyntactic_variable => 1 }, next_page => { bar => 1 }, }; Ordinary [[WikiLinks|ikiwiki/WikiLink]] appear in `%links`, but not in `%typedlinks`. #### `add_link($$;$)` This adds a link to `%links`, ensuring that duplicate links are not added. Pass it the page that contains the link, and the link text. An optional third parameter sets the link type (`undef` produces an ordinary [[ikiwiki/WikiLink]]). ## Review Some code refers to `oldtypedlinks`, and other to `oldlinktypes`. --[[Joey]] > Oops, I'll fix that. 
That must mean missing test coverage, too :-( > --s >> A test suite for the dependency resolver *would* be nice. --[[Joey]] >>> Bug fixed, I think. A test suite for the dependency resolver seems >>> more ambitious than I want to get into right now, but I added a >>> unit test for this part of it... --s I'm curious what your reasoning was for adding a new variable rather than using `pagestate`. Was it only because you needed the `old` version to detect change, or was there other complexity? --J > You seemed to be more in favour of adding it to the core in > your proposal above, so I assumed that'd be more likely to be > accepted :-) I don't mind one way or the other - `%typedlinks` > costs one core variable, but saves one level of hash nesting. If > you're not sure either, then I think the decision should come down > to which one is easier to document clearly - I'm still unhappy with > my docs for `%typedlinks`, so I'll try to write docs for it as > `pagestate` and see if they work any better. --s >> On reflection, I don't think it's any better as a pagestate, and >> the contents of pagestates (so far) aren't documented for other >> plugins' consumption, so I'm inclined to leave it as-is, unless >> you want to veto that. Loose rationale: it needs special handling >> in the core to be a dependency type (I re-used the existing link >> type), it's API beyond a single plugin, and it's really part of >> the core parallel to pagestate rather than being tied to a >> specific plugin. Also, I'd need to special-case it to have >> ikiwiki not delete it from the index, unless I introduced a >> dummy typedlinks plugin (or just hook) that did nothing... --s I have not convinced myself this is a real problem, but.. If a page has a typed link, there seems to be no way to tell if it also has a separate, regular link. `add_link` will add to `@links` when adding a typed, or untyped link. If only untyped links were recorded there, one could tell the difference. 
But then typed links would not show up at all in eg, a linkmap, unless it was changed to check for typed links too. (Or, regular links could be recorded in typedlinks too, with a empty type. (Bloaty.)) --J > I think I like the semantics as-is - I can't think of any > reason why you'd want to ask the question "does A link to B, > not counting tags and other typed links?". A typed link is > still a link, in my mind at least. --s >> Me neither, let's not worry about it. --[[Joey]] I suspect we could get away without having `tagged_is_strict` without too much transitional trouble. --[[Joey]] > If you think so, I can delete about 5 LoC. I don't particularly > care either way; [[Jon]] expressed concern about people relying > on the current semantics, on one of the pages requesting this > change. --s >> Removed in a newer version of the branch. --s I might have been wrong to introduce `typedlink(tag foo)`. It's not very user-friendly, and is more useful as a backend for other plugins that as a feature in its own right - any plugin introducing a link type will probably also want to have its own preprocessor directive to set that link type, and its own pagespec function to match it. I wonder whether to make a `typedlink` plugin that has the typedlink pagespec match function and a new `\[[!typedlink to="foo" type="bar"]]` though... --[[smcv]] > I agree, per-type matchers are more friendly and I'm not enamored of the > multi-parameter pagespec syntax. --[[Joey]] >> Removed in a newer version of the branch. I re-introduced it as a >> plugin in `smcv/typedlink`, but I don't think we really need it. --s ---- I am ready to merge this, but I noticed one problem -- since `match_tagged` now only matches pages with the tag linktype, a wiki will need to be rebuilt on upgrade in order to get the linktype of existing tags in it recorded. So there needs to be a NEWS item about this and the postinst modified to force the rebuild. 
> Done, although you'll need to plug in an appropriate version number when > you release it. Is there a distinctive reminder string you grep for > during releases? I've used `UNRELEASED` for now. --[[smcv]] Also, the ready branch adds `typedlink()` to [[ikiwiki/pagespec]], but you removed that feature as documented above. --[[Joey]] > [[Done]]. --s ikiwiki-3.20160121/doc/todo/mark_edit_as_trivial__44___identify__47__filter_on_trivial_changes.mdwn0000644000000000000000000000255712650125230030153 0ustar One feature of mediawiki which I quite like is the ability to mark a change as 'minor', or 'trivial'. This can then be used to filter the 'recentchanges' page, to only show substantial edits. The utility of this depends entirely on whether the editors use it properly. I currently use an inline on the front page of my personal homepage to show the most recent pages (by creation date) within a subsection of my site (a blog). Blog posts are rarely modified much after they are 'created' (or published - I bodge the creation time via meta when I publish a post. It might sit in draft form indefinitely), so this effectively shows only non-trivial changes. I would like to have a short list of the most recent modifications to the site on the front page. I therefore want to sort by modified time rather than creation time, but exclude edits that I self-identify as minor. I also only want to take a short number of items, the top 5, and display only their titles (which may be derived from filename, or set via meta again). I'm still thinking through how this might be achieved in an ikiwiki-suitable fashion, but I think I need a scheme to identify certain edits as trivial. This would have to work via web edits (easier: could add a check box to the edit form) and plain changes in the VCS (harder: scan for keywords in a commit message? in a VCS-agnostic fashion?) 
[[!tag wishlist]] ikiwiki-3.20160121/doc/todo/manpages.mdwn0000644000000000000000000000033612650125230014631 0ustar ikiwiki could support manpages (or general groff input files) and convert them to HTML. --[[JoshTriplett]] > I wrote [[plugins/contrib/mandoc]] a while back. Just noticed > this wishlist item. --[[Schmonz]] [[wishlist]] ././@LongLink0000644000000000000000000000015200000000000011601 Lustar rootrootikiwiki-3.20160121/doc/todo/make_localstyle__44___pagetemplate__44___edittemplate_more_similar__63__.mdwnikiwiki-3.20160121/doc/todo/make_localstyle__44___pagetemplate__44___edittemplate_more_similar__63__0000644000000000000000000000156612650125230030160 0ustar If I'm reading the docs right, I count three different ways of associating some local styling information with a portion of a site: * [[plugins/localstyle]] uses the [[ikiwiki/subpage/LinkingRules]] to find the 'nearest' stylesheet * [[plugins/edittemplate]] uses a directive with a [[ikiwiki/PageSpec]] to indicate which pages should get which templates * [[plugins/pagetemplate]] doesn't do a thing for you unless you shoehorn a `pagetemplate` directive into every affected page. That last is the one that seems least useful. The [[ikiwiki/PageSpec]] approach seems most flexible. Would it be a bad thing to allow `pagetemplate` to work the way `edittemplate` does? Maybe just extend the existing directive? If it has a `pages` parameter, it specifies the template for the supplied [[ikiwiki/PageSpec]], otherwise it just affects the enclosing page as it does now? --Chap ikiwiki-3.20160121/doc/todo/make_link_target_search_all_paths_as_fallback.mdwn0000644000000000000000000000354512650125230024221 0ustar [[!tag wishlist]] ## Idea After searching from the most local to the root for a wikilinkable page, descend into the tree of pages looking for a matching page. 
For example, if I link to \[\[Pastrami\]\] from /users/eric, the current behavior is to look for * /users/eric/pastrami * /users/pastrami * /users/eric/pastrami I'd like it to find /sandwiches/pastrami. ## Issues I know this is a tougher problem, especially to scale efficiently. There is also not a clear ordering unless it is the recursive dictionary ordering (ie the order of a breadth-first search with natural ordering). It would probably require some sort of static lookup table keyed by pagename and yielding a path. This could be generated initially by a breadth-first search and then updated incrementally when pages are added/removed/renamed. In some ways a global might not be ideal, since one might argue that the link above should match /users/eric/sandwiches/pastrami before /sandwiches/pastrami. I guess you could put all matching paths in the lookup table and then evaluate the ordering at parse-time. ## Motivation Since I often access my documents using a text editor, I find it useful to keep them ordered in a heirarchy, about 3 levels deep with a branching factor of perhaps 10. When linking though, I'd like the wiki to find the document for me, since I am lazy. Also, many of my wiki pages comprise the canonical local representation of some unique entity, for example I might have /software/ikiwiki. The nesting, however, is only to aid navigation, and shouldn't be considered as part of resource's name. ## Alternatives If an alias could be specified in the page body (for example, /ikiwiki for /software/ikiwiki) which would then stand in for a regular page when searching, then the navigational convenience of folders could be preserved while selectively flattening the search namespace. 
ikiwiki-3.20160121/doc/todo/make_html-parser_use_encode_entities_numeric.mdwn0000644000000000000000000000106612650125230024111 0ustar Hi, Using encode_entities makes this sort of thing happen: XML Parsing Error: undefined entity Location: http://XXX.YYY.ZZZ/ and points to the relevant entity. I think using encode_entities_numeric would help a lot with this. This is just a naïve assessment, but this would prevent xml-like pages being non-xml. [[wishlist]] > I suppose you mean a html generator, and not a html parser. > > ikiwiki uses numeric entities where required, but not otherwise. > > It seems valid for xhtml to have eg, `<` in it. Do you have a specific > example? --[[Joey]] ikiwiki-3.20160121/doc/todo/mailnotification/0000755000000000000000000000000012650125230015476 5ustar ikiwiki-3.20160121/doc/todo/mailnotification/discussion.mdwn0000644000000000000000000000155012650125230020551 0ustar I think I would like mail notifications. Though it kinda comes out of this general fear of vandalisation. So if some 'evil doer' turned my wiki into a porn site, I would like to rectify it ASAP. So I would like: 1. Mail notifications of edits not made by me (or established contributors) 2. If there is something fishy, the steps I would need to revert the changes Mail notifications are probably not required. For example I get lots of comments on my blog, but I don't get mailed about them. They go through the (proprietary) [Akismet](http://akismet.com/) filter. Perhaps a powerful little UNDO feature on RecentChanges is all that is needed. > Um, if you'll look at the [[mailnotification]] page, ikiwiki has > supported mail notifications for > 1 year, with a powerful [[ikiwiki/PageSpec]] > to allow chosing which pages you want to be notified about. --[[Joey]] ikiwiki-3.20160121/doc/todo/mailnotification.mdwn0000644000000000000000000000464612650125230016377 0ustar Should support mail notification of new and changed pages. Hmm, should be easy to implement this.. 
it runs as a svn post-coommit hook already, so just look at the userdb, svnlook at what's changed, and send mails to people who have subscribed. A few details: 1. [[Joey]] mentioned that being able to subscribe to globs as well as explicitly named pages would be desirable. 2. I think that since we're using Perl on the backend, being able to let users craft their own arbitrary regexes would be good. Joey points out that this is actually a security hole, because Perl regexes let you embed (arbitrary?) Perl expressions inside them. Yuck! (This is not actually true unless you "use re 'eval';", without which (?{ code }) is disabled for expressions which interpolate variables. See perldoc re, second paragraph of DESCRIPTION. It's a little iffy to allow arbitrary regexen, since it's fairly easy to craft a regular expression that takes unbounded time to run, but this can be avoided with the use of alarm to add a time limit. Something like eval { # catches invalid regexen no re 'eval'; # to be sure local $SIG{ALRM} = sub { die }; alarm(1); ... stuff involving m/$some_random_variable/ ... alarm(0); }; if ($@) { ... handle the error ... } should be safe. --[[WillThompson]]) It would also be good to be able to subscribe to all pages except discussion pages or the SandBox: `* !*/discussion !sandobx`, maybe --[[Joey]] 3. Of course if you do that, you want to have form processing on the user page that lets them tune it, and probably choose literal or glob by default. I think that the new globlist() function should do everything you need. Adding a field to the prefs page will be trivial --[[Joey]] The first cut, I suppose, could use one sendmail process to batch-mail all subscribers for a given page. However, in the long run, I can see users demanding a bit of feature creep: 4. Each user should be able to tune whether they see the actual diff parts or not. 5. Each user should be able to set a maximum desired email size. 6. 
We might want to support a user-specified shibboleth string that will be included in the email they receive so they can easily procmail the messages into a folder. --[[BrandenRobinson]] I'm deferring these nicities until there's some demonstrated demand --[[Joey]]. [[todo/done]] ikiwiki-3.20160121/doc/todo/lucene_search_engine.mdwn0000644000000000000000000000134412650125230017163 0ustar There are [some issue](http://www.branchable.com/bugs/Exception:_Cannot_open_tables_at_consistent_revisions_at___47__usr__47__lib__47__perl5__47__Search__47__Xapian__47__WritableDatabase.pm_line_41./#comment-c159ea3f9be35fcd9ed0eeedb162e816) with the current search engine. Sometimes the database gets corrupted and it's not very good at weighting say, the title against the content. For example, [searching for pagespec](http://ikiwiki.info/ikiwiki.cgi?P=pagespec) in this wiki doesn't lead to the [[ikiwiki/pagespec]] page in the first page... but in the third page. In [[different_search_engine]], there was the idea of using Lucene - is there any reason why we shouldn't have both, or at least let lucene live in contrib? --[[anarcat]] ikiwiki-3.20160121/doc/todo/logo.mdwn0000644000000000000000000000024312650125230013773 0ustar ikiwiki needs a logo. I'm thinking something simple like the word "ikiwiki" with the first "k" backwards; drawn to show that it's "wiki" reflected. [[todo/done]] ikiwiki-3.20160121/doc/todo/location_of_ikiwiki-w3m.cgi.mdwn0000644000000000000000000000044412650125230020317 0ustar The `ikiwiki-w3m.cgi` script is installed (hard-coded) into `/usr/lib/w3m/cgi-bin`. On Fedora however, the w3m package expects it in `/usr/libexec/w3m/cgi-bin`. So, it would be nice if the destination for this script could be configured. > You can use `W3M_CGI_BIN now`. 
[[done]] --[[Joey]] ikiwiki-3.20160121/doc/todo/location_of_external_plugins.mdwn0000644000000000000000000000411012650125230020767 0ustar Would it be possible to make the installation location for the external plugins (those talked to via xmlrpc) configurable? Currently, they are installed into (and later expected to be in) /usr/lib/ikiwiki/plugins. For the Fedora package (which I maintain), I move them to /usr/libexec/ikiwiki/plugins. While not covered by the FHS, this seems to be a more appropriate place, see: https://fedoraproject.org/wiki/Packaging/Guidelines#Libexecdir. > This would need to be a build time configuration setting so the directory > is built into ikiwiki for use at runtime. --[[Joey]] As a side note, the accompanying proxy.py might better be placed into some directory on the python path. > If someone can show how to do so without needing a Setup.py and all the > pain that using one entails.. --[[Joey]] >> At the very least I don't think proxy.py should be on the `sys.path` >> under its current name. If it was renamed to ikiwiki_proxy or some such, >> possibly; but I think it's more appropriate to have it in an >> ikiwiki-specific directory (a "private module") since it's not useful for >> anything outside ikiwiki, and putting it in the same directory as the >> external plugins means it's automatically in their `sys.path` without >> needing special configuration. --[[smcv]] >> (a mostly-inactive member of Debian's Python modules packaging team) >>> I mostly agree, but a problem arises when the external plugin we are using is not located in the same directory as the `proxy.py` file, but in a directory somewhere in a `libdir` or `libdirs` directory. 
Right now (for a soon-to-be published plugin I am working on) the solutions I am thinking about are: >>> >>> - Call ikiwiki using ``PYTHONPATH="$PYTHONPATH:/usr/lib/ikiwiki/plugins ikiwiki ...``, but it is a pain to write this each time I want to use this (and any solution like creating a custom script do not seem very nice); >>> - Make my plugin add `/usr/lib/ikiwiki/plugins` to its python path, but I do not know how portable this is for a non Debian distribution. >>> >>> Any better idea (and sorry for digging up an old post)? >>> -- [[Louis|spalax]] ikiwiki-3.20160121/doc/todo/lists.mdwn0000644000000000000000000000005512650125230014172 0ustar * list of all missing pages [[todo/done]] ikiwiki-3.20160121/doc/todo/linktitle.mdwn0000644000000000000000000000203312650125230015031 0ustar Pages could have a `linktitle` (perhaps via [[plugins/meta]]), and [[wikilinks|ikiwiki/wikilink]] could use that title by default when linking to the page. That would allow pages to have a simple, easily linkable name (without spaces, for instance), but use the proper title for links. For example, [[ikiwiki/Directive]] could use the `linktitle` "preprocessor directive", and pages for [[users]] could have `linktitle`s that put spaces in their names. Ideally, perhaps two versions of the title could exist, one for general use, and an optional one for if the case in the actual link starts with an uppercase letter. That would allow [[ikiwiki/directive]] to use the link text "preprocessor directive", but [[ikiwiki/Directive]] to use the link text "Preprocessor Directive", for use at the beginnings of sentences. If the second version did not exist, the first version would apply to both cases. However, that also seems like potential overkill, and less important than the basic functionality of `linktitle`. 
--[[JoshTriplett]] [[wishlist]] ikiwiki-3.20160121/doc/todo/linkify_and_preprocessor_ordering.mdwn0000644000000000000000000000211012650125230022014 0ustar Currently ikiwiki linkifies text, then runs preprocessor directives. This allows a directive to contain a wikilink inside a parameter, but since the wikilink expands to some arbitrary html, the parameter needs to be triple-quoted to avoid quotes in the expanded text from leaking out. This is rather non-obvious. One fix would be to switch the order, since linkification and preprocessing are relatively independant. Some directives, like inline, would need to keep on linkifiying the inlined pages, to make the links be resolved correctly, but that's ok. Any directives that outputed stuff that looked like a wikilink, but wasn't, would need to be changed. > This solution has been implemented and _seems_ ok. An alternative would be to change the wikilink regexp so it doesn't apply to wikilinks that are embedded inside preprocessor directives. I haven't found a way to do that yet, since perl doesn't allow variable-width negative lookbehind. Maybe processing wikilinks and preprocessor directives as part of the same loop would work, but that probably has its own issues. [[todo/done]] ikiwiki-3.20160121/doc/todo/linkbase.mdwn0000644000000000000000000000136212650125230014626 0ustar [[!template id=gitbranch branch=GiuseppeBilotta/linkbase author="[[GiuseppeBilotta]]"]] This patches enables the user to specify additional paths (“link bases”) that can be used by ikiwiki when trying to resolve links. The list of link bases is built as follows: * the page itself (as ikiwiki currently does) * all link bases specified for this page * all link bases specified for pagespecs matched by this page To specify the link bases, the only way made available presently by the patchset is a linkbase plugin that works similarly to the shortcut plugin (link bases are specified in a linkbases.mdwn file at the document root). 
However, are ways are potentially possible. This is still work in progress. Comments and suggestions are welcome. ikiwiki-3.20160121/doc/todo/link_plugin_perhaps_too_general__63__.mdwn0000644000000000000000000000217712650125230022423 0ustar [[!tag wishlist blue-sky]] (This isn't important to me - I don't use MediaWiki or Creole syntax myself - but just thinking out loud...) The [[ikiwiki/wikilink]] syntax IkiWiki uses sometimes conflicts with page languages' syntax (notably, [[plugins/contrib/MediaWiki]] and [[plugins/Creole]] want their wikilinks the other way round, like `\[[plugins/write|how to write a plugin]]`). It would be nice if there was some way for page language plugins to opt in/out of the normal wiki link processing - then MediaWiki and Creole could have their own `linkify` hook that was only active for *their* page types, and used the appropriate syntax. In [[todo/matching_different_kinds_of_links]] I wondered about adding a `\[[!typedlink to="foo" type="bar"]]` directive. This made me wonder whether a core `\[[!link]]` directive would be useful; this could be a fallback for page types where a normal wikilink can't be done for whatever reason, and could also provide extension points more easily than WikiLinks' special syntax with extra punctuation, which doesn't really scale? Straw-man: \[[!link to="ikiwiki/wikilink" desc="WikiLinks"]] --[[smcv]] ikiwiki-3.20160121/doc/todo/link_map.mdwn0000644000000000000000000000026412650125230014630 0ustar An idea: Use graphviz to generate a map of all the links between pages. (Could it be made clickable somehow?) Graphviz can output image maps. -- ChristofferSawicki [[todo/done]] ikiwiki-3.20160121/doc/todo/limit_the_markup_formats_available_for_editing.mdwn0000644000000000000000000000105412650125230024475 0ustar For `aggregate` to work, I have to have the `html` plugin enabled, and this allows users to create `html` pages via the standard edit form. 
It would be good if I could tell IkiWiki that I don't want certain page types to be editable (but still enabled to let e.g. aggregate/inline work. So by telling IkiWiki that e.g. `html` pages are uneditable (in the setup file), people would no longer - choose the `html` (or `htm`) page type in the edit form - bring up the edit form for `html` or `htm`) pages in the first place. --[[madduck]] [[!tag wishlist]] ikiwiki-3.20160121/doc/todo/let_inline_plugin_use_pagetemplates.mdwn0000644000000000000000000000041512650125230022323 0ustar Is there any reason why the inline plugin's template parameter couldn't take any pagetemplate templates, meaning those in use by the template plugin? Right now it seems that inline templates have to be `.tmpl` files on the filesystem. --[[madduck]] [[!tag wishlist]] ikiwiki-3.20160121/doc/todo/latex/0000755000000000000000000000000012650125230013262 5ustar ikiwiki-3.20160121/doc/todo/latex/discussion.mdwn0000644000000000000000000000045412650125230016337 0ustar Okay, moving the discussion from the soc - page to here :) I'll have a look on the MediaWiki plugin and how they do it. (I think the parser I want to use can also handle \newcommand{ ... and similiar things very well. The bad point is: The parser is (not yet) in debian... i'm package it atm. ;-)ikiwiki-3.20160121/doc/todo/latex.mdwn0000644000000000000000000003427512650125230014164 0ustar How about a plugin adding a [[preprocessor_directive|ikiwiki/directive]] to render some given LaTeX and include it in the page? This could either render the LaTeX as a PNG via [[!debpkg dvipng]] and include the resulting image in the page, or perhaps render via [HeVeA](http://pauillac.inria.fr/~maranget/hevea/index.html), [TeX2page](http://www.ccs.neu.edu/~dorai/tex2page/tex2page-doc.html), or similar. Useful for mathematics, as well as for stuff like the LaTeX version of the ikiwiki [[/logo]]. 
> [[users/JasonBlevins]] has also a plugin for including [[LaTeX]] expressions (by means of `itex2MML`) -- [[plugins/mdwn_itex]] (look at his page for the link). --Ivan Z. >> I've [[updated|mdwn_itex]] Jason's plugin for ikiwiki 3.x. --[[wtk]] >>> I've updated [[Jason's pandoc plugin|users/jasonblevins]] to permit the TeX processing to be managed via Pandoc. See for details. --Profjim ---- ikiwiki could also support LaTeX as a document type, again rendering to HTML. > [[users/JasonBlevins]] has also a [[plugins/pandoc]] plugin (look at his page for the link): in principle, [Pandoc](http://johnmacfarlane.net/pandoc/) can read and write [[LaTeX]]. --Ivan Z. ---- Conversely, how about adding a plugin to support exporting to LaTeX? >> I did some tests with using Markdown and a customized HTML::Latex and html2latex >> and it appears it will work for me now. (I hope to use ikiwiki for many >> to collaborate on a printed book that will be generated at least once per day in PDF format.) >> >> --JeremyReed >>> Have a look at [pandoc](http://code.google.com/p/pandoc/). It can make PDFs via pdflatex. --[[roktas]] >>>> Interesting, just yesterday I was playing with pandoc to make PDFs from my Markdown. Could someone advise me on how to embed these PDFs into ikiwiki? I need some guidance in implementing this. --[[JosephTurian]] >>>> [[users/JasonBlevins]] has a [[plugins/pandoc]] plugin (look at his page for the link). --Ivan Z. ---- [here](http://ng.l4x.org/gitweb/gitweb.cgi?p=ikiwiki.git/.git;a=blob;f=IkiWiki/Plugin/latex.pm) is a first stab at a latex plugin. Examples [here](http://ng.l4x.org/latex/). Currently without image support for hevea. And the latex2html output has the wrong charset and no command line switch to change that. Dreamland. As this link is not working, I setted a mirror here: http://satangoss.sarava.org/ikiwiki/latex.pm. ---- Okay, now is the time for a mid term report i think. The LaTeX Plugin for ikiwiki is now usable, except for the security check. 
This means at the moment the latex code is not validated, but only added into a very basic latex template. and the image is generated via this path: latex -> dvips -> convert (.tex -> .dvi -> .ps -> .png). The resulting .png is moved into the imagefolder. The name of this image is the md5hash of the code the user wrote into the wiki. So on a second run there is no need to recreate this image, if it exists. This will fasten up all but the first generation of the page. The generation of the image is done in an temporary folder in /tmp created with tempdir from File::Temp. The tmp-folder name is something like: $md5sumdigest.XXXXXXXX. if there is an .tex file already in this dir it will be overwritten. So until now i finished the basic things, now the most important task is to make an good input validation. This is a bit eased since it is not possible to execute shell commands in perl. Furthermore adding additional packages won't work since the code comes from the user is inserted after \begin{document}. Therefore this will result in an error (and this will stop latex from working --> no image is created). So my task for the next weeks is to write an good input validation. I think this progress is okay, since I'll had to learn the last 5-6 weeks for my final exams in university therefore I can't do anything. From now on I have 3 months of freetime and I'll use them to work heavily on this plugin. So I think I'm inside my own timetable. :) ps: Since I found nothere the possibility to upload an file, here is an link to my page where you can have a look. Comments are very welcome ;-) https://www.der-winnie.de/~winnie/gsoc07/tex.pm You'll find an demo site here: https://www.der-winnie.de/wiki/opensource/gsoc2007/ I'll add some more complex formulas over the days. But this is basically only pure latex. ;-) -- Patrick Winnertz > Looks like you're very well on schedule. > But I wonder: Do you have any plans to work on the latex to html side of > things too? 
This page kinda combines both uses of latex; creating gifs > for formulas etc, and creating html. Dreamland already has a latex2html > using plugin submitted above, but it still needs some work, and > particularly, needs the same input validation stuff. And then there's the > idea of using html2latex, to convert an ikiwiki site into a book. Any > plans to work on that too? I guess I'm not sure what the scope is of what > you plan to do as part of GSoC. >> Yes I plan to write an html -> tex (or pdf) plugin as well. But I think it is better to work first on the first one and complete it and then work and complete the second one. If it is in the scope of GSoC i don't know, but I'll write it since it is fun to write on an Opensource project ;-) >> For latex-> html: >> I have the problem that I don't really see the sense to create html code (this means text or charts) from latex code. But if you like I can also add this variant to create html code. In my eyes it is much more important that it is possible to have complex chemical/physical & math formulas on the website without the need to use extern programs. (and upload the pictures manually). >>> Well, I suppose someone might just like latex and want to use it as the >>> markup for their wiki, rather than learning markdown. I guess Midnight >>> wanted it enough to write his plugin. But the use case is not too >>> compelling to me either, honestly. --[[Joey]] ### code review > The main problem I see with the code is that you seem to unnecessarily create a dummy div tag > in preprocess, and then in format you call create(), which generates an img tag. So, why not > just create the img tag in preprocess? >> Mh okay, I'll improve this. Fixed > Another problem: Looks like if latex fails to create the image, the user won't be shown any > of its error message, but just "failed to generate image from code". I suspect that in this > case being able to see the error message would be important. >> Yes, that's true. 
It would be _very_ hard to provide the user the output of latex since this is really very much. For an simple formula as \frac{1}{2} this could be 2 printed out. >>> YM 2 printed pages? Yes, I'm familar with latex's insane errors. :-) >>> However, IMHO, it's worth considering this and doing something. Perhaps >>> put the error inside some kind of box in the html so it's delimited >>> from the rest of the page. (Actually, ikiwiki preprocessor directives in >>> general could mark up their errors better.) Okay, I'll provide the log as an link in the wiki. But there should be a kind of mechanism how they can be removed. This could lead to an DOS (create via a bot so much nonsense code that the disk is full.) Fixed, the log is now provided if latex will fail. > The url handling could stand to be improved. Currently it uses $config{url}, so it depends on that being set. Some ikiwiki builds don't have an url set. The thing to do is to use urlto(), to generate a nice relative url from the page to the image. >> Mh... i choose one single dir explizitly since if you use on several pages the same formula this would really improve the time to generate the formulas and it would waste extra space if you store every formula 3-4 times. But if you really like I'll change this behaviour. >>> No, that makes sense! (My comments about $config{url} still stand >>> though. Yes of course, I'll improve the url handling. My comment was only about the several folder ;-) Fixed. Now I use urlto and will_render. > Another (minor) problem with the url handling is that you put all the images in a "teximages" directory in the toplevel of the wiki. I think it would be better to put each image in the subdirectory for the page that created it. See how the `img` and `sparkline` plugins handle this. > It looks like if the tempdir already exists, tempdir() will croak(), thus crashing ikiwiki. It would be good to catch a failure there and fail more gracefully. >> Okay, I'll improve this behaviour. 
Maybe: if tempdir croak rerun it to get another not existing dir? (But only x times so that this is no endless loop, with x maybe = 3). >> Then it would not be necessary to inform the user about not generating the image. >>> Or just propigate up an error message. If it's failing, someone is >>> probably trying to DOS ikiwiki or something. :-) Fixed. I now use eval { create_tmp } and then: if ($?) { $returncode = 0 } else { save .tex file ... } ... > I'm not sure why you're sanitising the PATH before calling latex. This could be problimatic on systems where latex is not in /bin or /usr/bin >> Okay what do you suggest to use as PATH? >> I'll have to change the default settings, since we ikiwiki runs in taint mode. (which is good ;-)) >>> But, ikiwiki already sanitises path and deletes the IFS and CDPATH etc. >>> See ikiwiki.in. Fixed. I'll removed these two lines completly. ----- Okay here an short timetable how I want to proceed further: * Until weekend (21-22. July) I'll try to fix all errors above. (done) * From 22.July until 29. July I'll try to set up a first security check My plans are two parts of a security check: * One with an array of blacklisted regular expression. (This would blacklist all the well known and easy to fetch things like \include {/path/to/something} and things like closing the math formula environment ($$). (done) * the second step will be based on Tom::latex, which will help to parse and get a tree view of the code. Okay what do you think of this procedure? > --[[Joey]] >> -- [[PatrickWinnertz]] ---- > It would be nice if it would output image tags with style="height:1em;" so that the formulas scale > with the rest of the text if you change the font size in your browser (ctrl + +/-). Thanks for the comment.. is fixed. Mh... not really fixed :S I added it into the return but it is somehow ignored. I'll figure out why. 
----- Okay, the last version of the tex plugin for ikiwiki can be downloaded [here](https://www.der-winnie.de/~winnie/gsoc07/tex.pm). > I've looked this over, fixed the indenting, fixed some variable names > ("$foo" is a bad variable name), removed a gratuotuous use of `tie`, > fixed a bug (the first time it was run, it tried to write the png file > before the teximages/ directory existed) and checked the result in. > > Can you please flesh out [[plugins/teximg]] with > whatever documentation people who know tex will expect to see? Okay, I'll fill this up today I think with information about the plugin Done. Is that docu fine with you? >> Perhaps add some documentation about the kind of tex code that can be >> used, or a link to some documentation so people who don't know latex >> well can figure this out? > Also, please review my changes. In particular, I changed the @badthings > array to use qr//, which is much clearer, but it needs to be tested that > I didn't break the checking code when I did it. It would be nice to write > a test case that tries to feed it bad code and makes sure it rejects it. I'll test this now on my server. I'll report here later. Okay, checked. it works fine. My blacklist tests were successfull. > > Does it really make sense to have an alt tag for the image > that contains the tex code? Will that make any sense when browsing > without images? Mh.. For people who know latex very well this would be enough to imagine how the image would look like. This are of course the minority of people (but I guess also the minority of people are using non-gui browsers). > I'm thinking about renameing the preprocessor directive to teximg. > \[[!teximg code="" alt="foo"]] makes sense.. Would it make sense to rename > the whole plugin, or do you think that other tex stuff should go in this > same plugin? I'll think over this until I'm at work ;) Only for rendering images... not for generating .tex files .../wiki/ the name is all the same i think. 
If you like teximg better than switch :) > Note: I removed the style= attribute, since as I've told you, the > htmlsanitizer strips those since they can be used to insert javascript. I > put in a class=teximage instead; the style sheet could be modified to > style that, if you want to send a patch for that. Ah yes.. sorry forgot to update the plugin in my public_html folder %-). This was my last change in this plugin :) Sorry. > > --[[Joey]] ----- I'm using a [plugin](http://metameso.org/~joe/math/tex.pm) created by [Josef Urban](http://www.cs.ru.nl/~urban) that gets LaTeX into ikiwiki by using [LaTeXML](http://dlmf.nist.gov/LaTeXML). This could well be "the right way" to go (long term) but the plugin still does not render math expressions right, because ikiwiki is filtering out requisite header information. Examples (I recommend you use Firefox to view these!) are available [here](http://li101-104.members.linode.com/aa/math/) and [here](http://li101-104.members.linode.com/aa/simple/). Compare that last example to the [file generated by LaTeXML directly](http://metameso.org/~joe/math/math.xml). I posted the sources [here](http://metameso.org/aa/sources/) for easy perusal. How to get ikiwiki to use the original DOCTYPE and html fields? I could use some help getting this polished off. --[[jcorneli]] > update: it seems important to force the browser to think of the content as xml, e.g. [http://metameso.org/~joe/math/example.xml](http://metameso.org/~joe/math/example.xml) has the same source code as [http://metameso.org/~joe/math/example.html](http://metameso.org/~joe/math/example.html) and the former shows math working, but the latter doesn't. --[[jcorneli]] >> Looking at the source code, it seems Ikiwiki is doing more than filtering header information - it is filtering out all HTML formatting around MathML constituent objects. In the first example, we see that formatting for tables and such is preserved. 
--[[jcorneli]] [[!tag soc]] [[!tag wishlist]] ikiwiki-3.20160121/doc/todo/language_definition_for_the_meta_plugin.mdwn0000644000000000000000000001226212650125230023124 0ustar Here is a patch for the [[plugins/meta]] plugin. It adds the possibility to define the language used for a page, with \[[!meta lang="ja"]] It doesn't insert the langage information in the xhtml meta elements, but defines a LANG variable to use in the templates, for example with fr
    " xml:lang="fr"> This way also allows to define a language for a subset of the final page, with custom templates and inclusion. This may be useful for sites with a few pages in different languages, but no full i18n. > Looks good, but the need to modify the template and include a default > language in it is a bit problimatic, I think. --[[Joey]] >> --lang=XX could be a setup option, with a default value, then the template would be >>> Yes, that seems reasonable. I guess there's no problem with defaulting >>> to en if it can be overridden in the setup. --[[Joey]] >>>> Yes, english default makes sense. I guess we should use the `$config{lang}`, >>>> defined from the setup file or command-line options to define the default language >>>> (`$config{lang}` defaults to `en` which is fine) if the html pages, and override >>>> it from the `meta` directive. >>>> — [[NicolasLimare]] >>>>> ikiwiki already has a $config{locale}, which is a full locale (ie, >>>>> "en_US.UTF-8". This just needs to be parsed for the lang. --[[Joey]] >>>>>> My mistake, I meant $config{locale} --[[NicolasLimare]] > So the patch below could be changed to parse `$config{locale}` for the > language, and pass it if no specific lang was set for the page. The only > problem with that would be that this is all done inside the meta plugin, > so if that plugin were disabled, the lang would be empty. To avoid that, > I guess that the template needs to look like: lang="" xml:lang=""> > Now it just needs to be finished up.. --[[Joey]]
    --- meta.orig.pm  2007-07-27 00:19:51.000000000 +0200
    +++ meta.pm       2007-08-05 22:37:40.000000000 +0200
    @@ -11,6 +11,7 @@
     my %permalink;
     my %author;
     my %authorurl;
    +my %lang;
     
     sub import {
            hook(type => "preprocess", id => "meta", call => \&preprocess, scan => 1);
    @@ -100,6 +101,11 @@
                    $meta{$page}.='\n";
            }
    +       elsif ($key eq 'lang') {
    +           if ($value =~ /^[A-Za-z]{2}$/) {
    +               $lang{$page}=$value;
    +           }
    +       }
            else {
                    $meta{$page}.=scrub("\n");
    @@ -131,6 +137,8 @@
                    if exists $author{$page} && $template->query(name => "author");
            $template->param(authorurl => $authorurl{$page})
                    if exists $authorurl{$page} && $template->query(name => "authorurl");
    +       $template->param(lang => $lang{$page})
    +               if exists $lang{$page} && $template->query(name => "lang");
     
     }
    
    > Please resolve lang somewhere reusable rather than within meta plugin: It is certainly usable outside > the scope of the meta plugin as well. --[[JonasSmedegaard]] >> I don't see any problem with having this in meta? meta is on by default, and >> other plugins are free to use it or even depend on it (e.g. inline does). >> >> My only comments on this patch beyond what Joey said are that the page >> language could usefully go into `$pagestate{$page}{meta}{lang}` for other >> plugins to be able to see it (is that what you meant?), and that >> restricting to 2 characters is too restrictive (HTML 4.01 mentions >> `en`, `en-US` and `i-navajo` as possible language codes). >> This slightly complicates parsing the locale to get the default language: >> it'll need `tr/_/-/` after the optional `.encoding` is removed. >> --[[smcv]] >>> Now that po has been merged, this patch should probably also be adapted >>> so that the po plugin forces the meta::lang of every page to what po >>> thinks it should be. --[[smcv]] >>>> Agreed, users of the po plugin would greatly benefit from it. >>>> Seems doable. --[[intrigeri]] >>> Perhaps [[the_special_po_pagespecs|ikiwiki/pagespec/po]] should >>> also work with meta-assigned languages? --[[smcv]] >>>> Yes. But then, these special pagespecs should be moved outside of >>>> [[plugins/po]], as they could be useful to anyone using the >>>> currently discussed patch even when not using the po plugin. >>>> >>>> We could add these pagespecs to the core and make them use >>>> a simple language-guessing system based on a new hook. Any plugin >>>> that implements such a hook could decide whether it should >>>> overrides the language guessed by another one, and optionally use >>>> the `first`/`last` options (e.g. the po plugin will want to be >>>> authoritative on the pages of type po, and will then use >>>> `last`). 
--[[intrigeri]] [[!tag wishlist patch plugins/meta translation]] ikiwiki-3.20160121/doc/todo/l10n.mdwn0000644000000000000000000000505012650125230013606 0ustar ikiwiki should be fully internationalized. ---- As to the hardcoded strings in ikiwiki, I've internationalized the program, and there is a po/ikiwiki.pot in the source that can be translated. --[[Joey]] ---- > The now merged po plugin handles l10n of wiki pages. The only missing > piece now is l10n of the templates. > --[[Joey]] ---- ## template i18n From [[Recai]]: > Here is my initial work on ikiwiki l10n infrastructure (I'm sending it > before finalizing, there may be errors). I've revised the patches (tested OK): - $config{lang} patch: + Support for CGI::FormBuilder. + Modify Makefile.PL for l10n. - l10n infrastructure from Koha project. (This patch must be applied with '-p1', also, it needs a 'chmod +x l10n/*.pl' after patching.) + Leave templates dir untouched, use a temporary translations directory instead. + Fix Makefile (it failed to update templates). However... > fine. Also a final note, I haven't examined the quality of generated > templates yet. Looks like, tmpl_process3 cannot preserve line breaks in template files. For example, it processed the following template: Someone[1], possibly you, requested that you be emailed the password for user on [2]. The password is: -- ikiwiki [1] The user requesting the password was at IP address [2] Located at as (in Turkish): Birisi[1], ki muhtemelen bu sizsiniz, [2] üzerindeki kullanıcısına ait parolanın epostalanması isteğinde bulundu. Parola: -- ikiwiki [1] Parolayı isteyen kullanıcının ait IP adresi: [2] This could be easily worked around in tmpl_process3, but I wouldn't like to maintain a separate utility. ---- Another way to approach this would be to write a small program that outputs the current set of templates. Now i18n of that program is trivial, and it can be run once per language to generate localized templates. 
Then it's just a matter of installing the templates somewhere, and having them be used when a different language is enabled. It would make sense to make the existing `locale` setting control which templates are used. But the [[plugins/po]] plugin would probably want to do something more, and use the actual language the page is written in. --[[Joey]] ikiwiki-3.20160121/doc/todo/internal_definition_list_support.mdwn0000644000000000000000000000575312650125230021721 0ustar While ikiwiki can support definition lists (`dl/dt/dd`) through [[multimarkdown|plugins/mdwn]], it doesn't actually /do/ anything with those valuable definitions. It would be interesting for third party plugins to have access to this stuff as a proper data structure. This is what allows MoinMoin to have plugins that collect that data across multiple pages and tabulate it, for example. What I am proposing here is that the [[variables exported to plugins|plugins/write/#index6h2]] be extended to include a `%dictionnaries` hash. For a markup like this: [[!format txt """ Apple : Apple is a fruit : It's also a computer company Orange : Orange is a fruit """]] would result in a data structure like this: [[!format txt """ %dicts = { 'Apple' => [ "Apple is a fruit", "It's also a computer company" ], 'Orange' => [ "Orange is a fruit" ], } """]] Now, I know I can write myself a `format()` parser that would do this on all pages in my own plugin, but then it would need to be adapted to all markups, while markup formatters should be the ones implementing this directly, if possible. My first use case for this would be to extend the [[plugins/osm]] plugin to tap into those lists, so that I could have this data in the page, visible to the user: [[!format txt """ Longitude : -45.30 Latitude : 73.67 """]] and then reuse that data in the plugin. 
Then for us running the humongous [[koumbit wiki|https://wiki.koumbit.net/]], it is a necessary step to be able to migrate away from MoinMoin to Ikiwiki as we have a lot of pages that tabulate information like this. For example, see our [[ServerList|https://wiki.koumbit.net/ServerList]] ([[source|https://wiki.koumbit.net/ServerList?action=raw]]), being generated from pages like [[this one|https://wiki.koumbit.net/metis.koumbit.net]]. If there are no objections to that concept, I may try to start coding patches. Otherwise this is really just a [[wishlist]]. --[[anarcat]] > Have you looked at the [[/plugins/contrib/field]] plugin? This gives you the infrastructure, and all you need is to write a plugin that parses the definition list format. Then you could use [[/plugins/contrib/getfield]], [[/plugins/contrib/ftemplate]] and/or [[/plugins/contrib/report]] to do what you like with the data. > --[[KathrynAndersen]] > ---- > with the recent inclusion of discount to the [[plugins/mdwn]] module, definition lists can be used by default (instead of, as with multimarkdown, after an option is enabled), and look like this: > > =Apple= > Apple is a fruit. > Apple is also a company. > =Orange= > Orange is a fruit. > > (indented with four spaces). this makes definition lists a bit more attractive for definition harvesting. > > personally, i'd prefer a solution that works from the markup'ed plain text instead of invisible directives, as it integrates more naturally in the flow of designing a document, even though a plugin for explicitly stating invisible facts certainly has its purpose too. (think [[!wikipedia RDFa]] here ;-) ) --[[chrysn]] ikiwiki-3.20160121/doc/todo/interactive_todo_lists.mdwn0000644000000000000000000000356112650125230017621 0ustar This is a fleshed out todo based on discussions at [[forum/managing_todo_lists]]. I would like to have TODO lists inside ikiwiki wikis. 
This would mean: * a new markup plugin to support a language suitable for TODO lists (OPML, XOXO are two possible candidates) * some javascript to provide interactive editing. As [[chrysn]] pointed out on the forum page, this has some crossover with [[structured page data]]. In particular, if the markup language chosen had a concept of invalid markup (existing plugins just tend to ignore stuff that isn't explicitly part of their markup) we would need to sensibly handle that. Perhaps rejecting web edits and providing context help on why the edit was rejected, although that sounds like a significant headache. I have started working on this, albeit slowly. A proof of concept is at . There are two git repositories associated with my WIP: one contains the javascript, the plugin, the changes made to page templates; the other contains the contents of that wiki-site (so the test todos and the contents of bugs/ which forms a sort-of todo list for the todo list :) ) I will endeavour to get mirrors of those repos up on github or similar asap. -- [[Jon]] ---- Just to report the WIP plugin for this is now in a reasonably good state. I ended up just inventing a new markup language -- for now, items are divided by newlines and lists are one-dimensional, for simplicity. I got fed up thinking about how to handle the structured data issues / needing a lot of boilerplate around items and the implications for the "new item" dialogue. Still quite a lot to do though! 
-- [[Jon]] I've pushed a copy of the work in progress which consists of * A change to page.tmpl * A javascript underlay directory + javascript file * a few CSS bits in a local.css * a plugin to -- [[Jon]] ikiwiki-3.20160121/doc/todo/integration_with_Firefox_and_Iceweasel_feed_subscription_mechanism.mdwn0000644000000000000000000000144412650125230030515 0ustar Firefox and Iceweasel, when encountering a news feed, display a page that allows the user to subscribe to the feed, using Live Bookmarks, Google Reader, Bloglines, My Yahoo!, or an external reader program. The list of available applications comes from URIs and titles in the preferences, under `browser.contentHandlers.types.*`. For the benefit of people who use [[plugins/aggregate]] as their feed reader, the ikiwiki CGI could expose a URI to directly add a new feed to the aggregated list; this would allow users to configure their browser to subscribe to feeds via [[plugins/aggregate]] running on their site. We could then provide the manual configuration settings as a [[tip|tips]], and perhaps provide an extension or other mechanism to set them automatically. --[[JoshTriplett]] [[wishlist]] ikiwiki-3.20160121/doc/todo/inlines_inheriting_links.mdwn0000644000000000000000000000761612650125230020127 0ustar [[!tag wishlist]] Continuing the ideas in [[bugs/Inline doesn't wikilink to pages]]. I thought of a use case for another feature: making [[ikiwiki/directive/inline]] inherit the link relations of the included pages (optionally, say, with `inheritlinks=yes`). For example, if I want to list `elements/*` that have been linked to in any of `new_stuff/*`, I could try to write a [[ikiwiki/pagespec]] like `elements/* and backlink(new_stuff/*)`. This is not yet possible, as discussed in [[todo/tracking_bugs_with_dependencies]]. 
It would be possible to work around this limitation of pagespecs if it was possible to create a page `all_new_stuff` with `\[[!inline pages="new_stuff/*" inheritlinks=yes]]`: then the desired pagespec would be expressed as `elements/* and backlink(all_new_stuff)`. > Or, instead of specifying whether to inherit at the place of the inline, add more relations (`inline`, `backinline`) and relation composition (say, `*`, or haskell-ish `$` in order not confuse with the glob `*`) and explicitly write in the pagespecs that you want to follow the inline relation backwards: `elements/* and backlink$backinline(all_new_stuff)` or, equivalently, if [["classes"|todo/tracking_bugs_with_dependencies]] are implemented in pagespecs: `elements/* and backlink(backinline(all_new_stuff))`. Of course, this suggestion requires the powerful extension to pagespecs, but it gives more flexibility, and the possibility to avoid redundant information: the same pagespec at two places -- the inline and the other matching construction. > > BTW, adding more relations -- the `inline` relation among them -- would satisfy [[the other feature request|bugs/Inline doesn't wikilink to pages]]. --Ivan Z. This is not just an ugly workaround. The availability of this feature has some reason: the classes of pages you want to refer to "recursively" (in that kind of complex pagespecs) tend to have some meaning themselves. So, I might indeed want to have a page like `all_new_stuff`, it would be useful for me. And at the same time I would like to write pagespecs like `elements/* and backlink(all_new_stuff)` -- and using the proposed feature in [[todo/tracking_bugs_with_dependencies/]] would be less clean because then I would have to enter the same information at two places: the possibly complex pagespec in the inline. And having redundant information leads to inconsistency. 
So in a sense, in some or most cases, it would indeed be cleaner to "store" the definition of a class of pages referred to in complex pagespecs as a separate object. And the most natural representation for this definition of a class of pages (adhering to the principle of wiki that what you mean is entered/stored in its most natural representation, not through some hidden disconnected code) is making a page with an inline/map/or the like, so that at the same time you store the definition and you see what it is (the set of pages is displayed to you). I would actually use it in my current "project" in ikiwiki: I actually edit a set of materials as a set of subpages `new_stuff/*`, and I also want to have a combined view of all of them (made through inline), and at another page, I want to list what has been linked to in `new_stuff/*` and what hasn't been linked to.--Ivan Z. > I see where you're coming from, but let's think about > immplementation efficiency for a second. > > In order for inline inheritlinks=yes to work, > the inline directive would need to be processed > during the scan pass. > > When the directive was processed there, it would need > to determine which pages get inlined (itself a moderatly > expensive operation), and then determine which pages > each of them link to. Since the scan pass is unordered, > those pages may not have themselves been scanned yet. > So to tell what they link to, inline would have to load > each of them, and scan them. > > So there's the potential for this to slow > down a wiki build by about a factor of 2. > --[[Joey]] ikiwiki-3.20160121/doc/todo/inline_raw_files.mdwn0000644000000000000000000001046312650125230016351 0ustar [[!template id=gitbranch branch=wtk/raw_inline author="[[wtk]]"]] summary ======= Extend inlining to handle raw files (files with unrecognized extensions). Also raise an error in `IkiWiki::pagetype($file)` if `$file` is blank, which avoids trying to do much with missing files, etc. 
I'm using the new code in my [blog][]. [blog]: http://blog.tremily.us/posts/yacc2dot/ usage ===== \[[!inline pagenames="somefile.txt" template="raw" feeds="no"]] > But inline already supports raw files in two ways: > > * setting raw=yes will cause a page to be inlined raw without > using any template, as if it were part of the page at the location > of the inline > * otherwise, the file becomes an enclosure in the rss feed, for use with > podcasting. > > So I don't see the point of your patch. Although since your text > editor seems to like to make lots of whitespace changes, it's possible > I missed something in the large quantity of noise introduced by it. > --[[Joey]] >> As I understand it, setting `raw=yes` causes the page to be inlined >> as if the page contents had appeared in place of the directive. The >> content is then processed by whatever `htmlize()` applies to the >> inlining page. I want the inlined page to be unprocessed, and >> wrapped in `
    ...
    ` (as they are on the blog >> post I link to above). >> >> Enclosures do not include the page contents at all, just a link to >> them. I'm trying to inline the content so I can comment on it from >> the inlining page. >> >> Apologies for my cluttered version history, I should have branched my >> earlier changes off to make things clearer. I tried to isolate my >> whitespace changes (fixes?) in c9ae012d245154c3374d155958fcb0b60fda57ce. >> 157389355d01224b2d3c3f6e4c1eb42a20ec8a90 should hold all the content >> changes. >> >> A list of other things globbed into my master branch that should have >> been separate branches: >> >> * Make it easy to select a Markdown executable for mdwn.pm. >> * Included an updated form of >> [[Javier Rojas' linktoimgonly.pm|forum/link_to_an_image_inside_the_wiki_without_inlining_it]]. >> * Included an updated form of >> [Jason Blevins' mdwn_itex.pm](http://jblevins.org/git/ikiwiki/plugins.git/plain/mdwn_itex.pm). >> * Assorted minor documentation changes. >> >> --[[wtk]] >>> I haven't heard anything in a while, so I've reorganized my version >>> history and rebased it on the current ikiwiki head. Perhaps now it >>> will be easier to merge or reject. Note the new branch name: >>> `raw_inline`. I'll open separate todo items for items mentioned in my >>> previous comment. --[[wtk]] ---- Reviewing your patch the first thing I see is this:
    +        if (! $file) {
    +               error("Missing file.");
    +        }
    
    This fails if the filename is "0". Also, `pagetype()` currently cannot fail; allowing it to crash the entire wiki build if the filename is somehow undefined seems unwise. I didn't look much further, because it seems to me what you're trying to do can be better accomplished by using the highlight plugin. Assuming the raw file you want to inline and comment on is some source-code-like thing, which seems likely. Or, another way to do it would be to use the templates plugin, and make a template there that puts an inline directive inside pre tags. --[[Joey]] [[!tag reviewed]] ---- If `pagetype()` cannot fail, then I suppose that check has to go ;). I was under the impression that [[plugins/highlight]] didn't support inlining code. It looks like it supports highlighing stand-alone files or embedded code. Perhaps I should extend it to support inlined code instead of pushing this patch? > If you configure highlight to support standalone files, then you can > inline the resulting pages and get nicely highlighted source code > inlined into the page. --[[Joey]] The `raw.tmpl` included in the patch *does* include the inlined content inside `pre` tags. The problem is that the current inline code insists on running `htmlize()` on the content before inserting it in the template. The heart of my patch is an altered `get_inline_content()` that makes the `htmlize()` call dependent on a `$read_raw` flag. If the flag is set, the raw (non-htmlized) content is used instead. I just rebased my patches against the current Ikiwiki trunk (no major changes) to make them easier to review. 
--[[wtk]] ikiwiki-3.20160121/doc/todo/inline_postform_autotitles.mdwn0000644000000000000000000000577312650125230020534 0ustar [[!tag wishlist patch plugins/inline]] [[!template id=gitbranch branch=chrysn/inlineautotitles author="[[chrysn]]"]] for postforms in inlines of pages which follow a certain scheme, it might not be required to set the title for each individual post, but to automatically set the title and show no input box prompting for it. this can either be based on timestamp formatting, or use the already existing munging mechanism, which appends numbers to page titles in case that page already exists. two patches (b568eb25, 34bc82f2) set inline up for that, adding an additional `autotitle` parameter. if that is given, the regular input of the inline postform will be replaced with a hidden input of that text. in addition, the empty title is permitted (both for autotitle and regular titles, as they go in the same GET parameter, `title`). as the empty page title is illegal, munging is used, resulting in ascending numeric page titles to be created. the second patch is actually a one-liner, filtering the title through strftime. > Something similar was requested in [[todo/more_customisable_titlepage_function]], > in which [[Joey]] outlined a similar solution. > > What's your use-case for not prompting for the title at all? I can see > [[madduck]]'s requirement for the title he typed in (say, "foo") > being transformed into 2009/07/26/foo or something (I name blog posts > like that myself), but I can't quite see the use for *entirely* automatic > titles. > > However, if that's really what you want, I suspect your code could be > extended so it also solves madduck's second request on > [[todo/more_customisable_titlepage_function]]. 
> > --[[smcv]] ### potential user interaction issues this has two side effects which have to be considered: first, the empty page title is accepted also in normal postforms (previously, this resulted in a "bad page name" error); second, entering a percent sign in that field might result in unexpexted strftime substitution (strftime might not even substitute for common uses of percent as in "reach 10% market share", but might in others as in "the 10%-rule"). both can be circumvented by using another GET parameter for autotexts, as implemented in 40dc10a4. > this patch still does not work perfectly; especially, it should make a > distinction between "autotitle is set but equal ''" (in which case it > should create a page named `1.mdwn`, and "autotitle is not set, and title is > equal ''" (in which case it should display the old error message) --[[chrysn]] ### potential security issues * the autotitle's value is directly output through the template (but that's done in other places as well, so i assume it's safe) * i don't know if anything bad can happen if unfiltered content is passed to POSIX::strftime. ### further extension having a pre-filled input field instead of an unchangable hidden input might be cool (eg for creating an entry with yesterday's date), but would be a bit of a problem with static pages. javascript could help with the date part, but name munging would be yet another thing. 
ikiwiki-3.20160121/doc/todo/inline_plugin:_specifying_ordered_page_names.mdwn0000644000000000000000000000200712650125230024064 0ustar A [[!taglink patch]] in my git repository (the inline-pagenames branch) adds the following parameter to the [[ikiwiki/directive/inline]] directive: > * `pagenames` - If given instead of `pages`, this is interpreted as a > space-separated list of links to pages (with the same > [[ikiwiki/SubPage/LinkingRules]] as in a [[ikiwiki/WikiLink]]), and they are inlined > in exactly the order given: the `sort` and `pages` parameters cannot be used > in conjunction with this one. This is on my [[wishlist]] for my [[plugins/contrib/album]] plugin, which currently uses it internally (as it has already collected the pages in order). It could also be useful for other things, like [[todo/wikitrails]]. --[[smcv]] [[!tag plugins/inline]] > It's sort of a pity that a pagespec like "a or b or c" doesn't somehow > match to (a, b, c) in that order, but I don't see how that would be > generally possible. While this feels a bit like bloat and inline already > has far too many parameters, I have [[merged|done]] it. --[[Joey]] ikiwiki-3.20160121/doc/todo/inline_plugin:_hide_feed_buttons_if_empty.mdwn0000644000000000000000000000067012650125230023413 0ustar < joeyh> 03:49:19> also, I think it may be less visually confusing to drop the rss/atom buttons for comments when there are none yet This seems to me like something that applies to the [[plugins/inline]] plugin in general, rather than the [[plugins/contrib/comments]] plugin specifically. --[[smcv]] >> [[done]] as emptyfeeds option, not on by default for inline, but I think >> it should be for comments --[[Joey]] ikiwiki-3.20160121/doc/todo/inline_plugin:_ability_to_override_feed_name.mdwn0000644000000000000000000000237112650125230024066 0ustar If RSS and Atom are enabled by default, the [[plugins/contrib/comments]] plugin generates a feed, perhaps `/sandbox/index.atom` for comments on the sandbox. 
If a blog is added to the page, the blog will steal the name `/sandbox/index.atom` and the comments plugin's feed will change to `/sandbox/index.atom2`. If `\[[!inline]]` gained a parameter `feedname` or something, the comments plugin could use `feedname=comments` to produce `/sandbox/comments.atom` instead (this would just require minor enhancements to rsspage(), atompage() and targetpage()). As a side benefit, [my blog](http://smcv.pseudorandom.co.uk/) could go back to its historical Atom feed URL of `.../feed.atom` (which is currently a symlink to `index.atom` :-) ) On sites not using `usedirs` the current feed is `/sandbox.atom`, and we could perhaps change it to `/sandbox-comments.atom` or `/sandbox/comments.atom` if `feedname=comments` is given. --[[smcv]] > This is slightly hard to do, because you have to worry about > conflicting pages setting feedname, which could cause ikiwiki to blow up. > > Particularly for the non-usedirs case, where a page `sandbox/comments` > would produce the same feed as sandbox with `feedname=comments`. > --[[Joey]] > [[done]] as feedfile option --[[Joey]] ikiwiki-3.20160121/doc/todo/inline_option_for_pagespec-specific_show__61__N.mdwn0000644000000000000000000000036612650125230024330 0ustar inline could have a pagespec-specific show=N option, to say things like "10 news items (news/*), but at most 3 news items about releases (news/releases/*)". This should eliminate the need for wikiannounce to delete old news items about releases.ikiwiki-3.20160121/doc/todo/inline_directive_should_support_pagination.mdwn0000644000000000000000000000100012650125230023722 0ustar Ikiwiki should support pagination for index pages. Something like showing only 10 items on the first page, and then having the other items on the other pages. 
Basically, the same page would be rendered multiple times: - The index page: rendered normally, but item list is truncated to N items - The separate pages: rendered with a slice of the item list containing N items (or less for the last page) This I think breaks one major assumption: that source pages only generate one page in the output directory. ikiwiki-3.20160121/doc/todo/inline:_numerical_ordering_by_title.mdwn0000644000000000000000000002417312650125230022236 0ustar Could you please add numerical ordering by title to [[inline|plugins/inline]] plugin? Now I can do only alphabetical order by title, but sometime it's not enough. > Implemented, see [[natural_sorting]] [[!tag done]] --[[Joey]] BTW, it seems that ordering by title is rather ordering by filename of page. For me "title" means title of page I can set using `title` parameter of [[meta|plugins/meta]] plugin :) Why do I need that feature? I've just been migrating an info site of our university [mail system](http://poczta.uw.edu.pl/) to Ikiwiki from very static, console handling Makefile+[WML](http://thewml.org/)+XML+XSL=HTML solution. I have many news files (`1.mdwn`, `2.mdwn`, etc.) and unfortunately I did very stupid thing. I've commited all of them in the same revision of our Subversion repo... Now I have a problem with sorting these files using inline plugin. I can't do sorting by age, because both old and young news files have the same age. I can't sort by title too. For example, when I sort them by title, then `9.mdwn` page is between `90.mdwn` and `89.mdwn` pages... It sucks, of course. Sorting by mtime also is not a solution for me, because it means that I can't touch/fix old news anymore. Do you have any idea how to workaround that issue? --[[Paweł|ptecza]] > Delete all files. Add files back one at a time, committing after adding > each file. Sort by date. --[[Joey]] >> The simplest solutions are the best :D Thanks for the hint! 
I didn't >> want to do it before, because I was affaid that my Subversion keeps >> old date of creation of file. --[[Paweł|ptecza]] > Maybe you can rename `9.mdwn` to `09.mdwn`? See `rename(1)`, it renames multiple files > in one go. --[[buo]] >> Thanks for your suggestion! But what about if number of my news files grows to 100+? >> $ ls >> 09.mdwn 100.mdwn 101.mdwn 102.mdwn 89.mdwn 90.mdwn >> I don't want to rename all previous files to add `0` prefix. --[[Paweł|ptecza]] >>> Rather than adding 0's or or a 'sorttype' parameter, I'd just fix the sort order. >>> Both MacOS and Windows use a smarter sort order than just lexical in their >>> file browsers (e.g. , >>> ). >>> >>> The [Unicode Collation algorithm](http://en.wikipedia.org/wiki/Unicode_collation_algorithm) >>> would seem to be a reasonable sort order. (See also .) >>> Unfortunately the standard perl implementation, [Unicode::Collate](http://perldoc.perl.org/Unicode/Collate.html) >>> doesn't handle the optional [numbers](http://www.unicode.org/unicode/reports/tr10/#Customization) >>> extension which is what you want. --[[Will]] --- Below is my simple patch. Feel free to use it or comment! I have also 2 considerations for inline sorting: 1. Maybe changing name of `sort` parameter to `sortby` or `sortkey` will be good idea? > No, that would break existing wikis. --[[Joey]] >> It's no problem. You just have `ikiwiki-transition` utility :D --[[Paweł|ptecza]] 1. Maybe you should use `title` sort key for title from meta plugin and `name`, `filename`, `page` or `pagename` for page names? In the future you can also sort by meta author, license or another key. > There are many places in ikiwiki that do not use meta title info and > could. 
I'd prefer to deal with that issue as a whole, not here, > --[[Joey]] --[[Paweł|ptecza]] --- inline.pm-orig 2008-09-02 09:53:20.000000000 +0200 +++ inline.pm 2008-09-02 10:09:02.000000000 +0200 @@ -186,7 +186,15 @@ } if (exists $params{sort} && $params{sort} eq 'title') { - @list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list; + if (! $params{sorttype} || $params{sorttype} eq 'lexical') { + @list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list; + } + elsif ($params{sorttype} eq 'numeric') { + @list=sort { pagetitle(basename($a)) <=> pagetitle(basename($b)) } @list; + } + else { + return sprintf(gettext("unknown sort type %s"), $params{sorttype}); + } } elsif (exists $params{sort} && $params{sort} eq 'mtime') { @list=sort { $pagemtime{$b} <=> $pagemtime{$a} } @list; @@ -195,7 +203,7 @@ @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list; } else { - return sprintf(gettext("unknown sort type %s"), $params{sort}); + return sprintf(gettext("unknown sort key %s"), $params{sort}); } if (yesno($params{reverse})) { > To users, "sort" already determines the type of sort. It can be by title, > or by date, etc. Adding a separate "sorttype" value is thus fairly > confusing. --[[Joey]] >> OK. I will be more careful when I play with inline plugin :) --[[Paweł|ptecza]] --- Joey, have you forgotten about that request? ;) --[[Paweł|ptecza]] > Okie. Here is a different [[patch]] based on my comment above. It doesn't introduce > a new key, but rather changes the title sorting order. Two caveats: * I've only tested this in `inline`, not the other places I changed the sort order. * I'm unsure if the regexp used in the split should be `/(-?\d+)/` instead of `/(\d+)/`. As written, '-' is interpreted as a hyphen rather than a minus sign. > --[[Will]] >> I"m not comfortable with tossing out perl's default collator and trying >> to maintain some other one going forward. Especially not for such an >> edge case. --[[Joey]] >> Hi Will! 
Your idea looks interesting for me, but I'm affraid that it's too big >> change in Ikiwiki... Maybe I'm wrong? ;) What do you think, Joey? --[[Paweł|ptecza]] >>> It isn't that big a change. It is just supplying a sort order to the sort. The >>> patch is a little larger because I then went through and made that sort >>> order available in other places where it makes sense. (Looking at the >>> patch again briefly, I should have also used it in the `map` plugin.) >>> >>> If you wanted a simple patch, you could just move the `titlecmp` function >>> into the inline plugin and only use it there. The problem with that is that >>> it only fixes the inline plugin. -- [[Will]] >>>> Will, I agree with you that it's improved way of sort order. But on the other >>>> hand I prefer to be careful when I change something in a several places, >>>> because I don't want to break any working things when I fix one thing. >>>> I hope that Joey agree with us too and all Ikiwiki users will be happy >>>> after applying your patch ;) --[[Paweł|ptecza]] ---- diff --git a/IkiWiki.pm b/IkiWiki.pm index c0f5dea..d001f8d 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm @@ -20,7 +20,7 @@ use Exporter q{import}; our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match bestlink htmllink readfile writefile pagetype srcfile pagename displaytime will_render gettext urlto targetpage - add_underlay + add_underlay titlecmp %config %links %pagestate %renderedfiles %pagesources %destsources); our $VERSION = 2.00; # plugin interface version, next is ikiwiki version @@ -835,6 +835,42 @@ sub titlepage ($) { return $title; } +sub titlecmp ($$) { + my $titleA=shift; + my $titleB=shift; + + my @listA=split(/(\d+)/,$titleA); + my @listB=split(/(\d+)/,$titleB); + + while (@listA && @listB) { + # compare bits of text + my $a = shift @listA; + my $b = shift @listB; + my $c = ($a cmp $b); + return $c if ($c); + + if (@listA && @listB) { + # compare numbers + $a = shift @listA; + $b = shift @listB; + $c = 
$a <=> $b; + return $c if ($c); + + # 01 is different to 1 + $c = (length($a) <=> length($b)); + return $c if ($c); + + $c = ($a cmp $b); + return $c if ($c); + } + } + + return 1 if (@listA); + return -1 if (@listB); + + return 0; +} + sub linkpage ($) { my $link=shift; my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_"; diff --git a/IkiWiki/Plugin/brokenlinks.pm b/IkiWiki/Plugin/brokenlinks.pm index 37752dd..ccaa399 100644 --- a/IkiWiki/Plugin/brokenlinks.pm +++ b/IkiWiki/Plugin/brokenlinks.pm @@ -59,7 +59,7 @@ sub preprocess (@) { map { "
  • $_
  • " } - sort @broken) + sort titlecmp @broken) ."\n"; } diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm index 8efef3f..263e7a6 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm @@ -192,7 +192,7 @@ sub preprocess_inline (@) { } if (exists $params{sort} && $params{sort} eq 'title') { - @list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list; + @list=sort { titlecmp(pagetitle(basename($a)),pagetitle(basename($b))) } @list; } elsif (exists $params{sort} && $params{sort} eq 'mtime') { @list=sort { $pagemtime{$b} <=> $pagemtime{$a} } @list; diff --git a/IkiWiki/Plugin/orphans.pm b/IkiWiki/Plugin/orphans.pm index b910758..10a1d87 100644 --- a/IkiWiki/Plugin/orphans.pm +++ b/IkiWiki/Plugin/orphans.pm @@ -56,7 +56,7 @@ sub preprocess (@) { htmllink($params{page}, $params{destpage}, $_, noimageinline => 1). "" - } sort @orphans). + } sort titlecmp @orphans). "\n"; } diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm index ceb7c84..00798e1 100644 --- a/IkiWiki/Render.pm +++ b/IkiWiki/Render.pm @@ -89,7 +89,7 @@ sub genpage ($$) { $template->param(have_actions => 1); } - my @backlinks=sort { $a->{page} cmp $b->{page} } backlinks($page); + my @backlinks=sort { titlecmp($a->{page}, $b->{page}) } backlinks($page); my ($backlinks, $more_backlinks); if (@backlinks <= $config{numbacklinks} || ! $config{numbacklinks}) { $backlinks=\@backlinks; ikiwiki-3.20160121/doc/todo/indyauth_support.mdwn0000644000000000000000000000102512650125230016453 0ustar this looks pretty awesome: anyone working on a plugin or has ideas on how to implement this? --[[anarcat]] > My understanding of indyauth is that the wiki owner would need to pick an > indyauth provider, which handles the communication with the Big Silos. > > I guess the wiki owner could run their own, but they'd be more likely to > run the one provided by the indyauth people. So, this is effectively > centralized, although without lock-in. 
> > Also, see related --[[Joey]] ikiwiki-3.20160121/doc/todo/index.html_allowed.mdwn0000644000000000000000000001221212650125230016613 0ustar This page used to be used for two patches, one of which is applied providing the usedirs option for output. The remaining patch, discussed below, concerns wanting to use foo/index.mdwn source files and get an output page name of foo, rather than foo/index. --[[Joey]] [[!tag patch]] --- I independently implemented a similar, but smaller patch. (It's smaller because I only care about rendering; not CGI, for example.) The key to this patch is that "A/B/C" is treated as equivalent to "A/B/C/index". Here it is: --Per Bothner --- IkiWiki/Render.pm~ 2007-01-11 15:01:51.000000000 -0800 +++ IkiWiki/Render.pm 2007-02-02 22:24:12.000000000 -0800 @@ -60,9 +60,9 @@ foreach my $dir (reverse split("/", $page)) { if (! $skip) { $path.="../"; - unshift @ret, { url => $path.htmlpage($dir), page => pagetitle($dir) }; + unshift @ret, { url => abs2rel(htmlpage(bestlink($page, $dir)), dirname($page)), page => pagetitle($dir) }; } - else { + elsif ($dir ne "index") { $skip=0; } } --- IkiWiki.pm~ 2007-01-12 12:47:09.000000000 -0800 +++ IkiWiki.pm 2007-02-02 18:02:16.000000000 -0800 @@ -315,6 +315,12 @@ elsif (exists $pagecase{lc $l}) { return $pagecase{lc $l}; } + else { + my $lindex = $l . "/index"; + if (exists $links{$lindex}) { + return $lindex; + } + } } while $cwd=~s!/?[^/]+$!!; if (length $config{userdir} && exists $links{"$config{userdir}/".lc($link)}) { Note I handle setting the url; slightly differently. Also note that an initial "index" is ignored. I.e. a page "A/B/index.html" is treated as "A/B". > Actually, your patch is shorter because it's more elegant and better :) > I'm withdrawing my old patch, because yours is much more in line with > ikiwiki's design and architecture. 
> I would like to make one suggestion to your patch, which is: diff -urX ignorepats clean-ikidev/IkiWiki/Plugin/inline.pm ikidev/IkiWiki/Plugin/inline.pm --- clean-ikidev/IkiWiki/Plugin/inline.pm 2007-02-25 12:26:54.099113000 -0800 +++ ikidev/IkiWiki/Plugin/inline.pm 2007-02-25 14:55:21.163340000 -0800 @@ -154,7 +154,7 @@ $link=htmlpage($link) if defined $type; $link=abs2rel($link, dirname($params{destpage})); $template->param(pageurl => $link); - $template->param(title => pagetitle(basename($page))); + $template->param(title => titlename($page)); $template->param(ctime => displaytime($pagectime{$page})); if ($actions) { @@ -318,7 +318,7 @@ my $pcontent = absolute_urls(get_inline_content($p, $page), $url); $itemtemplate->param( - title => pagetitle(basename($p), 1), + title => titlename($p, 1), url => $u, permalink => $u, date_822 => date_822($pagectime{$p}), diff -urX ignorepats clean-ikidev/IkiWiki/Render.pm ikidev/IkiWiki/Render.pm --- clean-ikidev/IkiWiki/Render.pm 2007-02-25 12:26:54.745833000 -0800 +++ ikidev/IkiWiki/Render.pm 2007-02-25 14:54:01.564715000 -0800 @@ -110,7 +110,7 @@ $template->param( title => $page eq 'index' ? $config{wikiname} - : pagetitle(basename($page)), + : titlename($page), wikiname => $config{wikiname}, parentlinks => [parentlinks($page)], content => $content, diff -urX ignorepats clean-ikidev/IkiWiki.pm ikidev/IkiWiki.pm --- clean-ikidev/IkiWiki.pm 2007-02-25 12:26:58.812850000 -0800 +++ ikidev/IkiWiki.pm 2007-02-25 15:05:22.328852000 -0800 @@ -192,6 +192,12 @@ return $untainted; } +sub titlename($;@) { + my $page = shift; + $page =~ s!/index$!!; + return pagetitle(basename($page), @_); +} + sub basename ($) { my $file=shift; > This way foo/index gets "foo" as its title, not "index". --Ethan I took another swing at this and subverted the dominant paradigm. Here goes:
    diff -ru ikiwiki-2.4/IkiWiki.pm ikiwiki/IkiWiki.pm
    --- ikiwiki-2.4/IkiWiki.pm	2007-06-26 15:01:57.000000000 -0700
    +++ ikiwiki/IkiWiki.pm	2007-07-25 15:58:00.990749000 -0700
    @@ -239,6 +239,7 @@
     	my $type=pagetype($file);
     	my $page=$file;
     	$page=~s/\Q.$type\E*$// if defined $type;
    +	$page=~s/\/index$// if $page =~ /\/index$/;
     	return $page;
     }
     
    
    This just makes it so that all files named foo/index become pages called foo, which is the desired effect. I haven't tested everything so far, so be careful! But you can see it working at http://ikidev.betacantrips.com/one/ again, as before. --Ethan [[done]], the indexpages setting enables this. ikiwiki-3.20160121/doc/todo/inband_acl_data.mdwn0000644000000000000000000000503612650125230016103 0ustar it [[!tag wishlist]] would be nice to have acls that get their data from wiki pages. a particular use case is the [debienna wiki](http://debienna.at/) (our local debian usergroup), where there are few admins, but everyone who has been granted edit rights to the wiki should be allowed to allow other people in. those people can register their accounts on their own, but may only write to a dedicated page where they request write privileges. the setup file should look like this: locked_pages: '!PleaseClearForEditing and !user_in_page(DebiennaGroup)' and DebiennaGroup would contain * \[[chrysn]] * \[[albert]] * \[[rhonda]] etc. a suggested implementation is published on `git://prometheus.amsuess.com/ikiwiki-plugins` and is short enough to be quoted here: #!/usr/bin/perl # Ikiwiki "user_in_page" pagespec # # The pagespec user_in_page(some_page) returns success if the currently logged # in user is mentioned in a wikilink on some_page (which might be relative to # the currently active page, which allows per-directory restrictions). # # To be precise, the string \[[${USERNAME}]] has to be present in the some_page # source file. 
package IkiWiki::Plugin::user_in_page; package IkiWiki::PageSpec; sub match_user_in_page ($$;@) { my $page=shift; my $userlistpage=shift; my %params=@_; my $user=$params{user}; # this is relative to page, but this is intentional my $userlistpagename = IkiWiki::bestlink($page, $userlistpage); # FIXME: pagesources seems not to be defined in do=edit my $userlistpagefile = "$userlistpagename/index.mdwn"; my $userlistpagedata = IkiWiki::readfile(IkiWiki::srcfile($userlistpagefile)); if ($userlistpagedata =~ /\Q\[[$user]]\E/ ) { return IkiWiki::SuccessReason->new("User $user is listed in $userlistpagename"); } else { return IkiWiki::FailReason->new("User $user is not listed in $userlistpagename"); } } 1 before i complete this as a proposed plugin, i'd like to know * if you have better ideas to check for the delimited user name than the \[[$user]] scheme? * i had to manually expand `$pagename` to `$pagename/index.mdwn` as %pagesources seems to be empty in the context of `?do=edit`. how is this supposed to work? --[[chrysn]] > Just for the record, this seems to be a special case of [[todo/per_page_ACLs/]]. -- [[anarcat]] ikiwiki-3.20160121/doc/todo/improved_parentlinks_styling.mdwn0000644000000000000000000000060412650125230021044 0ustar Use a styled ul for the parentlinks. On second thought, I've decided not to. Doing that would make ikiwiki less usable in browsers like w3m that don't support styled uls. ikiwiki does use styled uls for other things, such as the action bar, but displaying that as a simple unstyled list in a simple browser works well and makes sense. For parent links, it does not. --[[Joey]] [[done]] ikiwiki-3.20160121/doc/todo/improved_mediawiki_support.mdwn0000644000000000000000000000071712650125230020505 0ustar [[!tag patch todo wishlist]] I several updates to the mediawiki plugin to improve compatibility, improving img and File: support. I'd love to get them upstream. Is there any interest? 
Patches are at [[http://www.isi.edu/~johnh/SOFTWARE/IKIWIKI/index.html]] > The mediawiki plugin has never been included in ikiwiki, it's > [provided by a third party](https://github.com/jmtd/mediawiki.pm) and > you should send your patches to them. > [[done]] > --[[Joey]] ikiwiki-3.20160121/doc/todo/improve_globlists.mdwn0000644000000000000000000000062312650125230016600 0ustar Need to improve globlists, adding more powerful boolean expressions. The current behavior is to check for negated expressions, and not match if there are any, then check for normal expressions and match if any match, This fails if you want to do something like match only pages with tag foo that are under directory bar. I think we need parens for grouping, and probably also boolean OR. [[todo/done]] ikiwiki-3.20160121/doc/todo/improve_decentralised_wikis_documentation_and_graphics.mdwn0000644000000000000000000000200412650125230026240 0ustar i'm trying to work on the decentralised wikis documentation in [[tips/distributed_wikis]] and i would like to add more / edit SVG graphics, but I can't because i'm not admin. --[[users/anarcat]] [[!template id=gitbranch branch=anarcat/dev/decentralised_graphics author="[[anarcat]]"]] This branch is ready to be merged, I believe. There are some improvements to be done on the text of those pages, but I'd like to get the images in before going any further. [[patch]] Here are the images, to give you an idea: * * * * [[patch merged|done]], thanks. ikiwiki-3.20160121/doc/todo/ikibot.mdwn0000644000000000000000000000101012650125230014305 0ustar Random idea: create an ikiwiki IRC bot, which notices the use of ikiwiki syntax in the channel and translates. This would work nicely for "frequently-given answer" bots like dpkg on #debian, since you could give answers by linking to wiki pages. ikibot could also excerpt page content. How do I set up ikiwiki with Git? 
\[[setup]] http://ikiwiki.info/setup.html: "This tutorial will walk you through setting up a wiki with ikiwiki. ..." --[[JoshTriplett]] [[wishlist]] ikiwiki-3.20160121/doc/todo/hyphenation.mdwn0000644000000000000000000000274012650125230015365 0ustar [[!tag wishlist]] I recently found [Hyphenator](http://code.google.com/p/hyphenator/) which is quite cool ... but it should be possible to implement this functionality within ikiwiki and not rely on javascript and the client. A Perl implementation of the algorithm exists in [[!cpan TeX::Hyphen]]. > I'd be inclined to say that Javascript run in the client is a better > place to do hyphenation: this is the sort of non-essential, > progressive-enhancement thing that JS is perfect for. If you did it > at the server side, to cope with browser windows of different sizes > you'd have to serve HTML sprinkled with soft-hyphen entities at > every possible hyphenation point, like > > pro­gress­ive en­hance­ment > > which is nearly twice the byte-count and might stop > search engines from indexing your site correctly. > > A browser that supports Javascript probably also supports > soft-hyphen marks, but I doubt all non-JS browsers support them > correctly. > > It might be good to have a plugin to insert a reference to the > hyphenation JS into the ``, or a general way to enable > this sort of thing without writing a plugin or changing your > `page.tmpl`, though. Perhaps we should have a `local.js` > alongside `local.css`? :-) > > --[[smcv]] >> Thanks, I did not realize that the javascript does something else than add &shy;s - took a closer look at it now. >> I doubt however that adding them will increase the byte count more than transmitting the javascript. ikiwiki-3.20160121/doc/todo/httpauth_feature_parity_with_passwordauth.mdwn0000644000000000000000000000207412650125230023642 0ustar The only way to have a private ikiwiki, with a shared user database for static pages and CGI authentication, is to use [[plugins/httpauth]]. 
It would be good for httpauth to be on par with [[plugins/passwordauth]], i.e. to allow registering users, resetting passwords, and changing passwords; supporting some kind of `account_creation_password` configuration option would be nice, too. I'll probably propose patches implementing this at some point. I've not had a single look at the code yet, but it may be nice to factorize the relevant passwordauth code, instead of rewriting it completely in httpauth. -- [[intrigeri]] Well, on such a private wiki, one can neither register herself nor reset his password: the registration page, as any other page, would be forbidden to non-authenticated users. Admin users should then be enabled to: - register a new user - reset someone else's password In both cases, a brand new random password is sent by e-mail to the new user. An authenticated user should nevertheless be able to change his own password. -- [[intrigeri]] [[wishlist]] ikiwiki-3.20160121/doc/todo/httpauth_example/0000755000000000000000000000000012650125230015521 5ustar ikiwiki-3.20160121/doc/todo/httpauth_example/discussion.mdwn0000644000000000000000000000050212650125230020570 0ustar I wrote my experiences dealing with authentication [here](http://wiki.swclan.homelinux.org/tech/). Does it fit the bill? Its not really authentication (though its mentioned in my process) so much as just an attempt to eliminate the clear text password problem. --[Andrew Sackville-West](mailto:andrew@swclan.homelinux.org)ikiwiki-3.20160121/doc/todo/httpauth_example.mdwn0000644000000000000000000000066212650125230016414 0ustar ikiwiki should supply an example .htaccess file for use with HTTP authentication (perhaps as a [[tip|tips]]), showing how to authenticate the user for edits without requiring authentication for the entire wiki. (Ideally, recentchanges should work without authentication as well, even though it goes through the CGI.) 
--[[JoshTriplett]] > (Now that recentchanges is a static page, it auths the same as other wiki > pages.) --[[Joey]] ikiwiki-3.20160121/doc/todo/http_bl_support.mdwn0000644000000000000000000000323312650125230016265 0ustar [Project Honeypot](http://projecthoneypot.org/) has an HTTP:BL API available to subscribed (it's free, accept donations) people/orgs. There's a basic perl package someone wrote, I'm including a copy here. [from here](http://projecthoneypot.org/board/read.php?f=10&i=112&t=112) > The [[plugins/blogspam]] service already checks urls against > the surbl, and has its own IP blacklist. The best way to > support the HTTP:BL may be to add a plugin > [there](http://blogspam.repository.steve.org.uk/file/cc858e497cae/server/plugins/). > --[[Joey]]
    package Honeypot;
    
    use Socket qw/inet_ntoa/;
    
    my $dns = 'dnsbl.httpbl.org';
    my %types = (
    0	=> 'Search Engine',
    1	=> 'Suspicious',
    2	=> 'Harvester',
    4	=> 'Comment Spammer'
    );
    sub query {
    my $key = shift || die 'You need a key for this, you get one at http://www.projecthoneypot.org';
    my $ip = shift || do {
    warn 'no IP for request in Honeypot::query().';
    return;
    };
    
    my @parts = reverse split /\./, $ip;
    my $lookup_name = join'.', $key, @parts, $dns;
    
    my $answer = gethostbyname ($lookup_name);
    return unless $answer;
    $answer = inet_ntoa($answer);
    my(undef, $days, $threat, $type) = split /\./, $answer;
    my @types;
    while(my($bit, $typename) = each %types) {
    push @types, $typename if $bit & $type;
    }
    return {
    days => $days,
    threat => $threat,
    type => join ',', @types
    };
    
    }
    1;
    
    From the page: > The usage is simple: > use Honeypot; > my $key = 'XXXXXXX'; # your key > my $ip = '....'; the IP you want to check > my $q = Honeypot::query($key, $ip); > use Data::Dumper; > print Dumper $q; Any chance of having this as a plugin? I could give it a go, too. Would be fun to try my hand at Perl. --[[simonraven]] [[!tag wishlist]] ikiwiki-3.20160121/doc/todo/htpasswd_mirror_of_the_userdb.mdwn0000644000000000000000000000311312650125230021151 0ustar [[!tag wishlist]] Ikiwiki is static, so access control for viewing the wiki must be implemented on the web server side. Managing wiki users and access together, we can currently * use [[httpauth|plugins/httpauth/]], but some [[passwordauth|plugins/passwordauth]] functionnality [[is missing|todo/httpauth_feature_parity_with_passwordauth/]]; * use [[passwordauth|plugins/passwordauth]] plus [[an Apache `mod_perl` authentication mechanism|plugins/passwordauth/discussion/]], but this is Apache-centric and enabling `mod_perl` just for auth seems overkill. Moreover, when ikiwiki is just a part of a wider web project, we may want to use the same userdb for the other parts of this project. I think an ikiwiki plugin which would (re)generate an htpasswd version of the user/passwd base (better, two htpasswd files, one with only the wiki admins and one with everyone) each time an user is added or modified would solve this problem: * access control can be managed from the web server * user management is handled by the passwordauth plugin * htpasswd format is understood by various servers (Apache, lighttpd, nginx, ...) and languages commonly used for web development (perl, python, ruby) * htpasswd files can be mirrored on other machines when the web site is distributed -- [[nil]] > I think this is a good idea. Although unless the password hashes that > are stored in the userdb are compatible with htpasswd hashes, > the htpasswd hashes will need to be stored in the userdb too. 
Then > any userdb change can just regenerate the htpasswd file, dumping out > the right kind of hashes. --[[Joey]] ikiwiki-3.20160121/doc/todo/htmlvalidation.mdwn0000644000000000000000000000501512650125230016054 0ustar * Doctype is XHTML 1.0 Strict One consideration of course is that regular users might embed html that uses deprecated presentational elements like <center>. At least firefox seems to handle that mixture ok. --[[Joey]] * [ [inlinepage] ] gets wrapped in <p>...</p> which has a high chance of invalidating the page. Since markdown does this, the only way I can think to fix it is to make the inlined page text start with </p> and end with <p>. Ugly, and of course there could be problems with markdown enclosing it in other spanning tags in some cases. I've implemented this hack now. :-/ --[[Joey]] I used this 'hack' myself, but yesterday I came up with a better idea: <div class="inlinepage"> [ [inlinepage] ] </div> This prevents markdown enclosing and even adds a useful css identifier. Problem is that this should be added to every page and not in the template(s). --[[JeroenSchot]] I can make ikiwiki add that around every inlined page easily enough. However, where is it documented? Came up dry on google. --[[Joey]] From : > The only restrictions are that block-level HTML elements e.g. <div>, <table>, <pre>, <p>, etc. must be separated from surrounding content by blank lines, and the start and end tags of the block should not be indented with tabs or spaces. Markdown is smart enough not to add extra (unwanted) <p> tags around HTML block-level tags. [snip] > Note that Markdown formatting syntax is not processed within > block-level HTML tags. E.g., you can't use Markdown-style \*emphasis\* inside an HTML block. Because [ [inlinepage] ] isn't separated by a blank line it gets treated as a block-level element. Hmm, will this stop all formatting, including *'s to em-tags? --[[JeroenSchot]] Ah didn't realize you meant it fixed it at the markdown level. 
I'll think about making postprocessor directives into preprocessor directives instead, then I could use that fix (but I'm not sure how feasible it is to do that). --[[Joey]] Done.. inlining is now a preprocessor directive, happens before markdown, and the inlinepage template uses div as suggested, this does prevent markdown from doing any annoying escaping of the preprocessor directives, as well as preventing it wrapping subpages in <p>. --[[Joey]] This page is now valid. Test: [validate this page](http://validator.w3.org/check?url=referer) [[todo/done]] ikiwiki-3.20160121/doc/todo/html.mdwn0000644000000000000000000000026212650125230014000 0ustar Create some nice(r) stylesheets. Should be doable w/o touching a single line of code, just editing the [[templates]] and/or editing [[style.css]]. [[done]] ([[css_market]] ..) ikiwiki-3.20160121/doc/todo/hook_to_detect_markdown_links_to_wiki_pages.mdwn0000644000000000000000000000026612650125230024040 0ustar For an internal wiki, we occasionally get patches that link to internal wiki pages using the Markdown link syntax. I'd love to see an optional git hook to detect that and complain. ikiwiki-3.20160121/doc/todo/hidden_links__47__tags.mdwn0000644000000000000000000000176512650125230017326 0ustar [[!tag wishlist]] I would like to have the possibility for hidden tags or links. Using the tag functionality I could group some news items for including them into other subpages. But I don't want the links or tags to show (and I don't want Tag lists like "Tags: ?mytag"). The tagged items should not differ from the items, that are not tagged. I didn't find any way to hide the tag list or links and I don't want to have to create a "hidden" page containing links to the pages and then using the backlink functionality, because this is more prone to errors. It's easier to forget adding a link on a second page than forgetting to add a needed tag to a new newsitem. 
> I found out, that using the [[meta plugin|plugins/meta]] it is possible to create the hidden link, that I wanted. -- [[users/Enno]] >> Yes, [[meta link|ikiwiki/directive/meta]] will not show up as a visible link on the page, while >> also not showing up in the list of tags of a page, so it seems what you >> want. [[done]] --[[Joey]] ikiwiki-3.20160121/doc/todo/hebrew_l10n.mdwn0000644000000000000000000000075212650125230015146 0ustar Ikiwiki already has localizations for several languages, but not for Hebrew. Hebrew is also special, being a right-to-left language (same is true for Arabic and several others). I hope the page template localizations can handle the layout direction issues and make the whole wiki HTML to be right-to-left. Anyway, I started preparing a Hebrew localization. Just wanted to say it here to avoid duplication, and to let the developers/maintainers know in case it matters. --[[fr33domlover]] ikiwiki-3.20160121/doc/todo/headless_git_branches.mdwn0000644000000000000000000001537312650125230017345 0ustar Ikiwiki should really survive being asked to work with a git branch that has no existing commits. mkdir iki-gittest cd iki-gittest GIT_DIR=barerepo.git git init git clone barerepo.git srcdir ikiwiki --rcs=git srcdir destdir I've fixed this initial construction case, and, based on my testing, I've also fixed the post-update executing on a new master, and ikiwiki.cgi executing on a non-existent master cases. Please commit so my users stop whining at me about having clean branches to push to, the big babies. Summary: Change three scary loud failure cases related to empty branches into three mostly quiet success cases. [[!tag patch]] > FWIW, [[The_TOVA_Company]] apparently wants this feature (and I hope > I don't mind that I mention they were willing to pay someone for it, > but I told them I'd not done any of the work. :) ) > > Code review follows, per hunk.. --[[Joey]]
    diff --git a/IkiWiki/Plugin/git.pm b/IkiWiki/Plugin/git.pm
    index cf7fbe9..e5bafcf 100644
    --- a/IkiWiki/Plugin/git.pm
    +++ b/IkiWiki/Plugin/git.pm
    @@ -439,17 +439,21 @@ sub git_commit_info ($;$) {
     
     	my @opts;
     	push @opts, "--max-count=$num" if defined $num;
    -
    -	my @raw_lines = run_or_die('git', 'log', @opts,
    -		'--pretty=raw', '--raw', '--abbrev=40', '--always', '-c',
    -		'-r', $sha1, '--', '.');
    -
    +	my @raw_lines;
     	my @ci;
    -	while (my $parsed = parse_diff_tree(\@raw_lines)) {
    -		push @ci, $parsed;
    -	}
    +        
    +	# Test to see if branch actually exists yet.
    +	if (run_or_non('git', 'show-ref', '--quiet', '--verify', '--', 'refs/heads/' . $config{gitmaster_branch}) ) {
    +		@raw_lines = run_or_die('git', 'log', @opts,
    +			'--pretty=raw', '--raw', '--abbrev=40', '--always', '-c',
    +			'-r', $sha1, '--', '.');
    +
    +		while (my $parsed = parse_diff_tree(\@raw_lines)) {
    +			push @ci, $parsed;
    +		}
     
    -	warn "Cannot parse commit info for '$sha1' commit" if !@ci;
    +		warn "Cannot parse commit info for '$sha1' commit" if !@ci;
    +	};
     
     	return wantarray ? @ci : $ci[0];
     }
    
    My concern is that this adds a bit of slowdown (git show-ref is fast, but It's still extra work) to a very hot code path that is run to eg, update recentchanges after every change. Seems not ideal to do extra work every time to handle a case that will literally happen a maximum of once in the entire lifecycle of a wiki (and zero times more typically, since the setup automator puts in a .gitignore file that works around this problem). So as to not just say "no" ... what if it always tried to run git log, and if it failed (or returned no parsed lines), then it could look at git show-ref to deduce whether to throw an error or not. --[[Joey]] > Ah, but then git-log would still complain "bad revision 'HEAD'" > --[[Joey]] jrayhawk@piny:/srv/git/jrayhawk.git$ time perl -e 'for( $i = 1; $i < 10000; $i++) { system("git", "show-ref", "--quiet", "--verify", "--", "refs/heads/master"); }' real 0m10.988s user 0m0.120s sys 0m1.210s > FWIW, "an extra millisecond per edit" vs "full git coverage" is no > contest for me; I use that patch on seven different systems, including > freedesktop.org, because I've spent more time explaining to users either > why Ikiwiki won't work on their empty repositories or why their > repositories need useless initial commits (a la Branchable) that make > pushing not work and why denyNonFastForwards=0 and git push -f are > necessary than all the milliseconds that could've been saved in the > world. > > But, since we're having fun rearranging deck chairs on the RMS Perl > (toot toot)... 
> > There's some discrepency here I wasn't expecting: jrayhawk@piny:/srv/git/jrayhawk.git$ time dash -c 'i=0; while [ $i -lt 10000 ]; do i=$((i+1)); git show-ref --quiet --verify -- refs/heads/master; done' real 0m9.986s user 0m0.170s sys 0m0.940s > While looking around in the straces, I notice Perl, unlike {b,d}ash > appears to do PATH lookup on every invocation of git, adding up to > around 110 microseconds apiece on a post-2.6.38 16-thread QPI system: 29699 0.000112 execve("/home/jrayhawk/bin/git", ["git", "show-ref", "--quiet", "--verify", "--", "refs/heads/master"], [/* 17 vars */]) = -1 ENOENT (No such file or directory) 29699 0.000116 execve("/usr/local/bin/git", ["git", "show-ref", "--quiet", "--verify", "--", "refs/heads/master"], [/* 17 vars */]) = -1 ENOENT (No such file or directory) 29699 0.000084 execve("/usr/bin/git", ["git", "show-ref", "--quiet", "--verify", "--", "refs/heads/master"], [/* 17 vars */]) = 0 > You can probably save a reasonable number of context switches and > RCU-heavy (or, previously, lock-heavy) dentry lookups by doing a Perl > equivalent of `which git` and using the result. It might even add up to > a whole millisecond in some circumstances! > > No idea where the rest of that time is going. Probably cache misses > on the giant Perl runtime or something. > > ... > > Now I feel dirty for having spent more time talking about optimization > than that optimization is likely to save. This must be what being an > engineer feels like. > --jrayhawk
    @@ -474,7 +478,10 @@ sub rcs_update () {
     	# Update working directory.
     
     	if (length $config{gitorigin_branch}) {
    -		run_or_cry('git', 'pull', '--prune', $config{gitorigin_branch});
    +		run_or_cry('git', 'fetch', '--prune', $config{gitorigin_branch});
    +		if (run_or_non('git', 'show-ref', '--quiet', '--verify', '--', 'refs/remotes/' . $config{gitorigin_branch} . '/' . $config{gitmaster_branch}) ) {
    +			run_or_cry('git', 'merge', $config{gitorigin_branch} . '/' . $config{gitmaster_branch});
    +		}
     	}
     }
    
    
    Same concern here about extra work. Code path is nearly as hot, being called on every refresh. Probably could be dealt with similarly as above. Also, is there any point in breaking the pull up into a fetch followed by a merge? --[[Joey]] > The same benchmarking applies, at least. > > Re: fetch/merge: We can't test for the nonexistence of the origin branch > without fetching it, and we can't merge it if it is, indeed, > nonexistant. > > Unless you're implying that it would be better to just spam stderr with > unnecessary scary messages and/or ignore/suppress them and lose the > ability to respond appropriately to every other error condition. As > maintainer, you deal with a disproportionate amount of the resulting > support fallout, so I'm perfectly satisfied letting you make that call. > --jrayhawk
    @@ -559,7 +566,7 @@ sub rcs_commit_helper (@) {
     	# So we should ignore its exit status (hence run_or_non).
     	if (run_or_non('git', 'commit', '-m', $params{message}, '-q', @opts)) {
     		if (length $config{gitorigin_branch}) {
    -			run_or_cry('git', 'push', $config{gitorigin_branch});
    +			run_or_cry('git', 'push', $config{gitorigin_branch}, $config{gitmaster_branch});
     		}
     	}
     	
    
    This seems fine to apply. --[[Joey]] > Hooray! > --jrayhawk ikiwiki-3.20160121/doc/todo/hard-coded_location_for_man_pages_and_w3m_cgi_wrapper.mdwn0000644000000000000000000000546112650125230025576 0ustar Hi, some operating systems use PREFIX/man instead of PREFIX/share/man as the base directory for man pages and PREFIX/libexec/ instead of PREFIX/lib/ for files like CGI programs. At the moment the location of the installed man pages and the w3m cgi wrapper is hard-coded in Makefile.PL. The patch below makes it possible to install those files to alternative directories while the default stays as it is now. > It should be possible to use the existing MakeMaker variables such as > INSTALLMAN1DIR (though MakeMaker lacks one for man8). I'd prefer not > adding new variables where MakeMaker already has them. --[[Joey]] [[!tag patch patch/core]]
    
      - Introduce two variables, IKI_MANDIR and IKI_W3MCGIDIR, to be set from
        the command line. This enables locations for man pages and the w3m
        cgi wrapper other than the hard-coded defaults in Makefile.PL.
    
    --- Makefile.PL.orig    2007-05-20 03:03:58.000000000 +0200
    +++ Makefile.PL
    @@ -3,9 +3,32 @@ use warnings;
     use strict;
     use ExtUtils::MakeMaker;
     
    +my %params = ( 'IKI_MANDIR' => '$(PREFIX)/share/man',
    +               'IKI_W3MCGIDIR' => '$(PREFIX)/lib/w3m/cgi-bin'
    +             );
    +
    +@ARGV = grep {
    +  my ($key, $value) = split(/=/, $_, 2);
    +  if ( exists $params{$key} ) {
    +    $params{$key} = $value;
    +    print "Using $params{$key} for $key.\n";
    +    0
    +  } else {
    +    1
    +  }
    +} @ARGV;
    +
    +
     # Add a few more targets.
     sub MY::postamble {
    -q{
    +  package MY;
    +
    +  my $scriptvars = <<"EOSCRIPTVARS";
    +IKI_MANDIR = $params{'IKI_MANDIR'}
    +IKI_W3MCGIDIR = $params{'IKI_W3MCGIDIR'}
    +EOSCRIPTVARS
    +
    +  my $script = q{
     all:: extra_build
     clean:: extra_clean
     install:: extra_install
    @@ -56,23 +79,24 @@ extra_install:
                    done; \
            done
     
    -       install -d $(DESTDIR)$(PREFIX)/share/man/man1
    -       install -m 644 ikiwiki.man $(DESTDIR)$(PREFIX)/share/man/man1/ikiwiki.1
    +       install -d $(DESTDIR)$(IKI_MANDIR)/man1
    +       install -m 644 ikiwiki.man $(DESTDIR)$(IKI_MANDIR)/man1/ikiwiki.1
            
    -       install -d $(DESTDIR)$(PREFIX)/share/man/man8
    -       install -m 644 ikiwiki-mass-rebuild.man $(DESTDIR)$(PREFIX)/share/man/ma
    n8/ikiwiki-mass-rebuild.8
    +       install -d $(DESTDIR)$(IKI_MANDIR)/man8
    +       install -m 644 ikiwiki-mass-rebuild.man $(DESTDIR)$(IKI_MANDIR)/man8/iki
    wiki-mass-rebuild.8
            
            install -d $(DESTDIR)$(PREFIX)/sbin
            install ikiwiki-mass-rebuild $(DESTDIR)$(PREFIX)/sbin
     
    -       install -d $(DESTDIR)$(PREFIX)/lib/w3m/cgi-bin
    -       install ikiwiki-w3m.cgi $(DESTDIR)$(PREFIX)/lib/w3m/cgi-bin
    +       install -d $(DESTDIR)$(IKI_W3MCGIDIR)
    +       install ikiwiki-w3m.cgi $(DESTDIR)$(IKI_W3MCGIDIR)
     
            install -d $(DESTDIR)$(PREFIX)/bin
            install ikiwiki.out $(DESTDIR)$(PREFIX)/bin/ikiwiki
     
            $(MAKE) -C po install PREFIX=$(PREFIX)
    -}
    +};
    +  return $scriptvars.$script;
     }
     
     WriteMakefile(
    
    
    ikiwiki-3.20160121/doc/todo/graphviz.mdwn0000644000000000000000000000244012650125230014666 0ustar How about a plugin providing a [[preprocessor_directive|ikiwiki/directive]] to render a [[!debpkg graphviz]] file as an image via one of the graphviz programs ("dot" by default) and include the resulting image on the page, using the "cmapx" image map format? graphviz files themselves could also render the same way into an HTML file with the same basename as the graphviz file; format and program could come either from an ikiwiki configuration option or comments/directives in the file. (For example, "digraph" could imply "dot", and "graph" could imply "neato".) To complement this, ikiwiki could support creating and editing graphviz files through the CGI interface, as a new page type; preview could render the file. It would also help to have some sort of graphviz extension attribute for linking to a wiki page, which would become a standard href or URL attribute in the input passed to the particular graphviz program. > Editing graphviz files safely online might be tricky. Graphvis would need > to be audited. --[[Joey]] >> I've added a [[graphviz_plugin|plugins/graphviz]] which adds a preprocessor >> directive to render inline graphviz graphs, addressing part of this todo >> item. It doesn't yet support graphviz files as a separate page type, image >> maps, or wikilinks.--[[JoshTriplett]] ikiwiki-3.20160121/doc/todo/git_recentchanges_should_not_show_merges.mdwn0000644000000000000000000000205412650125230023351 0ustar The recentchanges page can currently display merge commits, such as "Merge branch 'master' of ssh://git.kitenet.net/srv/git/ikiwiki.info". It should filter these out somehow, but I'm not sure how to do that. A merge in general is a commit with two parents, right? But such a merge might be what gitweb calls a "simple merge", that is I think, just a fast-forward. 
Or it could be a merge that includes manual conflict resolution, and should be shown in recentchanges. Seems that the problem is that it's calling git-log with the -m option, which makes merges be listed with the diff from the perspective of each parent. I think it would be better to not use that (or possibly to use the -c option instead?). The -m makes it show the merge from the POV of each of the parents. If the -m is left off, none of the changes in the merge are shown, even if it includes changes not in any of the parents (manual conflict resolution). With -c, it seems to show only the unique changes introduced by the merge. [[done]], using -c, hope that was the right choice --[[Joey]] ikiwiki-3.20160121/doc/todo/git_attribution/0000755000000000000000000000000012650125230015354 5ustar ikiwiki-3.20160121/doc/todo/git_attribution/discussion.mdwn0000644000000000000000000001030512650125230020425 0ustar I'd strongly recommend this modification to ikiwiki. Any particular limitations that anyone can think of? I might even have a try at this patch, though I'd have to hack the user preferences page to include author name... As to the question of whether the committer was the 'script' or the wiki editor... I'm not sure. Marking it as the script somehow (`ikiwiki-cgi `)? seems to make sense and would make it easier to manage. [[harningt]] I've been thinking a bit about the GIT attribution in ikiwiki... If no email set, I think "$USERNAME" is reasonable... no point in the '<>' causing clutter. >> **adjustement wrt comments**: leave the '<>' in due to requirements in git If no username set... then something like '@[IPADDR]' makes sense... (not in email brackets). > Why not put it in email brackets? --[[Joey]] In the case of OpenID login.. I think that's a special case... I don't think attempting to munge something meaningful out of the OpenID makes sense... but I think some massaging might need to be done. 
Ex: I've noticed in the current mode where logging in w/ harningt.eharning.us/ shows up in the logs w/o HTTP and if I login w/ http://harningt.eharning.us/ is shows up w/ the http... causing some inconsistency. I think it oughtta make sure that it has the properly discovered, canonicalized form (ex: if there's a redirect to another site (harningt.eharning.us -> www.eharning.us) then technically the target site is the 'real' openid (at least according to how most OpenID RPs take it). ... For OpenID edits, I think there should be a way to tell it what username to show in the preferences dialog (so you can have a 'normal' $USER <$EMAIL> setup.) This could by default be filled in w/ sreg nickname value (as well as email for that matter)... To convey the openid used to make the edit, I think it would be important that some sort of footer line along the lines of the Signed-off: $USER <$EMAIL> conventions I've seen. Perhaps an OpenID: $OPENID_URL would make sense. This could help w/ making sure that no one irrefutably spoofs a post by someone (since w/ the setup where email and effective username are configurable, there's no determination of uniqueness) >> **adj re git req**: "$OPENID_URL <>" [[harningt]] [[madduck]]: git requires `Name ` format, as far as I know. > Yes, it does: > > joey@kodama:~/tmp/foo/bar>git commit --author "foo" > fatal: malformed --author parameter > > It seems to be happy with anything of the form "foo " -- doesn't seem to > do any kind of strict checking. Even "http://joey.kitenet.net <>" will be > accepted. --[[Joey]] >> >>Sounds good to me, >> >> --[[harningt]] > I think the thing to do is, as Josh suggested originally, use > GIT_AUTHOR_NAME and GIT_AUTHOR_EMAIL. Note that setting these > individually is best, so git can independently validate/sanitize both > (which it does do somewhat). Always put the username/openid/IP in > GIT_AUTHOR_NAME; if the user has configured an email address, > GIT_AUTHOR_EMAIL can also be set. 
> > There is one thing yet to be solved, and that is how to tell the > difference between a web commit by 'Joey Hess ', > and a git commit by the same. I think we do want to differentiate these, > and the best way to do it seems to be to add a line to the end of the > commit message. Something like: "\n\nWeb-commit: true" > > For backwards compatability, the code that parses the current stuff needs > to be left in. But it will need to take care to only parse that if the > commit isn't flagged as a web commit! Else web committers could forge > commits from others. --[[Joey]] > > BTW, I decided not to use the user's email address in the commit, because > then the email becomes part of project history, and you don't really > expect that to happen when you give your email address on signup to a web > site. > > The problem with leaving the email empty is that it confuses some things > that try to parse it, including: > * cia (wants a username in there): > * git pull --rebase (?) > * github pushes to twitter ;-) > > So while I tried that way at first, I'm now leaning toward encoding the > username in the email address. Like "user ", or > "joey ". ikiwiki-3.20160121/doc/todo/git_attribution.mdwn0000644000000000000000000000064612650125230016251 0ustar When run with the [[rcs/Git]] backend, ikiwiki should use `GIT_AUTHOR_NAME` and `GIT_AUTHOR_EMAIL` rather than munging the commit message. Depending on the semantics you want to imply (does a web edit constitute a commit by the user or by the script?), it could also set `GIT_COMMITTER_NAME` and `GIT_COMMITTER_EMAIL` to the same values. 
--[[JoshTriplett]] > See [[!debbug 451023]] for a [[patch]] --[[Joey]] [[done]] ikiwiki-3.20160121/doc/todo/git-rev-list_requires_relative_path___40__fixes_git_ctime__41__.mdwn0000644000000000000000000000142512650125230027335 0ustar Index: IkiWiki/Rcs/git.pm =================================================================== --- IkiWiki/Rcs/git.pm (revision 4532) +++ IkiWiki/Rcs/git.pm (working copy) @@ -275,6 +275,9 @@ my $file = shift || q{--}; + # Remove srcdir prefix to appease git-rev-list + $file =~ s/^$config{srcdir}\/?//; + # Ignore error since a non-existing file might be given. my ($sha1) = run_or_non('git-rev-list', '--max-count=1', 'HEAD', $file); if ($sha1) { I actually see a bug in this patch. :-) If srcdir = "foo" and the wiki contains a "foo/bar" and a "bar", this will make it, in the non-ctime case, get the sha1 of the wrong file, "bar", when "foo/bar" is asked for. Better to strip the path out in getctime, I guess. --[[Joey]] [[!tag patch done]] ikiwiki-3.20160121/doc/todo/git-annex_support/0000755000000000000000000000000012650125230015633 5ustar ikiwiki-3.20160121/doc/todo/git-annex_support/discussion.mdwn0000644000000000000000000000340512650125230020707 0ustar Hi, > Hi! :) First, many thanks for this enlightening information! You are leading the way in a glorious trail of flames! ;) -a I'm currently using git-annex with ikiwiki for a website of mine. [stockholm.kalleswork.net](http://stockholm.kalleswork.net). The site relies heavily on the osm and album plugins. The site just went online and is a bit of an experiment, things seem a bit fragile but works for now. (ikiwiki 3.20130904.1)(git-annex 4.20130904.1) The trick I had to do to get git-annex, ikiwiki and the album plugin to work was to initialize an annex remote in the `$srcdir`. Then block the bare origin (ie wiki.git folder) with `annex-sync = false` and `annex-ignore = true` on **both** the server and the working laptop. 
This is to avoid polluting the wiki.git bare repo with annex stuff. > Why are you ignoring the bare remote actually? It seems it would be the proper way to batch-upload files into the server... -a Another critical thing for this to work was to set the `$srcdir` on the server to direct mode `git-annex direct` before adding any content anywhere. On the laptop I can use indirect mode without problems. For now syncing annex, pushing updates to mdwns etc all seem to work and the album plugin picks up images etc. I'm sure there are problems ahead with this setup but it does seem to work. I barely know how to use git, git-annex and ikiwiki and much less understand how it works behind the scenes. So do let me know if there are fatal flaws in my setup ;) -- Kalle > I am not sure how git-annex'd files are pushed to the `$srcdir`. Usually, you clone from the bare repo, so any git annex command will work on that repository, yet you explicitely ignore it. How *do* you push files into the `$srcdir`? Only on the web interface? Thanks again! --[[anarcat]] ikiwiki-3.20160121/doc/todo/git-annex_support.mdwn0000644000000000000000000002476012650125230016533 0ustar A dear [[wishlist]] which would resolve [[this question|forum/ikiwiki_and_big_files]]: ikiwiki should support git-annex repositories. I am not sure how this would work, but from my POV, it should do a `git annex get` when new commits are pushed to its bare repo. This would assume, of course, that there's another repo somewhere that ikiwiki has access to, which works for HTTP-style remotes, but could be more problematic for SSH remotes that require a key. Another solution would be to make ikiwiki a remote itself and allow users to push big files to it. The only problem I see with this is those files would end up in the bare repository and not necessarily show up in the web rendering. 
Ideally, a big file pushed would be hardlinked between the two repos, but it seems [git-annex doesn't support that yet](http://git-annex.branchable.com/todo/wishlist:_use_hardlinks_for_local_clones). --[[anarcat]] > One technical problem with this is that ikiwiki doesn't allow symlinks > for [[security]], but git-annex relies on symlinks (unless you're in > direct mode, but I'm not sure that's really desirable here). > I'd like to make symlinks possible without compromising security, > but it'll be necessary to be quite careful. --[[smcv]] First implementation ==================== So as the [[discussion]] shows, it seems it's perfectly possible to actually do this! There's this [gallery site](http://stockholm.kalleswork.net) which uses the [[plugins/contrib/album]] plugin and git-annex to manage its files. The crucial steps are: 1. setup a git annex remote in `$srcdir` 2. configure direct mode because ikiwiki ignores symlinks for [[security]] reasons: cd $srcdir git annex init git annex direct 3. configure files to be considered by git-annex (those will be not committed into git directly): git config annex.largefiles 'largerthan=100kb and not (include=*.mdwn or include=*.txt)' 4. make the bare repository (the remote of `$srcdir`) ignored by git-annex: cd $srcdir git config remote.origin.annex-ignore true git config remote.origin.annex-sync false (!) This needs to be done on *ANY* clone of the repository, which is annoying, but it's important because we don't want to see git-annex stuff in the bare repo. (why?) 5. deploy the following crappy plugin to make commits work again and make sure the right files are added in git-annex: [[!format perl """ #!/usr/bin/perl package IkiWiki::Plugin::gitannex; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "gitannex", call => \&getsetup); hook(type => "savestate", id => "gitannex", call => \&rcs_commit); # we need to handle all rcs commands maybe? 
} sub getsetup () { return plugin => { safe => 1, # rcs plugin rebuild => undef, section => "misc", }, } # XXX: we want to copy or reuse safe_git sub rcs_commit (@) { chdir $config{srcdir}; `git annex add --auto`; `git annex sync`; } sub rcs_commit_staged (@) { rcs_commit($@); } 1 """]] This assumes you know what `srcdir`, `repository` and so on mean, if you forgot (like me), see this reference: [[rcs/git/]]. What doesn't work ----------------- * the above plugin is kind of flaky and ugly. * it's not an RCS plugin, but probably should be, replacing the git plugin, because really: git doesn't work at all anymore at this point What remains to be clarified ---------------------------- * how do files get pushed to the `$srcdir`? Only through the web interface? * why do we ignore the bare repository? See the [[discussion]] for a followup on that. --[[anarcat]] Alternative implementation ========================== An alternative implementation, which remains to be detailed but is mentionned in [[forum/ikiwiki_and_big_files]], is to use the [[underlay]] feature combined with the `hardlink` option to deploy the git-annex'd files. Then git-annex is separate from the base ikiwiki git repo. See also [[tips/Ikiwiki_with_git-annex__44___the_album_and_the_underlay_plugins]] for an example. Also note that ikiwiki-hosting has a [patch waiting](https://ikiwiki-hosting.branchable.com/todo/git-annex_support) to allow pushes to work with git-annex. This could potentially be expanded to sync content to the final checkout properly, avoiding some of the problems above (esp. wrt to non-annex bare repos). Combined with the [[underlay]] feature, this could work very nicely indeed... --[[anarcat]] Here's an attempt:
    cd /home/user
    git clone source.git source.annex
    cd source.annex
    git annex direct
    cd ../source.git
    git annex group . transfer
    git remote add annex ../source.annex
    git annex sync annex
    
    Make sure the `hardlink` setting is enabled, and add the annex as an underlay, in `ikiwiki.setup`:
    hardlink: 1
    add_underlays:
    - /home/w-anarcat/source.annex
    
    Then moving files to the underlay is as simple as running this command in the bare repo:
    #!/bin/sh
    
    echo "moving big files to annex repository..."
    git annex move --to annex
    
    I have added this as a hook in `$HOME/source.git/hooks/post-receive` (don't forget to `chmod +x`). The problem with the above is that the underlay wouldn't work: for some reason it wouldn't copy those files in place properly. Maybe it's freaking out because it's a full copy of the repo... My solution was to make the source repository itself a direct repo, and then add it as a remote to the bare repo. --[[anarcat]] Back from the top ================= Obviously, the final approach of making the `source` repository direct mode will fail because ikiwiki will try to commit files there from the web interface which will fail (at best) and (at worst) add big files into git-annex (or vice-versa, not sure what's worse actually). Also, I don't know how others here made the underlay work, but it didn't work for me. I think it's because in the "source" repository, there are (dead) symlinks for the annexed files. This overrides the underlay, because of [[security]] - although I am unclear as to why this is discarded so early. So in order to make the original idea above work properly (ie. having a separate git-annex repo in direct mode) work, we must coerce ikiwiki into tolerating symlinks in the srcdir a little more:
    diff --git a/IkiWiki.pm b/IkiWiki.pm
    index 1043ef4..949273c 100644
    --- a/IkiWiki.pm
    +++ b/IkiWiki.pm
    @@ -916,11 +916,10 @@ sub srcfile_stat {
            my $file=shift;
            my $nothrow=shift;
    
    -       return "$config{srcdir}/$file", stat(_) if -e "$config{srcdir}/$file";
    -       foreach my $dir (@{$config{underlaydirs}}, $config{underlaydir}) {
    -               return "$dir/$file", stat(_) if -e "$dir/$file";
    +       foreach my $dir ($config{srcdir}, @{$config{underlaydirs}}, $config{underlaydir}) {
    +               return "$dir/$file", stat(_) if (-e "$dir/$file" && ! -l "$dir/$file");
            }
    -       error("internal error: $file cannot be found in $config{srcdir} or underlay") unless $nothrow;
    +       error("internal error: $file cannot be found in $config{srcdir} or underlays @{$config{underlaydirs}} $config{underlaydir}") unless $nothrow;
            return;
     }
    
    diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm
    index 9d6f636..e0b4cf8 100644
    --- a/IkiWiki/Render.pm
    +++ b/IkiWiki/Render.pm
    @@ -337,7 +337,7 @@ sub find_src_files (;$$$) {
    
                    if ($underlay) {
                            # avoid underlaydir override attacks; see security.mdwn
    -                       if (! -l "$abssrcdir/$f" && ! -e _) {
    +                       if (1 || ! -l "$abssrcdir/$f" && ! -e _) {
                                    if (! $pages{$page}) {
                                            push @files, $f;
                                            push @IkiWiki::underlayfiles, $f;
    
    Now obviously this patch is incomplete: I am not sure we actually avoid the attack, ie. i am not sure the check in `srcdir()` is sufficient to remove completely the check in `find_src_files()`. After reviewing the code further, it seems that `find_src_files` in three places in ikiwiki:
    ../IkiWiki/Render.pm:421:	find_src_files(1, \@files, \%pages);
    ../IkiWiki/Render.pm:846:		($files, $pages)=find_src_files();
    ../po/po2wiki:18:my ($files, $pages)=IkiWiki::find_src_files();
    
    The first occurence is in `IkiWiki::Render::process_changed_files`, where it is used mostly for populating `@IkiWiki::underlayfiles`, the only side effect of `find_src_files`. The second occurence is in `IkiWiki::Render::refresh`. There things are a little more complicated (to say the least) and a lot of stuff happens. To put it in broad terms, first it does a `IkiWiki::Render::scan` and then a `IkiWiki::Render::render`. The last two call `srcfile()` appropriately (where i put an extra symlink check), except for `will_render()` in `scan`, which I can't figure out right now and that seems to have a lot of global side effects. It still looks fairly safe at first glance. The `rcs_get_current_rev`, `refresh`, `scan` and `rendered` hooks are also called in there, but I assume those to be safe, since they are called with sanitized values already. The patch does work: the files get picked up from the underlay and properly hardlinked into the target `public_html` directory! So with the above patch, then the following hook in `source.git/hooks/post-receive`:
    #!/bin/sh
    
    OLD_GIT_DIR="$GIT_DIR"
    unset GIT_DIR
    echo "moving big files to annex repository..."
    git annex copy --to annex
    git annex sync annex
    
    (I am not sure anymore why GIT_DIR is necessary, but I remember it destroyed all files in my repo because git-annex synced against the `setup` branch in the parent directory. fun times.) Then the `annex` repo is just a direct clone of the source.git:
    cd /home/user
    git clone --shared source.git annex
    cd annex
    git annex direct
    cd ../source.git
    git remote add annex ../annex
    
    And we need the following config:
    hardlink: 1
    add_underlays:
    - /home/w-anarcat/annex
    add_plugins:
    - underlay
    
    ... and the `ikiwiki-hosting` patch mentionned earlier to allow git-annex-shell to run at all. Also, the `--shared` option will [make git-annex use hardlinks itself between the two repos](https://git-annex.branchable.com/todo/wishlist:_use_hardlinks_for_local_clones/), so the files will be available for download as well. --[[anarcat]] > ...aaaand this doesn't work anymore. :( i could have sworn this was working minutes ago, but for some reason the annexed files get skipped again now. :( Sorry for the noise, the annex repo wasn't in direct mode - the above works! --[[anarcat]] ikiwiki-3.20160121/doc/todo/geotagging.mdwn0000644000000000000000000000046112650125230015150 0ustar Would be nice to see a way of geotagging pages in an ikiwiki, and search/sort pages by distance to a given location, as well as showing page locations on a map (Google Map, OpenStreetMap, etc). -- [[users/vibrog]] [[!tag wishlist]] > [[!cpan Geo::Coordinates::UTM]] would probably be useful. --[[smcv]] ikiwiki-3.20160121/doc/todo/generic_insert_links.mdwn0000644000000000000000000000202712650125230017235 0ustar The attachment plugin's Insert Links button currently only knows how to insert plain wikilinks and img directives (for images). [[wishlist]]: Generalize this, so a plugin can cause arbitrary text to be inserted for a particular file. --[[Joey]] Design: Add an insertlinks hook. Each plugin using the hook would be called, and passed the filename of the attachment. If it knows how to handle the file type, it returns a the text that should be inserted on the page. If not, it returns undef, and the next plugin is tried. This would mean writing plugins in order to handle links for special kinds of attachments. 
To avoid that for simple stuff, a fallback plugin could run last and look for a template named like `templates/embed_$extension`, and insert a directive like: \[[!template id=embed_vp8 file=my_movie.vp8]] Then to handle a new file type, a user could just make a template that expands to some relevant html. In the example above, `templates/embed_vp8` could make a html5 video tag, possibly with some flash fallback code even. ikiwiki-3.20160121/doc/todo/generic___39__do__61__goto__39___for_CGI.mdwn0000644000000000000000000000315512650125230022242 0ustar The [[plugins/recentchanges]] plugin has a `do=recentchanges_link` feature that will redirect to a given wiki page, or an error page with a creation link. In the [[plugins/contrib/comments]] plugin I've found that it would be useful to do the same for users. For now I've just cloned the functionality into the comments plugin, but perhaps this functionality could be renamed to `do=goto` or something, and moved to `IkiWiki/CGI.pm`? > Now implemented as the 'goto' branch in my git repository, along with > [[apache_404_ErrorDocument_handler]]. --[[smcv]] >> Looks good, the only things I wonder are: >> * Should it be a separate plugin? In particular `cgi_page_from_404()` is >> pretty big, and only works if apache is configured so seems somewhat >> pluginaable. >>> I've split out `goto` and `apache404` plugins in the branch. I think >>> you're right that apache404 should be a plugin. If you think goto is small >>> and general enough to not be a plugin, just don't merge my most recent >>> patch! --[[smcv]] >> * I wish there were some way to generalize the workaround for the stupid >> MSIE behavior. Actually, I wish we could ignore the MSIE stupidity, >> as I tend to do, but perhaps it's too stupid in this case for that to >> fly.. >> * Is there any reason to require do=goto before checking for >> `REDIRECT_STATUS`? 
Seems that if that code were moved >> out of the enclosing if block, the apache 404 handler could >> be set direct to the cgi, which seems simpler to remember. >> --[[Joey]] >>> No, good point - the `REDIRECT_STATUS` check is sufficiently unambiguous >>> already. Fixed. --[[smcv]] [[done]] ikiwiki-3.20160121/doc/todo/generated_po_stuff_not_ignored_by_git.mdwn0000644000000000000000000000022412650125230022621 0ustar [[!tag patch]] The recent merge of the po branch didn't come with a .gitignore. It eventually annoyed me enough to fix it :-) --[[smcv]] [[done]] ikiwiki-3.20160121/doc/todo/generate_HTML5_by_default.mdwn0000644000000000000000000000556212650125230017745 0ustar The `html5` option was added in 2010 and marked as "not experimental" in 2011 but is not the default. According to , current versions of all recent versions of all major browsers - even IE (9+) - support the HTML5 semantic elements (`
    ` etc.), except for `
    ` which IkiWiki doesn't use anyway. However, IE 8 is not a current version, but has ~ 4% market share and doesn't support `
    ` and friends; so there's still a compatibility concern there. In particular, non-HTML5 mode uses `` which doesn't allow newer markup like the `role` attribute, so we can't close [[todo/add_aria_landmarks_to_make_ikiwiki_websites_more_accessible]] while remaining XHTML 1.0 Strict. The recommended pseudo-doctype for HTML5, and for HTML with ARIA markup, is ``. (I do think we should continue to use `` and output well-formed XML so people who want to do XSLT tricks with IkiWiki's output can do so, though.) In practice, real browsers have never actually implemented a strict XHTML mode: they've always parsed `text/html` as "tag soup", because they need a tag-soup parser anyway, and nobody wants to maintain two parsers. Kai also wants a HTML5 doctype for [[bugs/more mobile friendly default themes]]. Options include: * set html5 to 1 by default but retain the dual-mode templates, breaking IE 8 by default * remove the option and always behave as if it had been 1, simplifying the templates and breaking IE 8 unconditionally * either of the above and include [html5shiv](https://code.google.com/p/html5shiv/) to de-break IE 8 * change the doctype to `` unconditionally, stop trying to limit ourselves to XHTML 1.0 Strict (use HTML5 features that degrade gracefully, like [[ARIA roles|todo/add aria landmarks to make ikiwiki websites more accessible]]), but avoid using the new elements like `
    ` that require specific browser support unless `html5` is set to 1. That would get rid of the backwards-compatibility concerns while keeping the ability to use post-2000 markup; we can use `html5` to mean "be more enthusiastic about HTML5 features even if they might fail on older browsers". Using the HTML5 doctype does mean we lose the ability to validate the output against a DTD (as `wdg-html-validator` does), but DTDs have very little to do with practical browser compatibility in any case. [[!template id=gitbranch branch=smcv/ready/html5 author="[[Simon McVittie|smcv]]" browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/html5]] [[!tag patch users/smcv/ready]] At the moment my preferred option is the last, for which see my `ready/html5` branch. I'll apply this at some point if there are no objections. --[[smcv]] > [[merged|done]] --[[smcv]] ikiwiki-3.20160121/doc/todo/friendly_markup_names.mdwn0000644000000000000000000000113612650125230017413 0ustar On the edit form when you are creating a new page, you are given an option of page types that can be used. The string presented to the user here is not particularly friendly: e.g., mdwn, txtl... it would be nice if the drop-down contents were "Markdown", "Textile", etc. (the values in the option tags can remain the same). I've written a first-take set of patches for this. They are in git://github.com/jmtd/ikiwiki.git in the branch "friendly_markup_names". [[!tag patch]] -- [[Jon]] [[merged|done]], TFTP! (I have not checked if any other format plugins would benefit from a longer name) --[[Joey]] ikiwiki-3.20160121/doc/todo/fortune:_select_options_via_environment.mdwn0000644000000000000000000000246612650125230023215 0ustar diff -up fortune.pm.ORIG fortune.pm.MODIFIED --- fortune.pm.ORIG 2008-01-11 19:07:48.000000000 +0100 +++ fortune.pm.MODIFIED 2008-01-12 07:58:44.000000000 +0100 @@ -1,5 +1,11 @@ #!/usr/bin/perl -# Include a fortune in a page +# Include a fortune in a page. 
+# If the environment variable IKIWIKI_FORTUNE_COMMAND is defined, use it. +# This allows to run e.g.: +# $IKIWIKI_FORTUNE_COMMAND='fortune ~/.fortune/myfortunes' \ +# ikiwiki -setup ~/.ikiwiki/ikiwiki.setup +# Combining this with cron could make regenerated wiki content. +# This may or may not be a good thing wrt. version control. package IkiWiki::Plugin::fortune; use warnings; @@ -12,7 +18,13 @@ sub import { sub preprocess (@) { $ENV{PATH}="$ENV{PATH}:/usr/games:/usr/local/games"; - my $f = `fortune 2>/dev/null`; + my $f; + if (exists ($ENV{'IKIWIKI_FORTUNE_COMMAND'})) { + $f = `$ENV{'IKIWIKI_FORTUNE_COMMAND'} 2>/dev/null` + } + else { + $f = `fortune 2>/dev/null`; + } if ($?) { return "[[".gettext("fortune failed")."]]"; > An environment variable is not the right approach. Ikiwiki has a setup > file, and plugins can use configuration from there. --[[Joey]] ikiwiki-3.20160121/doc/todo/format_escape.mdwn0000644000000000000000000002704512650125230015654 0ustar Since some preprocessor directives insert raw HTML, it would be good to specify, per-format, how to pass HTML so that it goes through the format OK. With Markdown we cross our fingers; with reST we use the "raw" directive. I added an extra named parameter to the htmlize hook, which feels sort of wrong, since none of the other hooks take parameters. Let me know what you think. --Ethan Seems fairly reasonable, actually. Shouldn't the `$type` come from `$page` instead of `$destpage` though? Only other obvious change is to make the escape parameter optional, and only call it if set. --[[Joey]] > I couldn't figure out what to make it from, but thinking it through, > yeah, it should be $page. Revised patch follows. --Ethan >> I've updated the patch some more, but I think it's incomplete. ikiwiki >> emits raw html when expanding WikiLinks too, and it would need to escape >> those. Assuming that escaping html embedded in the middle of a sentence >> works.. --[[Joey]] >>> Revised again. 
I get around this by making another hook, htmlescapelink, >>> which is called to generate links in whatever language. In addition, it >>> doesn't (can't?) generate >>> spans, and it doesn't handle inlineable image links. If these were >>> desired, the approach to take would probably be to use substitution >>> definitions, which would require generating two bits of code for each >>> link/html snippet, and putting one at the end of the paragraph (or maybe >>> the document?). >>> To specify that (for example) Discussion links are meant to be HTML and >>> not rst or whatever, I added a "genhtml" parameter to htmllink. It seems >>> to work -- see for an example. >>> --Ethan ## Alternative solution [Here](http://www.jk.fr.eu.org/ikiwiki/format-escapes-2.diff) is a patch largely inspired from the one below, which is up to date and written with [[todo/multiple_output_formats]] in mind. "htmlize" hooks are generalized to "convert" ones, which can be registered for any pair of filename extensions. Preprocessor directives are allowed to return the content to be inserted as a hash, in any format they want, provided they provide htmlize hooks for it. Pseudo filename extensions (such as `"_link"`) can also be introduced, which aren't used as real extensions but provide useful intermediate types. --[[JeremieKoenig]] > Wow, this is in many ways a beautiful patch. I did notice one problem, > if a link is converted to rst and then from there to a hyperlink, the > styling info usually added to such a link is lost. I wonder if it would > be better to lose _link stuff and just create link html that is fed into > the rst,html converter. Other advantage to doing that is that link > creation has a rather complex interface, with selflink, attrs, url, and > content parameters. > > --[[Joey]] >> Thanks for the compliment. I must confess that I'm not too familiar with >> rst. 
I am using this todo item somewhat as a pretext to get the conversion >> stuff in, which I need to implement some other stuff. As a result I was >> less careful with the rst plugin than with the rest of the patch. >> I just updated the patch to fix some other problems which I found with >> more testing, and document the current limitations. >> Rst cannot embed raw html in the middle of a paragraph, which is why >> "_link" was necessary. Rst links are themselves tricky and can't be made to >> work inside of words without knowledge about the context. >> Both problems could be fixed by inserting marks instead of the html/link, >> which would be replaced at a later stage (htmlize, format), somewhat >> similiar to the way the toc plugin works. When I get more time I will >> try to fix the remaining glitches this way. >> Also, I think it would be useful if ikiwiki had an option to export >> the preprocessed source. This way you can use docutils to convert your >> rst documents to other formats. Raw html would be loosed in such a >> process (both with directives and marks), which is another >> argument for `"_link"` and other intermediate forms. I think I can >> come up with a way for rst's convert_link to be used only for export >> purposes, though. >> --[[JeremieKoenig]] > Another problem with this approach is when there is some html (say a > table), that contains a wikilink. If the link is left up to the markup > lamguage to handle, it will never convert it to a link, since the table > will be processed as a chunk of raw html. > --[[Joey]] ### Updated patch I've created an updated [patch](http://www.idletheme.org/code/patches/ikiwiki-format-escapes-rlk-2007-09-24.diff) against the current revision. No real functionality changes, except for a small test script, one minor bugfix (put a "join" around a scalar-context "map" in convert_link), and some wrangling to get it merged properly; I thought it might be helpful for anyone else who wants to work on the code. 
(With that out of the way, I think I'm going to take a stab at Jeremie's plan to use marks which would be replaced post-htmlization. I've also got an eye towards [[todo/multiple_output_formats]].) --Ryan Koppenhaver ## Original patch [[!tag patch patch/core plugins/rst]]
    Index: debian/changelog
    ===================================================================
    --- debian/changelog	(revision 3197)
    +++ debian/changelog	(working copy)
    @@ -24,6 +24,9 @@
         than just a suggests, since OpenID is enabled by default.
       * Fix a bug that caused link(foo) to succeed if page foo did not exist.
       * Fix tags to page names that contain special characters.
    +  * Based on a patch by Ethan, add a new htmlescape hook, that is called
    +    when a preprocssor directive emits inline html. The rst plugin uses this
    +    hook to support inlined raw html.
     
       [ Josh Triplett ]
       * Use pngcrush and optipng on all PNG files.
    Index: IkiWiki/Render.pm
    ===================================================================
    --- IkiWiki/Render.pm	(revision 3197)
    +++ IkiWiki/Render.pm	(working copy)
    @@ -96,7 +96,7 @@
     		if ($page !~ /.*\/\Q$discussionlink\E$/ &&
     		   (length $config{cgiurl} ||
     		    exists $links{$page."/".$discussionlink})) {
    -			$template->param(discussionlink => htmllink($page, $page, gettext("Discussion"), noimageinline => 1, forcesubpage => 1));
    +			$template->param(discussionlink => htmllink($page, $page, gettext("Discussion"), noimageinline => 1, forcesubpage => 1, genhtml => 1));
     			$actions++;
     		}
     	}
    Index: IkiWiki/Plugin/rst.pm
    ===================================================================
    --- IkiWiki/Plugin/rst.pm	(revision 3197)
    +++ IkiWiki/Plugin/rst.pm	(working copy)
    @@ -30,15 +30,36 @@
     html = publish_string(stdin.read(), writer_name='html', 
            settings_overrides = { 'halt_level': 6, 
                                   'file_insertion_enabled': 0,
    -                              'raw_enabled': 0 }
    +                              'raw_enabled': 1 }
     );
     print html[html.find('')+6:html.find('')].strip();
     ";
     
     sub import {
     	hook(type => "htmlize", id => "rst", call => \&htmlize);
    +	hook(type => "htmlescape", id => "rst", call => \&htmlescape);
    +	hook(type => "htmlescapelink", id => "rst", call => \&htmlescapelink);
     }
     
    +sub htmlescapelink ($$;@) {
    +	my $url = shift;
    +	my $text = shift;
    +	my %params = @_;
    +
    +	if ($params{broken}){
    +		return "`? <$url>`_\ $text";
    +	}
    +	else {
    +		return "`$text <$url>`_";
    +	}
    +}
    +
    +sub htmlescape ($) {
    +	my $html=shift;
    +	$html=~s/^/  /mg;
    +	return ".. raw:: html\n\n".$html;
    +}
    +
     sub htmlize (@) {
     	my %params=@_;
     	my $content=$params{content};
    Index: doc/plugins/write.mdwn
    ===================================================================
    --- doc/plugins/write.mdwn	(revision 3197)
    +++ doc/plugins/write.mdwn	(working copy)
    @@ -121,6 +121,26 @@
     The function is passed named parameters: "page" and "content" and should
     return the htmlized content.
     
    +### htmlescape
    +
    +	hook(type => "htmlescape", id => "ext", call => \&htmlescape);
    +
    +Some markup languages do not allow raw html to be mixed in with the markup
    +language, and need it to be escaped in some way. This hook is a companion
    +to the htmlize hook, and is called when ikiwiki detects that a preprocessor
    +directive is inserting raw html. It is passed the chunk of html in
    +question, and should return the escaped chunk.
    +
    +### htmlescapelink
    +
    +	hook(type => "htmlescapelink", id => "ext", call => \&htmlescapelink);
    +
    +Some markup languages have special syntax to link to other pages. This hook
    +is a companion to the htmlize and htmlescape hooks, and it is called when a
    +link is inserted. It is passed the target of the link and the text of the 
    +link, and an optional named parameter "broken" if a broken link is being
    +generated. It should return the correctly-formatted link.
    +
     ### pagetemplate
     
     	hook(type => "pagetemplate", id => "foo", call => \&pagetemplate);
    @@ -355,6 +375,7 @@
     * forcesubpage  - set to force a link to a subpage
     * linktext - set to force the link text to something
     * anchor - set to make the link include an anchor
    +* genhtml - set to generate HTML and not escape for correct format
     
     #### `readfile($;$)`
     
    Index: doc/plugins/rst.mdwn
    ===================================================================
    --- doc/plugins/rst.mdwn	(revision 3197)
    +++ doc/plugins/rst.mdwn	(working copy)
    @@ -10,10 +10,8 @@
     Note that this plugin does not interoperate very well with the rest of
     ikiwiki. Limitations include:
     
    -* reStructuredText does not allow raw html to be inserted into
    -  documents, but ikiwiki does so in many cases, including
    -  [[WikiLinks|ikiwiki/WikiLink]] and many
    -  [[Directives|ikiwiki/Directive]].
    +* Some bits of ikiwiki may still assume that markdown is used or embed html
    +  in ways that break reStructuredText. (Report bugs if you find any.)
     * It's slow; it forks a copy of python for each page. While there is a
       perl version of the reStructuredText processor, it is not being kept in
       sync with the standard version, so is not used.
    Index: IkiWiki.pm
    ===================================================================
    --- IkiWiki.pm	(revision 3197)
    +++ IkiWiki.pm	(working copy)
    @@ -469,6 +469,10 @@
     	my $page=shift; # the page that will contain the link (different for inline)
     	my $link=shift;
     	my %opts=@_;
    +	# we are processing $lpage and so we need to format things in accordance
    +	# with the formatting language of $lpage. inline generates HTML so links
    +	# will be escaped seperately.
    +	my $type=pagetype($pagesources{$lpage});
     
     	my $bestlink;
     	if (! $opts{forcesubpage}) {
    @@ -494,12 +498,17 @@
     	}
     	if (! grep { $_ eq $bestlink } map { @{$_} } values %renderedfiles) {
     		return $linktext unless length $config{cgiurl};
    -		return " "create",
    -				page => pagetitle(lc($link), 1),
    -				from => $lpage
    -			).
    +		my $url = cgiurl(
    +				 do => "create",
    +				 page => pagetitle(lc($link), 1),
    +				 from => $lpage
    +				);
    +
    +		if ($hooks{htmlescapelink}{$type} && ! $opts{genhtml}){
    +			return $hooks{htmlescapelink}{$type}{call}->($url, $linktext,
    +							       broken => 1);
    +		}
    +		return "?$linktext"
     	}
     	
    @@ -514,6 +523,9 @@
     		$bestlink.="#".$opts{anchor};
     	}
     
    +	if ($hooks{htmlescapelink}{$type} && !$opts{genhtml}) {
    +	  return $hooks{htmlescapelink}{$type}{call}->($bestlink, $linktext);
    +	}
     	return "$linktext";
     }
     
    @@ -628,6 +640,14 @@
     				preview => $preprocess_preview,
     			);
     			$preprocessing{$page}--;
    +
    +			# Handle escaping html if the htmlizer needs it.
    +			if ($ret =~ /[<>]/ && $pagesources{$page}) {
    +				my $type=pagetype($pagesources{$page});
    +				if ($hooks{htmlescape}{$type}) {
    +					return $hooks{htmlescape}{$type}{call}->($ret);
    +				}
    +			}
     			return $ret;
     		}
     		else {
    
    ikiwiki-3.20160121/doc/todo/for_amazon_s3_pre-gzip-encode_safe_files.mdwn0000644000000000000000000000204212650125230023022 0ustar Regarding the [[Amazon_S3_Plugin|plugins/amazon_s3]]: Amazon S3 doesn't seem to support automatic GZIP encoding content (such as HTML, JavaScript, and CSS) that might be compressed by a full-capability webserver. (I'll also note that NearlyFreeSpeech.NET doesn't support compressing out-going files on-the-fly). However, Amazon S3 does support setting some response headers, such as Transfer-Encoding and the like. One possibility of decreasing bandwidth costs/download sizes would be to GZIP all content on the site and set the necessary header... however there are certain browser compatibility issues to be navigated. Another side item that would be useful potentially would be to have a config option to create a mapping of files that can be gzipped as an alternate name... For example: gzipped_files => { js => "js.gz" } Would take all js files and gzip them w/ the altered extension. *This* could allow for using JavaScript to customize what other JS/CSS code gets loaded in based on browser-detection JS code. --[[harningt]] ikiwiki-3.20160121/doc/todo/flexible_relationships_between_pages/0000755000000000000000000000000012650125230021573 5ustar ikiwiki-3.20160121/doc/todo/flexible_relationships_between_pages/blocks.pm.mdwn0000644000000000000000000000521712650125230024357 0ustar #!/usr/bin/perl # Ikiwiki "blocks" relationship plugin. 
package IkiWiki::Plugin::blocks; use warnings; use strict; use IkiWiki 3.00; sub import { hook(type => "getsetup", id => "blocks", call => \&getsetup); hook(type => "checkconfig", id => "skeleton", call => \&checkconfig); } sub getsetup () { return plugin => { safe => 1, rebuild => 1, }, blocks_names => { type => "string", example => "blocks/blockedby", description => "comma separated list of defined relationship pairs, the forward and backward name separated by a slash", safe => 1, rebuild => 1, }, } sub checkconfig () { my $blocksnames; if (defined $config{blocks_names}) { $blocksnames = $config{blocks_names}; } else { $blocksnames = "blocks/blockedby"; } while ( $blocksnames =~ /([^ ]+)/g ) { if ( $1 =~ m@([a-zA-Z0-9]+)(/([a-zA-Z0-9]+))?@ ) { my $from = $1; my $to = $3; hook( type => "preprocess", shortcut => 1, # gets interpreted by listdirectives; see doc/bugs/cannot_preview_shortcuts.mdwn / ikiwiki commit 354d22e2 no_override => 1, id => $from, scan => 1, call => sub { preprocess_blocks($from, 1, @_); } ); if ($to) { hook( type => "preprocess", shortcut => 1, no_override => 1, id => $to, scan => 1, call => sub { preprocess_blocks($from, 0, @_); } ); } my $backward_match; my $backward_name; my $forward_match; my $forward_name; $backward_match = sub ($$;@) { my $page=shift; my $glob=shift; return IkiWiki::PageSpec::match_backlink($page, $glob, linktype => $from, @_); }; $backward_name = "IkiWiki::PageSpec::match_$from"; if ($to) { $forward_match = sub ($$;@) { my $page=shift; my $glob=shift; return IkiWiki::PageSpec::match_link($page, $glob, linktype => $from, @_); }; $forward_name = "IkiWiki::PageSpec::match_$to"; } { no strict 'refs'; if ($to) { *$forward_name = $forward_match; } *$backward_name = $backward_match; } } else { error gettext("Malformed option in blocks_names"); } } } sub preprocess_blocks ($$@) { # with flip=0, the directive occurring on page A pointing at page B # means that A $relation B, with flip=1, it means B $relation A my $relation = 
shift; my $flip = shift; if (! @_) { return ""; } my %params=@_; my $page = $params{page}; delete $params{page}; delete $params{destpage}; delete $params{preview}; foreach my $blocks (keys %params) { $blocks=linkpage($blocks); # hidden WikiLink if ( $flip == 0 ) { add_link($page, $blocks, $relation); } else { add_link($blocks, $page, $relation); } } return ""; } 1 ikiwiki-3.20160121/doc/todo/flexible_relationships_between_pages.mdwn0000644000000000000000000001511512650125230022465 0ustar it has been some years since the [[matching different kinds of links]] issue was tackled, but hardly a plugin is using it. in order to enhance on the [[todo/rel attribute for links]] and [[todo/better bug tracking support]] issues and to provide a more general infrastructure, i'd like to propose a generic plugin for typed links. it can be also viewed of a way to have [[todo/structured page data]] that consists of URLs inside the wiki. following the use case i've developed it for, i'll call it `blocks` for the moment (but am open to better suggestions). outline ======= the plugin has a **configuration option** called `blocks_names`, which consists of pairs of verbs; the typical example is `blocks/blockedby`, but other values could be `next/prev up/down` or `owner/owns`. for each verb in the options, there is a **directive** which is used to state the relationship; relationships can be declared on both ends, so a page `bugA` with the contents `\[[!blocks bugB]]` is semantically equivalent to a page `bugB` with the contents `\[[!blockedby bugA]]`. for each verb, there is also a **pagespec** which matches all pages that are the origin of a relationship to a given page. if `developerA` `\[[!owns bug1]]`, then if `bug1` contains `\[[!map pages="owns(.)"]]`, it will show the owning developer. these specs match both ways, ie. if `bug1` `\[[!owner developerA]]`, the said map directive will still produce the same result. details ======= * single word relationships vs. 
symmetric relationships with some verbs, it is possible that a relationship is only used in one direction (eg `index`, even though one could declare it as `index/isindexof`). > isindexof is not a very interesting relationship - it just clogs up > the link-map, since the index is "the index of" all pages. I can't > see any situation in which you'd want to do pagespec matching > on it? --[[smcv]] >> that's why i used `index` as an example of a one-direction relationship. >> >> it wouldn't clog up the link map, though: in order to cleanly match both >> directions, when the "inverse" term of a relationship is used, the link in >> taggedlinks uses the "forward" term, but switches the objects. >> >> --[[chrysn]] other verbs are symmetric, eg. `equivalent`, which need different treatment. * "taglink" style directives the [[plugins/tag]] plugin would be a special case for this plugin (apart from the autotag and tagdir features). as there is a `\[[!taglink ...]]` directive, there could be an analogous directive for every single directive. > This is basically the traillink/trailitem duality, too. > I'd be quite tempted to generalize to something like this: > > We can't fix [[!link blocks="bug123" text="Bug 123"]] until we do this. > > [[!hiddenlink owner="smcv"]] > > but perhaps that's too wordy? > > I think both trail and tag need their own special processing beyond the > general case, but maybe not? --[[smcv]] >> i'd be all in favor of having this unified and deeper; there has been the >> idea of a `\[[!link]]` directive [[again|todo/link plugin perhaps too general__63__]] >> and [[again|todo/do not make links backwards]]. >> >> i like the `\[[!link text=""]]` and `[[!hiddenlink]]` conventions, but >> think that ${REL}="${TARGET}" isn't ideal because it implies that a single >> link can have more than one target. instead, i'd go for >> `\[[!link to="bug123" rel="blocks" text="Bug 123"]]; as with the html rel >> parameter, rel would be a list of whitespace separated values. 
>> >> positional parameters (`\[[!link bug123 rel="blocks" text="Bug 123"]]` or >> even `\[[!link Bug 123|bug123 rel="blocks"]]`) would be possible, but i >> prefer explicit syntax and not joining stings back again with the >> whitespace that was split off it before. >> >> if the '|' character is not widespread in page names (which i assume it is >> not), instead of using positional parameters in `\[[!link]]` for >> shortcuts, we could extend the regular link syntax; the same relationship >> could then be declared as `\[[Bug 123|bug123|blocks]]`; this would be an >> easy extension to the original link syntax. it would even work for hidden links >> (`\[[|smcv|owner]]`), which previously made no sense because a link with >> neither a physicial representation nor metadat is of no use. >> >> --[[chrysn]] * implementation notes the way pagespec hooks are implemented required some nasty perl tricks, for which the people who showed me felt very bad for having spoilt me. indeed, `no strict refs;` and `*$forward_name = $forward_match;` are not exactly ideal. a change in the pagespec declaration api (why not just `hook` like everything else) would make the implementation cleaner. > How about replacing `blockedby(bug*)` with `linktype(blockedby bug*)` or > something? Then you'd only need one pseudo-hook. --[[smcv]] >> there has been the topic of pagespecs like `typedlink(type glob)` back in >> the [[matching different kinds of links]] discussion, but it was removed >> in favor of per-type matchers. --[[chrysn]] >>> note to self: should use the ``inject`` function to avoid `no strict refs`. --[[chrysn]] * configuration location i aimed for static configuration of the `block_names` in the setup file. this could be made more general like in the [[plugins/shortcut]] plugin, but that would make things more complex. 
* no html links with `rel=` yet as there are no taglink style links between the articles so far, no htmllink gets rendered that could carry the relationship name in its rel field. having the inverse relationship description in backlinks (as in the link created by the map directive in the example above) would be hard to implement. (actually, i think it'd be easier to determine the rel values from the taggedlinks for *every* htmllink than to influence the backlinks in this plugin). * one direction also creates a normal link due to the way add\_link treats relationships, the forward relationship is always going to be reflected in the links/backlinks. a section of [[todo/matching different kinds of links]] was dismissed with "let's not worry about it", this plugin might be reason to worry about it again. (i'd consider what is in @links to be a representation of which hyperlinks are there, and in this case, none are generated). > taglink and traillink already count as wikilinks without generating > any visible HTML. --[[smcv]] implementation ============== there is a working but slightly incomplete (basically where it comes to the details mentioned above) implementation in [[blocks.pm]]. --[[chrysn]] ikiwiki-3.20160121/doc/todo/firm_up_plugin_interface.mdwn0000644000000000000000000000600612650125230020075 0ustar Reopening this for 3.0, to consider adding new functions. I don't want this interface to be too firm; it's ok for a plugin like `ddate` to redefine an internal function like IkiWiki::displaytime if it wants to.. But plugins that still access stuff through IkiWiki:: should be aware that that stuff can change at any time and break them. Possibly without perl's type checking catching the breakage, in some cases. Plugins that only use exported symbols should not be broken by future ikiwiki changes. 
## Most often used functions, by number of calls from plugin code 27 IkiWiki::possibly_foolish_untaint Not very happy about exporting, it's not ikiwiki-specific, and plugins that need to untaint things should think about it, hard. 12 IkiWiki::userinfo_get 5 IkiWiki::userinfo_set Used by only 4 plugins, all of which are fairly core, so thinking don't export. 11 IkiWiki::preprocess 8 IkiWiki::filter 4 IkiWiki::linkify 4 IkiWiki::htmlize The page rendering chain. Note that it's very common to call `preprocess(filter(text))`, or `htmlize(linkify(preprocess(filter(text))))`, while `htmlize(linkify(preprocess(text))` is called less frequently, and it's also not unheard of to leave out a step and do `htmlize(preprocess(text))`. (I haven't checked if any of those cases are bugs.) It would be nice if the api could avoid exposing the details of the render chain, by providing a way to say "I have filtered text, and would like html", or "I have raw text and would like to get it up to the preprocess stage". Another problimatic thing is plugins often define functions named 'preprocess', etc. 12 IkiWiki::linkpage 11 IkiWiki::pagetitle 6 IkiWiki::titlepage These go together; linkpage is needed by all link plugins, and the others are used widely. All should be exported. (Done) 7 IkiWiki::saveindex 5 IkiWiki::loadindex Still too internal to ever be exported? 7 IkiWiki::redirect Only used by 4 plugins, and not in IkiWiki.pm itself, so probably not to be exported. 7 IkiWiki::dirname 4 IkiWiki::basename Not ikiwiki-specific, don't export. 6 IkiWiki::refresh Very internal, not part of IkiWiki.pm, don't export. 5 IkiWiki::yesno Not ikiwiki-specific, but worth exporting to get a consistent localised yes/no parser for directives. 5 IkiWiki::showform 4 IkiWiki::decode_form_utf8 Only used by 3 fairly core plugins, not in IkiWiki.pm, don't export. 5 IkiWiki::rcs_update 4 IkiWiki::rcs_prepedit 5 IkiWiki::is_admin 5 IkiWiki::cgi_savesession 4 IkiWiki::cgiurl Not enough use, I think, to export. 
5 IkiWiki::enable_commit_hook 5 IkiWiki::disable_commit_hook Deep internal magic, if exported people will use it wrong, only used by core plugins. 4 IkiWiki::check_canedit Probably needs to evolve more and be more widely used before being exported. ## Variables used by plugins but not exported yet * %IkiWiki::pagecase (aggregate) * %IkiWiki::backlinks (pagestats) [[done]] (until 4.0).. ikiwiki-3.20160121/doc/todo/finer_control_over___60__object___47____62__s.mdwn0000644000000000000000000001127312650125230023463 0ustar IIUC, the current version of [HTML::Scrubber][] allows for the `object` tags to be either enabled or disabled entirely. However, while `object` can be used to add *code* (which is indeed a potential security hole) to a document, reading [Objects, Images, and Applets in HTML documents][objects-html] reveals that the “dangerous” are not all the `object`s, but rather those having the following attributes: classid %URI; #IMPLIED -- identifies an implementation -- codebase %URI; #IMPLIED -- base URI for classid, data, archive-- codetype %ContentType; #IMPLIED -- content type for code -- archive CDATA #IMPLIED -- space-separated list of URIs -- It seems that the following attributes are, OTOH, safe: declare (declare) #IMPLIED -- declare but don't instantiate flag -- data %URI; #IMPLIED -- reference to object's data -- type %ContentType; #IMPLIED -- content type for data -- standby %Text; #IMPLIED -- message to show while loading -- height %Length; #IMPLIED -- override height -- width %Length; #IMPLIED -- override width -- usemap %URI; #IMPLIED -- use client-side image map -- name CDATA #IMPLIED -- submit as part of form -- tabindex NUMBER #IMPLIED -- position in tabbing order -- Should the former attributes be *scrubbed* while the latter left intact, the use of the `object` tag would seemingly become safe. Note also that allowing `object` (either restricted in such a way or not) automatically solves the [[/todo/svg]] issue. 
For Ikiwiki, it may be nice to be able to restrict [URI's][URI] (as required by the `data` and `usemap` attributes) to, say, relative and `data:` (as per [RFC 2397][]) ones as well, though it requires some more consideration. — [[Ivan_Shmakov]], 2010-03-12Z. [[wishlist]] > SVG can contain embedded javascript. >> Indeed. >> So, a more general tool (`XML::Scrubber`?) will be necessary to >> refine both [XHTML][] and SVG. >> … And to leave [MathML][] as is (?.) >> — [[Ivan_Shmakov]], 2010-03-12Z. > The spec that you link to contains > examples of objects that contain python scripts, Microsoft OLE > objects, and Java. And then there's flash. I don't think ikiwiki can > assume all the possibilities are handled securely, particularly WRT XSS > attacks. > --[[Joey]] >> I've scanned over all the `object` examples in the specification and >> all of those that hold references to code (as opposed to data) have a >> distinguishing `classid` attribute. >> While I won't assert that it's impossible to reference code with >> `data` (and, thanks to `text/xhtml+xml` and `image/svg+xml`, it is >> *not* impossible), throwing away any of the “insecure” >> attributes listed above together with limiting the possible URI's >> (i. e., only *local* and certain `data:` ones for `data` and >> `usemap`) should make `object` almost as harmless as, say, `img`. >>> But with local data, one could not embed youtube videos, which surely >>> is the most obvious use case? >>>> Allowing a “remote” object to render on one's page is a security issue by itself. Though, of course, having an explicit whitelist of URI's may make this issue more tolerable. — [[Ivan_Shmakov]], 2010-03-12Z. >>> Note that youtube embedding uses an >>> object element with no classid. The swf file is provided via an >>> enclosed param element. --[[Joey]] >>>> I've just checked a random video on YouTube and I see that the `.swf` file is provided via an enclosed `embed` element. Whether to allow those or not is a different issue. 
— [[Ivan_Shmakov]], 2010-03-12Z. >> (Though it certainly won't solve the [[SVG_problem|/todo/SVG]] being >> restricted in such a way.) >> Of the remaining issues I could only think of recursive >> `object` — the one that references its container document. >> — [[Ivan_Shmakov]], 2010-03-12Z. ## See also * [Objects, Images, and Applets in HTML documents][objects-html] * [[plugins/htmlscrubber|/plugins/htmlscrubber]] * [[todo/svg|/todo/svg]] * [RFC 2397: The “data” URL scheme. L. Masinter. August 1998.][RFC 2397] * [Uniform Resource Identifier — the free encyclopedia][URI] [HTML::Scrubber]: http://search.cpan.org/~podmaster/HTML-Scrubber-0.08/Scrubber.pm [MathML]: http://en.wikipedia.org/wiki/MathML [objects-html]: http://www.w3.org/TR/1999/REC-html401-19991224/struct/objects.html [RFC 2397]: http://tools.ietf.org/html/rfc2397 [URI]: http://en.wikipedia.org/wiki/Uniform_Resource_Identifier [XHTML]: http://en.wikipedia.org/wiki/XHTML ikiwiki-3.20160121/doc/todo/filtering_content_when_inlining.mdwn0000644000000000000000000000156112650125230021464 0ustar It would help to allow filtering of content when [[inlining|plugins/inline]] pages. For example, given some way to filter out the display of inlines within other inlines, a blog post could allow easy inline commenting by putting an inline directive with post form at the bottom of the post. > That's trying to do the same thing as the todo item > [[discussion_page_as_blog]]. Difference is that you're suggesting > displaying the comments in the blog post that they comment on, instead > of on the separate disucssion page. Which leads to the problem of those > comments showing up inlined into the blog. > > I know there are benefits to having the comments on the same page and not > a separate discussion page, but it does add compliciations and ikiwiki > already has discussion pages, so I'm more likely to go the route > described in [[discussion_page_as_blog]]. 
--[[Joey]] ikiwiki-3.20160121/doc/todo/fileupload/0000755000000000000000000000000012650125230014271 5ustar ikiwiki-3.20160121/doc/todo/fileupload/soc-proposal/0000755000000000000000000000000012650125230016712 5ustar ikiwiki-3.20160121/doc/todo/fileupload/soc-proposal/discussion.mdwn0000644000000000000000000001145312650125230021770 0ustar There's nothing in [[fileupload]] that suggests putting the file upload limit in the body of a page. That would indeed be a strange choice. Ikiwiki already uses [[PageSpecs|ikiwiki/PageSpec]] in the Preferences page (for specifying locked pages, banned users, and subscriptions), and I had envisioned putting the file upload controls there, and possibly subsuming some of those other controls into them. > Thanks for clarifying; I clearly misunderstood the original text. -- Ben It's not clear to me that the concept of attaching files to a page fits ikiwiki very well; unlike most wikis, ikiwiki supports subdirectories and [[SubPages|ikiwiki/SubPage]], which allows for hierarchical placement of uploaded files, which is a much more flexible concept than simple attachment. Futhermore, the idea of listing all attached files at the bottom of a page seems somewhat inflexible. What if I want to make a podcast, using inline's existing support for that -- I won't want a list of every "attached" file at the bottom of my podcast's page then. > If a file was attached to _some-dir/some-page_, it would be stored in _some-dir/_ and linked from _some-page_. That would seem reasonably hierarchical to me. What do you suggest as an alternative? >> I'd suggest `some-dir/some-page/file`, which nicely makes clear that the file is "attached" to some-page, allows easy wikilinks to "file" from some-page, and has other nice properties. >>> So _some-dir/some-page_ would feature an upload form that stored its payload in _some-dir/some-page/file_? IOW, we'd still be attaching files, but making the relationship between attacher and attached more explicit? 
--Ben >>>> More explicit or less, I don't know.. :-) It seems to make sense for most of the use cases I've thought of to put the uploaded file there, but there might be use cases where it would go somewhere else, and so maybe the UI should allow specifying where it goes (similarly to how ikwiki allows specifying where to put a page when creating a new page). >>>> Exactly where the upload form should be I don't know. Putting it directly on the page seems somewhat strange, I know that some wikis have an actions menu that includes file upload and deletion, I think others make the Edit form include support for uploading files. Maybe survey other wikis and base it on a UI that works well. > As for the attachment list, I envisaged that being optional. --Ben >> So some kind of preprocessor directive that is added to a page to generate the attachment list? >>> Absolutely. I don't understand why the file size would need to be stored in the index file; this information is available by statting the file, surely? Similarly, the mime type can be determined through inspection, unless there turns out to be a reason to need to cache it for speed. --[[Joey]] For images, videos, etc. it would be nice to have some kind of meta data file to go along with it (eg. image.jpg.meta), to store information like creator, title, description, tags, length, width, height, compression, etc. which could be initially created by 'ikiwiki --generate-meta-stuff'. Then PageSpec should be teached to use these. Galleries could then be generated by means of \[[!inline pages="type(image/*) and year(2007)" template="gallery"]]. It should of course be possible to edit this information via ikiwiki.cgi and with any text editor (Name: value). This should also allow for creations of default .html pages with the image/video/file/... and a discussion page. Probably named image.mdwn and image/discussion. I think that would fit nicely with the ikiwiki concept. Comments? --enodev > Replying to myself. 
Just appending .meta gives problems when \$usedirs is enabled as the original file and the directory containing the html file will have the same name. Taking away the original extensions has problems with filenames having different extensions and the same basename. So something like 'about-image.jpg.meta'? That would require no changes to the core to support it and is reasonably easy to the eye. > I also pondered about putting this info in the rcs log, but that is problematic when you just 'cp'/'mv'/whatever the directory. Same goes for using something like svn properties, which git does not even support. Storing this info in the index file is problematic, because that isn't versioned. Major problem I see with this approach would be the disconnected nature of having two files. Posix extended attributes? ;-( > This could also be used to specify the license of the file. > I did an proof of concept implementation of this idea [here](http://ng.l4x.org/brainstorm/gallery/) yesterday night, including the link to the source code. I'd really love to hear comments about this approach. (note1: I'm really not interested in any kind of http interface to that thing, just testing ways of storing the meta data, note2: I'm no perl programmer) ikiwiki-3.20160121/doc/todo/fileupload/soc-proposal.mdwn0000644000000000000000000001172212650125230017604 0ustar # SoC Proposal for Implementation of a File Upload Interface I intend to extend Ikiwiki such that it accepts file uploads, subject to access control, and integrates said uploads with the interface. What follows is a **very rough draft** of my thoughts on the matter. Comments are welcomed, either on the discussion page or via e-mail (_me_ at _inelegant.org_). I suggest we adopt the Trac/Wikipedia concept of "attaching" files to a given page. In this scenario, each page for which file upload has been enabled, will sport an `` construct along with an _Attach_ button. 
Upon successfully attaching a file, its name will be appended to an _"Attachments"_ list at the bottom of the page. The names in the list will link to the appropriate files. Architecturally, this means that after a file has been attached to a page, the page will have to be rebuilt. Files will be uploaded in a background thread via XMLHTTPRequest. This allows us to provide visual indicators of upload status, support multiple uploads at a time, and reduces the amount of template code we must write. After an upload has been started, another text entry field will be rendered, enabling the user to commence a new upload. ## Metadata It is necessary to associate metadata with the uploaded file. The IkiWiki index file already associates rudimentary metadata with the files it renders, but there has been interest from multiple sources in creating a general purpose metadata layer for IkiWiki which supports the association of arbitrary metadata with a file. This work is outside the scope of the file upload feature, but I will attempt a basic implementation nevertheless. A key decision involves the storage of the metadata. IkiWiki must be as usable from the CLI as from the web, so the data being stored must be easily manipulatable using standard command line tools. It is infeasible to expect users to embed arbitrary metadata in arbitrary files, so we will use a plaintext file consisting of name-value pairs for recording metadata. Each file in the IkiWiki source directory may have its own metadata file, but they are always optional. The metadata for a file, _F_, will be stored in a file named _F.meta_. For example, the metadata for this page would be in _todo/fileupload/soc-proposal.mdwn.meta_. For instance: `cat "license: gpl\n" >> software.tar.gz.meta`. It would be trivial to distribute a tool with IkiWiki that made this even easier, too, e.g. `ikiwiki-meta license gpl software.tar.gz`. An open issue is how this metadata will be added from the web interface. 
For source files, this approach conflicts with the [_meta_ plugin](http://ikiwiki.info/plugins/meta/), so there needs to be some integration between the two. In keeping with the current architecture of IkiWiki, we can make this metadata available to plugins by using a hash keyed on the filename, e.g. `$metadata{'software/software.tar.gz'}{'license'} eq 'gpl'`. In general, we will only use the _.meta_ files to store data that cannot be automatically determined from the file itself. For uploaded files this will be probably include the uploader's IP address, for example. ## Configuration In [[todo/fileupload]] it is specified that the upload feature must be highly configurable. Joey suggests the use of the preferences page to specify some of these options, but it is not yet clear which ones are important enough to expose in this way. All options will be configurable via the config file. We will (or do) support configuring: * The allowable MIME types of uploaded files. * The maximum size of the uploaded file. * The maximum size of the upload temporary directory. * The maximum size of the source directory. * The IP addresses allowed to upload. * The pages which can have files attached to them. * The users who are allowed to upload. * The users who are prohibited from uploading. etc. ## Operation 1. File upload forms will be rendered on all wiki pages which have been allowed in the global configuration file. By default, this will probably be none of them. 2. The forms will interface with _ikiwiki.cgi_, passing it the filename, the file contents, and the name of the page to which it is being attached. 3. The CGI will consult the config file and any embedded pagespecs in turn, to determine whether the access controls permit the upload. If they don't, an error message will be displayed to the user, and the process will abort. 4. The uploaded file will be saved to a temporary upload directory. 5. Access controls which work on the entire file will be ran. 
The process will abort if they fail, or if the upload appears to have been aborted. Before the process is aborted, the file will be deleted from the temp directory. 6. The file is moved to the appropriate directory. 7. The _$file.meta_ file will be created and populated. 8. The uploaded file will be committed to the RCS. 9. _.ikiwiki/index_ will be modified to reflect the new upload (as above). 10. The page to which the file is attached (and any other affected pages) will be regenerated. --Ben ikiwiki-3.20160121/doc/todo/fileupload/discussion.mdwn0000644000000000000000000000426512650125230017352 0ustar * Limits to size of files that can be uploaded. Prevent someone spamming the wiki with CD isos.. > CGI.pm has a limitation that you can't prevent someone uploading > something huge and filling up your server. > However it is obviously possible to not actually put it in to the > wiki if it's too large. > Presumably there is also a way to limit the size of POST requests > in the server. * Limits to the type of files that can be uploaded. To prevent uploads of virii, css, raw html etc, and avoid file types that are not safe. Should default to excluding all files types, or at least all except a very limited set, and should be able to open it up to more types. Would checking for file extensions (.gif, .jpg) etc be enough? Some browsers are probably too smart for their own good and may ignore the extension / mime info and process as the actual detected file type. It may be necessary to use file to determine a file's true type. > I think using the extension is too risky, and as much information as > possible should go in to the decision. Saving the file to disk, then > checking the type before using it seems like the best approach to me, > as long as the file is deleted properly. > Have you any thoughts on what the interface should be? I can see three > options. First add a box to the file creation page that allows you > to upload a file instead of the page. 
The second is an upload file > link that asks for a page. The last would be an attachments system > that e.g. Twiki use, where the file could be uploaded as a subpage. > How about the limit setting etc.? Add it as a box on the admin's > preference page, allow it anywhere using preprocessor directives, > or have a configuration page that only the admin is allowed to edit > (and perhaps people named on the page?) > The syntax of the conditionals isn't too hard, as the things that > are being added fit in nicely. It might be nice to allow plugins > to register new functions for them, and provide callbacks to > provide a yes no answer. I'm haven't looked at the code yet, > are the pagespecs uniform in all places, or is the conditional > usage an extended one? i.e. can I lock pages based on date etc? > --[[JamesWestby]] ikiwiki-3.20160121/doc/todo/fileupload.mdwn0000644000000000000000000000507012650125230015162 0ustar (I've written a [[proposal|todo/fileupload/soc-proposal]] for this feature --Ben). Support for uploading files is useful for many circumstances: * Uploading images. * Uploading local.css files (admin only). * Uploading mp3s for podcasts. * Etc. ikiwiki should have an easy to use interface for this, but the real meat of the work is in securing it. Several classes of controls seem appropriate: * Limits to size of files that can be uploaded. Prevent someone spamming the wiki with CD isos.. * Limits to the type of files that can be uploaded. To prevent uploads of virii, css, raw html etc, and avoid file types that are not safe. Should default to excluding all files types, or at least all except a very limited set, and should be able to open it up to more types. Would checking for file extensions (.gif, .jpg) etc be enough? Some browsers are probably too smart for their own good and may ignore the extension / mime info and process as the actual detected file type. It may be necessary to use `file` to determine a file's true type. 
* Optional ability to test a file using a virus scanner like clamav. * Limits to who can upload what type of files. * Limits to what files can be uploaded where. It seems that for max flexability, rules should be configurable by the admin to combine these limits in different ways. If we again extend the pagespec for this, as was done for [[conditional_text_based_on_ikiwiki_features]], the rules might look something like this: ( maxsize(30kb) and type(webimage) ) or ( user(joey) and maxsize(1mb) and (type(webimage) or *.mp3) ) or ( user(joey) and maxsize(200mb) and (*.mov or *.avi) and videos/*) With a small extension, this could even be used to limit the max sizes of normal wiki pages, which could be useful if someone was abusing an open wiki as a wikifs. Maybe. ( type(page) and maxsize(32k) ) And if that's done, it can also be used to lock users from editing a pages or the whole wiki: !(( user(spammer) and * ) or ( user(42.12.*) and * ) or ( user(http://evilopenidserver/*) and * ) or ( user(annoying) and index) or ( immutable_page )) That would obsolete the current simple admin prefs for banned users and locked pages. Suddenly all the access controls live in one place. Wonderbar! (Note that pagespec_match will now return an object that stringifies to a message indicating why the pagespec matched, or failed to match, so if a pagespec lock like the above prevents an edit or upload from happening, ikiwiki could display a reasonable message to the user, indicating what they've done wrong.) [[!tag soc done]] ikiwiki-3.20160121/doc/todo/feed_enhancements_for_inline_pages.mdwn0000644000000000000000000001431512650125230022056 0ustar [[!template id=gitbranch branch=GiuseppeBilotta/inlinestuff author="[[GiuseppeBilotta]]"]] I rearranged my patchset once again, to clearly identify the origin and motivation of each patch, which is explained in the following. 
In my ikiwiki-based website I have the following situation: * `$config{usedirs}` is 1 * there are a number of subdirectories (A/, B/, C/, etc) with pages under each of them (A/page1, A/page2, B/page3, etc) * 'index pages' for each subdirectory: A.mdwn, B.mdwn, C.mdwn; these are rather barebone, only contain an inline directive for their respective subpages and become A/index.html, etc * there is also the main index.mdwn, which inlines A.mdwn, B.mdwn, C.mdwn, etc (i.e. the top-level index files are also inlined on the homepage) With the upstream `inline` plugin, the feeds for A, B, C etc are located in `A/index.atom`, `B/index.atom`, etc; their title is the wiki name and their main link goes to the wiki homepage rather than to their respective subdir (e.g. I would expect `A/index.atom` to have a link to `http://website/A` but it actually points to `http://website/`). This is due to them being generated from the main index page, and is fixed by the first patch: ‘inline: base feed urls on included page name’. As explained in the commit message for the patch itself, this is a ‘forgotten part’ from a previous page vs destpage fix which has already been included upstream. > Applied. --[[Joey]] >> Thanks. The second patch, ‘inline: improve feed title and description management’, aligns feed title and description management by introducing a `title` option to complement `description`, and by basing the description on the page description if the entry is missing. If no description is provided by either the directive parameter or the page metadata, we use a user-configurable default based on both the page title and wiki name rather than hard-coding the wiki name as description. > Reviewing, this seems ok, but I don't like that > `feed_desc_fmt` is "safe => 0". And I question if that needs > to be configurable at all. I say, drop that configurable, and > only use the page meta description (or wikiname for index). > > Oh, and could you indent your `elsif` the same as I? 
--[[Joey]] >> I hadn't even realized that I was nesting ifs inside else clauses, >> sorry. I think you're also right about the safety of the key, after >> all it only gets interpolated with known, safe strings. >>> I did not mean to imply that I thought it safe. --[[Joey]] >>>> Sorry for assuming you implied that. I do think it is safe, though >>>> (I defaulted to not safe just to err on the safe side). >> The question is what to do for pages that do not have a description >> (and are not the index). With your proposal, the Atom feed subtitle >> would turn up empty. We could make it conditional in the default >> template, or we could have `$desc` default to `$title` if nothing >> else is provided, but at this point I see no reason to _not_ allow >> the user to choose a way to build a default description. >>> RSS requires the `` element be present, it can't >>> be conditionalized away. But I see no reason to add the complexity >>> of an option to configure a default value for a field that >>> few RSS consumers likely even use. That's about 3 levels below useful. >>> --[[Joey]] >>>> The way I see it, there are three possibilities for non-index pages >>>> which have no description meta: (1) we leave the >>>> description/subtitle in feed blank, per your current proposal here >>>> (2) we hard-code some string to put there and (3) we make the >>>> string to put there configurable. Honestly, I think option #1 sucks >>>> aesthetically and option #2 is conceptually wrong (I'm against >>>> hard-coding stuff in general), which leaves option #3: however >>>> rarely used it would be, I still think it'd be better than #2 and >>>> less unaesthetical than #1. >>>> I'm also not sure what's ‘complex’ about having such an option: >>>> it's definitely not going to get much use, but does it hurt to have >>>> it? I could understand not wasting time putting it in, but since >>>> the code is written already … (but then again I'm known for being a >>>> guy who loves options). 
The third patch, ‘inline: allow assigning an id to postform/feedlink’, does just that. I don't currently use it, but it can be particularly useful in the postform case for example for scriptable management of multiple postforms in the same page. > Applied. --[[Joey]] >> Thanks. In one of my wiki setups I had a terminating '/' in `$config{url}`. You mention that it should not be present, but I have not seen this requirement described anywhere. Rather than restricting the user input, I propose a patch that prevents double slashes from appearing in links created by `urlto()` by fixing the routine itself. > If this is fixed I would rather not put the overhead of fixing it in > every call to `urlto`. And I'm not sure this is a comprehensive > fix to every problem a trailing slash in the url could cause. --[[Joey]] >> Maybe something that sanitizes the config value would be better instead? >> What is the policy about automatic changing user config? >>> It's impossible to do for perl-format setup files. --[[Joey]] >>>> Ok. In that case I think that we should document that it must be >>>> slash-less. I'll cook up a patch in that sense. The inline plugin is also updated (in a separate patch) to use `urlto()` rather than hand-coding the feed urls. You might want to keep this change even if you discard the urlto patch. > IIRC, I was missing a proof that this always resulted in identical urls, > which is necessary to prevent flooding. I need such a proof before I can > apply that. --[[Joey]] >> Well, the URL would obviously change if the `$config{url}` ended in >> slash and the `urlto` patch (or other equivalent) went into effect. >> Aside from that, if I read the code correctly, the only other extra >> thing that `urlto` does is to `beautify_url_path` the `"/".$to` part, >> and the only way this would cause the url to be altered is if the >> feed name was "index" (which can easily happen) and >> `$config{htmlext}` was set to something like `.rss` or >> `.rss.1`. 
>> So there is a remote possibility that a different URL would be >> produced. ikiwiki-3.20160121/doc/todo/fastcgi_or_modperl_installation_instructions.mdwn0000644000000000000000000000434212650125230024306 0ustar There has got to be a way to run the CGI wrapper under fastcgi or modperl (apache 2). Are there easy to follow instructions describing how to set this up? > AFAIK no one has done this. One immediate problem would be permissions; > the CGI wrapper runs setuid to you so it can write to the wiki -- if > running in fastcgi/modperl I guess it would run as the web server, unless > there's some way to control that. So you'd need to set up the perms > differently, to let the web server commit changes to the wiki. > > I've not looked at what code changes fastcgi or modperl would require in > ikiwiki. --[[Joey]] > > Looking at nginx support in [[tips/dot_cgi]], I had to figure that out, and it's not so complicated. The hackish way that's documented there right now (and also supported by [answers on serverfault.com](http://serverfault.com/questions/93090/installing-ikiwiki-on-nginx-fastcgi-fcgi-wrapper) or [other](http://vilain.net/comp/ikiwiki_setup.html) [guides](https://library.linode.com/web-applications/wikis/ikiwiki/arch-linux)), and involves starting up a fcgi wrapper, which I find personally quite weird. > > > > Otherwise the general idea would be to launch a daemon per site that would have a pool of fastcgi processes to answer requests. The common setup pattern here is that users have a fixed quota of processes running as their user, listening either on the network (hackish: a port needs to be allocated for each user) or on a socket (documented above, but then the webserver needs write access). > > > > Perl has had extensive support for FastCGI for quite a while. It seems to me a simple daemon could be written to wrap around the `.cgi`, it's a common way things are deployed. 
[RT](http://rt.bestpractical.com/) for example can run as a regular CGI, under `mod_perl` or `FastCGI` indiscrimenatly, the latter being more reliable and faster. They use [Plack](http://search.cpan.org/dist/Plack/) to setup that server (see the [startup script](https://github.com/bestpractical/rt/blob/stable/sbin/rt-server.in) for an example). But of course, [TIMTOWTDI](http://search.cpan.org/search?query=fastcgi&mode=all). --[[anarcat]] > > > > Also related: [[todo/multi-thread_ikiwiki]], [[todo/rewrite_ikiwiki_in_haskell]]. :) --[[anarcat]] [[!tag wishlist]] ikiwiki-3.20160121/doc/todo/fancypodcast/0000755000000000000000000000000012650125230014623 5ustar ikiwiki-3.20160121/doc/todo/fancypodcast/discussion.mdwn0000644000000000000000000001705712650125230017707 0ustar # Round 1 ikiwiki's simple podcasting, while elegant and minimal, doesn't (as mentioned in [[todo/blogging]]) produce full-featured feeds. In fancy podcasts, episodes are accompanied by text content. The feeds also have lots more metadata. ## Design 7. For each fancy podcast episode, write a blog post containing `\[[!meta enclosure="WikiLink/to/media.mp3"]]`. (Don't specify more than one enclosure -- but if you do, last one wins.) 7. When rendering to HTML (single-page or inlined), append a link to the media file. 7. When rendering to RSS/Atom, the text is the entry's content and the media file is its enclosure. 7. Don't break simple podcasts in pursuit of fancy podcasts. ## Implementation ### Completed * Cover the existing simple podcast behavior with tests. * Add an `enclosure` field to [[plugins/meta]] that expands the given [[ikiwiki/WikiLink]] to an absolute URL (feed enclosures pretty much need to be, and the reference feeds I've looked at all do this). * Write failing tests for the desired single-page and inlined HTML behavior, then make them pass by adding enclosure stanzas to `{,inline}page.tmpl`. 
* Write failing tests for the desired RSS/Atom behavior, then make them pass via changes to `{atom,rss}item.tmpl` and [[plugins/inline]]. * Match feature-for-feature with [tru_podcast](http://www.rainskit.com/blog/542/tru_podcast-a-podcasting-plugin-for-textpattern) (what [[schmonz]] will be migrating from). * Enrich [feed metadata](http://cyber.law.harvard.edu/rss/rss.html) by catching up `rsspage.tmpl` to `atompage.tmpl`. * Verify that [[plugins/more]] plays well with fancy podcasts. * Verify that the feeds validate. * Subscribe to a fancy feed in some common podcatchers and verify display details against a reference podcast. * Verify smooth transitions for two common use cases (see testing details below). * Code review: don't add enclosure divs unless we have enclosures. * Code review: genericize download link for more use cases. * Code review: don't confuse old readers with Atom names in RSS. * Code review: instead of hacking back to `$link`, just provide it. * Code review: show author in addition to feedname, if different. ### Code review + # XXX better way to compute relative to srcdir? + my $file = $absurl; + $file =~ s|^$config{url}/||; I don't think ikiwiki offers a better way to do that, because there is normally no reason to do that. Why does it need an url of this form here? --[[Joey]] > In all the popular, production-quality podcast feeds I've looked > at, enclosure URLs are always absolute (even when they could be > expressed concisely as relative). [Apple's > example](http://www.apple.com/itunes/podcasts/specs.html#example) > does too. So I told \[[!meta]] to call `urlto()` with the third > parameter true, which means the \[[!inline]] code here gets an > absolute URL in `$pagestate{$p}{meta}{enclosure}`. To compute the > enclosure's metadata, though, we of course need it as a local path. > I didn't see a less > [ongepotchket](http://www.jewish-languages.org/jewish-english-lexicon/words/1402) > way at the time. 
If you have a better idea, I'm happy to hear it; > if not, I'll add an explanatory comment. --[[schmonz]] >> I would be more comfortable with this if two two different forms of url >> you need were both generated by calling urlto. It'd be fine to call >> it more than once. --[[Joey]] >>> Heh, it was even easier than that! (Hooray for tests.) Done. >>> --[[schmonz]] +
    + Can't we avoid adding this div when there's no enclosure? --[[Joey]] > Sure, I've moved the `` check to outside the > section-and-div block for `{,inline}page.tmpl`. --[[schmonz]] +Download this episode "Download this episode" is pretty specific to particular use cases. Can this be made more generic, perhaps just "Download"? --[[Joey]] > Yep, I got a little carried away. Done. --[[schmonz]] - - <TMPL_VAR AUTHOR ESCAPE=HTML>: <TMPL_VAR TITLE> - This change removes the author name from the title of the rss feed, which does not seem necessary for fancy podcasts. And it is a change that could negatively impact eg, Planet style aggregators using ikiwiki. --[[Joey]] > While comparing how feeds render in podcatchers, I noticed that > RSS and Atom were inconsistent in a couple ways, of which this was > one. The way I noticed it: with RSS, valuable title space was being > spent to display the author. I figured Atom's display was the one > worth matching. You're right, of course, that planets using the > default template and somehow relying on the current author-in-the-title > rendering for RSS feeds (but not Atom feeds!) would be broken by > this change. I'm having trouble imagining exactly what would break, > though, since guids and timestamps are unaffected. Would it suffice > to provide a note in the changelog warning people to be careful > upgrading their planets, and to customize `rssitem.tmpl` if they > really prefer the old behavior (or don't want to take any chances)? > --[[schmonz]] >> A specific example I know of is updo.debian.net, when used with >> rss2email. Without the author name there, one cannot see who posted >> an item. It's worth noting that planet.debian.org does the same thing >> with its rss feed. (That's probably what I copied.) Atom feeds may >> not have this problem, don't know. --[[Joey]] >>> Okay, that's easy to reproduce. It looks like this _might_ be >>> a simple matter of getting \[[!aggregate]] to populate author in >>> `add_page()`. 
I'll see what I can figure out. --[[schmonz]] >>>> Yep, that was mostly it. If the feed entry defines an author, >>>> and the author is distinct from the feed name, we now show `NAME: >>>> AUTHOR`, else just show `NAME` (same as always). In addition, >>>> the W3 feed validator says `` is invalid, so >>>> I replaced it with ``, and all of a sudden `r2e` >>>> gives me better `From:` headers. With the latest on my branch, >>>> when I generate the same planet as updo and run `r2e` over it, >>>> the names I get in `From:` look like so: * `"updo: Junio C Hamano"` * `"updo: Greg Kroah-Hartman"` * `"updo: Eric Raymond: esr"` (article author != feed name, so we get both) * `"updo: Jannis Pohlman: Jannis Pohlmann"` (oops! I tweaked the real updo) >>>> --[[schmonz]] +++ b/templates/rsspage.tmpl + xmlns:atom="http://www.w3.org/2005/Atom" + Why is it using atom namespace inside an rss feed? What are the chances every crummy rss reader on earth is going to understand this? I'd put it at about 0%; I doubt ikiwiki's own rss reader understands such a mashup. --[[Joey]] > The validator I used () told me to. > Pretty sure it doesn't make anything work better in the podcatchers > I tried. Hadn't considered that it might break some readers. > Removed. --[[schmonz]] +ikiwiki Does this added tag provide any benefits? --[[Joey]] > Consistency with the Atom feed, and of course it trumpets ikiwiki > to software and/or curious humans who inspect their feeds. The tag > arrived only in RSS 2.0, but that's already the version we're > claiming to be, and it's over a decade old. Seems much less risky > than the atom namespace bits. --[[schmonz]] >> Sounds ok then. --[[Joey]] ikiwiki-3.20160121/doc/todo/fancypodcast.mdwn0000644000000000000000000000713612650125230015521 0ustar ## Status [[!template id=gitbranch branch=schmonz/fancypodcast author="[[schmonz]]"]] [[!tag patch]] Nothing new since 2013/07/21 [[merge|done]] to `master`. 
## Features [[!table data=""" Feature |iTunes RSS|iTunes Atom|Downcast RSS|Downcast Atom Feed image | | | | Feed title |(./) |(./) |(./) |(./) Feed publisher | | | | Feed "category" | | | | Feed date |(./) |(./) |(./) |(./) Feed description |(./) |(./) |(./) | Episode image | | | | Episode title |(./) |(./) |(./) |(./) Episode date |(./) |(./) |(./) |(./) Episode duration | | | | Episode author | | | | Episode description|(./) |(./) |(./) | Episode enclosure |(./) |(./) |(./) |(./) """]] ## Future improvements ### Embedded audio/video player in browsers * For a given enclosure, depending on autodetected MIME: * `audio/*` -> `

    $text_ok