stx2any/man/make.lm4
w_doc_id(s2aimpl.man)dnl
This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski
and released under the license in ../LICENSE
w_use(examples/reflection-disclaimer)
Definitions for man. (Of course, we also use some raw troff and tbl.)
Metadata handling, man-style:
{{{
define(`w_section', `w_set_or_get(`@w_section', `$1')')
define(`w_man_desc',
`ifdef(`@w_man_has_desc',`.br',`.SH NAME')`'w_nl`'define(`@w_man_has_desc',t)'dnl
`ifelse(`$2',,`w_title \- $1`'', `$1 \- $2`'')')
}}}
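For illustration (this is just a reading of the definitions above, checked
against regression/markup.man further down; markup.test sets no title, hence
the bare backslash-dash): the first w_man_desc call opens the NAME section,
and every later call only adds a .br continuation line.
{{{
w_man_desc(foo, a useful program)
w_man_desc(a regression test document)
   ...expands to...
.SH NAME
foo \- a useful program
.br
\- a regression test document
}}}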
Paragraphs and headings.
{{{
define(`w_softpara', `w_dump_footnotes(`.br`'w_nl`'',)`'dnl')
define(`w_paragraph', `.PP`'w_nl')
define(`w_headline',
`ifelse(`$1',1,`.SH $2',`$1',2,`.SS $2',
`$1',3,`.PP`'w_nl.B $2',`.PP`'w_nl.SB $2')')
}}}
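A rough sketch of the mapping, derived from the w_headline definition above
(the heading texts here are made up):
{{{
! Top heading         =>   .SH Top heading
!! Subheading         =>   .SS Subheading
!!! Minor heading     =>   .PP
                           .B Minor heading
}}}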
Block system environments.
Oh my god, do we do things by hand! It almost makes me appreciate all the
infrastructure we have for this. Paragraphs within list items need
special handling. We do list numbering by hand. List nesting is done
via RS and RE, though it should probably follow more sophisticated rules.
{{{
w_define_env(`w_man_list',
`pushdef(`w_paragraph', `.IP "" 4`'w_nl`'').RS 4`'w_nl`'',
`.RE`'w_nl`'popdef(`w_paragraph')w_softopen')
w_derive_env(`-', `w_man_list', 0,,,,)
define(`w_listitem', `.IP \(bu 4`'w_nl\&')
w_derive_env(`#', `w_man_list', 0,
`w_newcounter(enumlist)',,,`w_delcounter(enumlist)')
w_define_env(`#i',
`w_stepcounter(enumlist).IP w_counter_arabic(enumlist). 4`'w_nl\&')')
w_define_env(`q', `.RS 8`'w_nl`'', `.RE`'w_nl`'')
define(`w_defnterm', `.TP`'w_nl`'\&$1`'')
w_define_env(`t',
`pushdef(`w_paragraph', `.TP`'w_nl\&w_nl`'')',
`popdef(`w_paragraph')')
w_define_env(`:',
`.RS 3`'w_nl`'',
`.RE`'w_nl`'w_softopen')
}}}
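A small worked example of what this machinery emits for a two-item bullet
list, lifted from regression/list.man further down:
{{{
- a                =>   .RS 4
- b                     .IP \(bu 4
                        \&a
                        .IP \(bu 4
                        \&b
                        .RE
}}}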
Other environments.
{{{
w_define_env(`litblock', `w_beg_para.nf`'w_nl\fC', `\fP`'w_nl.fi')
define(`w_footnotemark', `\&w_nl.SM [$1]w_nl\&')
w_define_env(center, `.ce 10000`'w_nl`'', `.ce 0`'w_nl`'')
w_define_env(comment,
`pushdef(`w_softbr', `.\" ')pushdef(`w_softpara', `.\"')`'w_nl.\"',
``'popdef(`w_softbr')popdef(`w_softpara')w_nl\&')
define(`w_caption', `.SM $1')
}}}
Emphasis.
{{{
define(`w_literal', `\fC`'$1\fP`'')
define(`w_emph', `\fI`'$1\fP`'')
define(`w_strong', `\fB`'$1\fP`'')
define(`w_quotation', `\(lq`'$1\(rq`'')
}}}
Other inlines.
{{{
define(`w_linebr', `w_nl`'.br')
define(`w_link', `$2`'w_footnote(`$1')')
define(`w_img', `\&w_nl.PSPIC "w_file(`$1.'w_picture_suffix)"w_nl\&')
define(`w_label', `$2')
define(`w_refer', `$2')
}}}
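For example (cf. regression/markup.man further down; the footnote number
depends on context), a named link becomes running text plus a small bracketed
mark, and the URL itself only appears when w_dump_footnotes runs:
{{{
w_link(http://www.google.fi/, Google)
   ...expands to...
Google\&
.SM [2]
\&
   ...and the footnote dump later emits...
\&
.SM [2]
\& http://www.google.fi/
.br
}}}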
Tables.
tbl is very sensitive to white space. We try to guard against errors,
but I really can't guarantee that every combination of markup and tables
works.
{{{
define(`w_make_tablespec',
`ifelse(`$*',,,`ifelse(`$1',p,l,`$1') w_make_tablespec(shift($@))')')
w_define_env(w_table,
`pushdef(`w_eline',` \')\&w_nl`'.TS`'w_nl`'w_make_tablespec($@).w_nl`'',
`\&w_nl`'.TE`'w_nl`'popdef(`w_eline')')
w_define_env(w_row,,
`undefine(`@w_in_row_flag')define(`w_eline',`define(`w_eline',` \')')')
w_define_env(w_cell,
`ifdef(`@w_in_row_flag',` ')define(`@w_in_row_flag',t)'dnl
`ifelse(`$2',p,`T{w_nl\&')',
`ifelse(`$2',p,`\&w_nl`'T}')')
define(`w_table_rule', `_')
}}}
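Here is a trimmed sample of the tbl code this produces, taken from
regression/table.man further down: the column spec line comes from
w_make_tablespec (a p column is downgraded to l), and each p cell is wrapped
in a T{ ... T} text block.
{{{
.TS
l c .
T{
\&Emigration is nice!
\&
T}	foo
\&
.TE
}}}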
Special and quoted characters.
{{{
define(`w_bldot', `\&.')
define(`w_bs', `\\')
define(`w_blap', `\&w_apo')
define(`w_emdash', `\`('em')
define(`w_endash', `\-')
define(`w_ellipsis', `.\|.\|.')
define(`w_copyrightsign', `\`('co')
define(`w_trademarksign', `\`('tm')
define(`w_rarrow', `\`('->')
define(`w_larrow', `\`('<-')
}}}
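In practice this gives mappings like the following (input forms as exercised
by regression/abbrev.test, output escapes as defined above):
{{{
(c)                  =>  \(co
(tm)                 =>  \(tm
->                   =>  \(->
<-                   =>  \(<-
6--8                 =>  6\-8
word -- word         =>  word \(em word
...                  =>  .\|.\|.
}}}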
stx2any/man/settings.lsh
This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski
and released under the license in ../LICENSE
{{{
test -z "$NUMBERING" && NUMBERING=off
test -z "$PIC_SUFFIX" && PIC_SUFFIX=eps
}}}
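These are only defaults; here is a sketch of overriding them from the command
line (the input file name is made up), using the stx2any options that feed
NUMBERING and PIC_SUFFIX:
{{{
scripts/stx2any -T man --numbering on --picture-suffix png mydoc.stx
}}}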
stx2any/man/templ.lm4
This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski
and released under the license in ../LICENSE
{{{
.TH "defn(`@w_title')" "defn(`@w_section')" "defn(`@w_date')" "defn(`@w_author')"
ifdef(`@w_iso_language',`.hla defn(`@w_iso_language')',`dnl')
w_dumpdiv(frontmatter)dnl
w_dumpdiv(ingr)dnl
ifelse(w_make_toc,true,`w_dumpdiv(toc)')dnl
w_dumpdiv(body)
w_dumpdiv(backmatter)
}}}
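With the metadata from regression/metadata.test (title, section 8, date,
author), the .TH line above should expand to roughly:
{{{
.TH "The Title" "8" "day/month/year" "Author Name"
}}}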
stx2any/TODO
- Features:
* end-section notes
* e-mail style block quotes
* make footnotes only from indirect link abbrevs
* a citing / bibliography system, support for cites in generic links?
- but it can be quite nicely handled with link abbrevs already...
* abbreviations for environments
* python and/or scheme implementation
- Bugs:
* How to deal with the dissimilarity of LaTeX tables and other tables?
* Clean up gather_stx_titles?
* Clean up caption system...
- Tests that remain to be written:
* tests for quote interactions (phooie)
* hook tests: temporary redefinition of macros
* command line options?
* cross links
* html2stx tests
- Documentation:
* adding new output formats
* extensions (with w_use)
stx2any/html/make.lm4
w_doc_id(s2aimpl.html)dnl
This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski
and released under the license in ../LICENSE
w_use(examples/reflection-disclaimer)
Definitions for HTML.
HTML provides its own hack diversion, ''metas''.
{{{
w_define_div(metas)
}}}
Paragraphs and headings.
{{{
define(`w_softpara', `w_dump_footnotes(
,)')
define(`w_paragraph', `
')
define(`w_headline',
`
,) w_define_env(:, `pushdef(`w_pending_block_hook',)
', `') define(`w_footnotemark', `[$1]') w_define_env(center, `
$1') }}} Other inlines. {{{ define(`w_linebr',
This is the day/month/year
version, written by Author Name.
stx2any/regression/metadata.test
title: The Title
doc_id: metadata-test
author: Author Name
date: day/month/year
section: 8
language: english
char_coding: ascii
! w_title -- introduction
This is the "w_date" version, written by w_author.
stx2any/regression/weird.titles
define(`@w_file_exists_ession/abbrev.test', t)dnl
define(`@w_file_exists_ession/crossref.test', t)dnl
define(`@w_file_exists_ession/defn-nest.test', t)dnl
define(`@w_file_exists_ession/link-abbrev-bug.test', t)dnl
define(`@w_file_exists_ession/link-abbrev.test', t)dnl
define(`@w_file_exists_ession/link-abbrev-toc.test', t)dnl
define(`@w_file_exists_ession/list.test', t)dnl
define(`@w_file_exists_ession/markup.test', t)dnl
define(`@w_file_exists_ession/metadata.test', t)dnl
define(`@w_file_exists_ession/slid', t)dnl
define(`@w_file_exists_ession/tabl', t)dnl
define(`@w_file_exists_ession/underscor', t)dnl
define(`@w_filename_of_crossref-test', `ession/crossref.test')dnl
define(`@w_title_of_'`ession/metadata.test', `The Title')dnl
define(`@w_filename_of_metadata-test', `ession/metadata.test')dnl
define(`@w_title_of_'`ession/slid', `slide test')dnl
define(`@w_title_of_'`ession/underscor', `underscore quoting tests')dnl
stx2any/regression/metadata.latex
\documentclass[a4paper]{article}
\usepackage[english]{babel}
\usepackage[T1]{fontenc}
\newcommand{\strongemph}[1]{\textbf{#1}}
\newcommand{\litfmt}[1]{\texttt{#1}}
\title{The Title}
\author{Author Name}
\date{day/month/year}
\begin{document}
\maketitle
\tableofcontents
\section{The Title --- introduction}
This is the ``day/month/year'' version, written by Author Name.
\end{document}
stx2any/regression/link-abbrev.man
.TH "" "" "" ""
.SH Testing link abbreviations
.PP
First paragraph. It does
not have anything weird.
.PP
Second paragraph. It doesn't have anything weird, either. There's
just a line that begins with something that looks quite a lot like a
\&
.SM [1]
\& block. Do we manage it?
.br
\&
.SM [1]
\& linkdata
.br
.PP
Okay, now for the real\&
.SM [2]
\& thing. There should be all kinds of\&
.SM [3]
\&
links here. See \(lqTesting link abbreviations\(rq to get a hold of what
they all mean\&
.SM [4]
\&. \&
.SM [5]
\&.
.br
\&
.SM [2]
\& mailto:some.email@address.fi
.br
\&
.SM [3]
\& ftp://ftp.spoogle.com/
.br
\&
.SM [4]
\& http://www.meaning.org/
.br
\&
.SM [5]
\& This is a longer footnote,
possibly spanning multiple lines.
.br
.PP
There are longer links: \fCftp://ftp.rfc-editor.org/in-notes/rfc1355.txt\fP and friends. The problem with
URIs is that you\&
.SM [6]
\& have hard time knowing where they stop.
Take for example \fChttp://www.plt-scheme.org/\fP: why is the slash included but
the colon not\&
.SM [7]
\&? (Another example is
\fChttp://c2.com/cgi/quickChanges\fP, or \fChttp://sange.fi/~atehwa/index.html\fP.\|.\|.)
.br
\&
.SM [6]
\& the reader
.br
\&
.SM [7]
\& Don't tell me that properly quoted URI's won't have colons in
such positions. People never properly quote URI's, as required by
RFC2396.
.br
.PP
Here is also an \&
.PSPIC "pictures/zebra.eps"
\&. Please jump back.
I want you to consider an unadored relative link\&
.SM [8]
\& and
another\&
.SM [9]
\&.
.br
\&
.SM [8]
\& ./foo.html
.br
\&
.SM [9]
\& ../index.html
.br
.PP
Does the old-style footnote\&
.SM [10]
\& work anymore in
the presence of link abbreviations\&
.SM [11]
\&?\&
.SM [12]
\&
.br
\&
.SM [10]
\& such as this one here
.br
\&
.SM [11]
\& as tested by this file
.br
\&
.SM [12]
\& And
does it work if broken into
multiple lines?
.br
.SS Explicit label (is not guaranteed to work properly)
.PP
How do multiple labels in the same line work?
What about Second-label conflicts?
.PP
\(em \(em \(em
.PP
This is an interesting way to produce a bibliography:
.PP
RFC2396: Uniform Resource Identifies (URI): Generic Syntax
(\fCftp://ftp.rfc-editor.org/in-notes/rfc2396.txt\fP)
stx2any/regression/metadata.param
--latex-params a4paper
stx2any/regression/list.man
.TH "" "" "" ""
.RS 4
.IP \(bu 4
\&a
.IP \(bu 4
\&b
.RE
.RS 4
.IP \(bu 4
\&c
.IP \(bu 4
\&d
.RS 4
.IP \(bu 4
\&e
.RS 4
.IP 1. 4
\&f
.IP 2. 4
\&g # foo
.RE
.IP \(bu 4
\&inner
.RS 4
.IP 1. 4
\&zoo
.IP 2. 4
\&boo
.RE
.RS 4
.IP 1. 4
\&new list
.IP 2. 4
\&item2
.RE
.RS 8
.IP "" 4
quote?
.RE
quote?
.RE
.IP "" 4
quote?
.RE
.PP
\(em interlude \(em
.RS 4
.IP \(bu 4
\&one
.IP \(bu 4
\&two
.IP \(bu 4
\&three
in a row
for a cow
.IP \(bu 4
\&four
.RS 3
.TP
\&this is so
as definitions go
too low
.RE
.IP "" 4
final blow
.IP \(bu 4
\&five
.IP "" 4
it's alive
.RS 8
consider quote
.RE
back we go
.RE
.RS 4
.IP \(bu 4
\&list intercept
.RS 3
.TP
\&this is the term
show some respect
.RE
.IP "" 4
it is so firm
.IP \(bu 4
\&the problem's not here anymore
.RE
.RS 3
.TP
\&term one
.RS 4
.IP \(bu 4
\&lists are fun
.RE
.TP
\&
the fun's begun
.TP
\&term two
definitions, too!
another for you.
.TP
\&
They can have paragraphs
.TP
\&term three a
.TP
\&term three b
a tree is a tree
.br
be or not to be?
.RS 4
.IP \(bu 4
\&what's happening to me?
.RE
.RS 4
.IP \(bu 4
\&be or not to be?
.RE
.RS 4
.IP \(bu 4
\&what can you say?
.RE
.RE
.RS 4
.IP \(bu 4
\&what's for the delay?
.RE
.RS 4
.IP \(bu 4
\&let's play
.RE
.PP
\(em interlude 2 \(em
.RS 4
.IP \(bu 4
\&foo
foo2
.RS 4
.IP \(bu 4
\&bar
bar2
.RE
.RE
.RS 4
.IP \(bu 4
\&baz
baz2
.RS 4
.IP \(bu 4
\&quux
quux2
.IP \(bu 4
\&fooox \&
.SM [1]
\& fooox2 \&
.SM [2]
\& fooox3
.RS 4
.IP \(bu 4
\&koe
.br
\&
.SM [1]
\&
footnote?
.br
\&
.SM [2]
\& footnote?
.br
.RE
.RE
.RE
stx2any/regression/defn-nest.man
.TH "" "" "" ""
.RS 3
.TP
\&foo
.RS 4
.IP \(bu 4
\&bar
.IP \(bu 4
\&baz
.IP \(bu 4
\&quux
.RE
.TP
\&
hyvin toimii
.TP
\&foo2
.RE
.RS 4
.IP \(bu 4
\&bar2
.IP \(bu 4
\&baz2
.IP \(bu 4
\&quux2
.RE
.RS 3
.TP
\&[foo3] hakasuluissa
.RE
.RS 4
.IP \(bu 4
\&meep
.RE
.PP
huonosti toimii
.RS 3
.TP
\&foo3
.RS 4
.IP 1. 4
\&flooz
.IP 2. 4
\&sooz
.RE
.RE
.RS 4
.IP 1. 4
\&mooz
.IP 2. 4
\&booz
.RS 4
.IP 1. 4
\&dooz
.IP 2. 4
\&klooz
.RE
.RE
.RS 3
.TP
\&foo4
Kiitos
.RE
.RS 3
.TP
\&foo5
.RE
.RS 4
.IP \(bu 4
\&ekkooo
.IP \(bu 4
\&tokkoo
.RE
.PP
Ja vielä:
.RS 3
.TP
\&foo6
.RE
.RS 4
.IP \(bu 4
\&ekkkooo
.IP \(bu 4
\&tokkoooo
.RE
.RS 3
.TP
\&defnlist
.TP
\&
foo
.RS 3
.TP
\&nested defn
bar
.TP
\&nested defn 2
baz
.RE
.TP
\&
continuation
.TP
\&defnlist2
.RS 3
.TP
\&immediate nested defn
stuff
.RE
.RE
stx2any/regression/link-abbrev-bug.docbook-xml
This test is © 1851 E. A. Poe. — I think. Making smooth transitions is hard work. It's not worth hardening the hard; you should look into means of making things go more smoothly.
Thank you for listening… the percussions.
smooth
hard
hard
percussions
stx2any/regression/crossref.test
w_begdiv(defs)
w_doc_id(crossref-test)
w_define_div(index)
w_indexword(index, hard)
w_indexword(index, smooth)
w_indexword(index, percussions)
w_enddiv(defs)dnl
This test is (c) 1851 E. A. Poe. -- I think.
Making smooth transitions is hard work. It's not worth
hardening the hard; you should look into means of making
things go more smoothly.
w_index(ingr, This goes into the ingress)
Thank you for listening... the percussions.
w_dumpdiv(index)
stx2any/regression/slide.latex
\documentclass[a4,notitlepage]{seminar}
\usepackage[T1]{fontenc}
\usepackage[latin1]{inputenc}
\newcommand{\strongemph}[1]{\textbf{#1}}
\newcommand{\litfmt}[1]{\texttt{#1}}
\title{slide test}
\author{somebody}
\date{??.??.????}
\newpagestyle{slidepage}
{\hfill{}slide test\hfill{}}{\hfill{}somebody ??.??.????\hfill{}}
\begin{document}
\slidepagestyle{slidepage}
\begin{slide}
This is a slide.
\begin{itemize}
\item markup should work ordinarily.
\item nothing special here, either.
\end{itemize}
\end{slide}
\begin{slide}
\section*{Important points}
This is the second slide.
\end{slide}\end{document}
stx2any/regression/link-abbrev.latex
\documentclass[a4paper,notitlepage]{article}
\usepackage[T1]{fontenc}
\usepackage[latin1]{inputenc}
\newcommand{\strongemph}[1]{\textbf{#1}}
\newcommand{\litfmt}[1]{\texttt{#1}}
\title{}
\author{}
\date{}
\usepackage{url}
\usepackage{graphicx}
\begin{document}
\maketitle
\section*{\label{Testing.link.abbreviations}Testing link abbreviations}
First paragraph. It does
not have anything weird.
\label{Second.paragraph}Second paragraph. It doesn't have anything weird, either. There's
just a line that begins with something that looks quite a lot like a
\footnote{linkdata} block. Do we manage it?
Okay, now for the real\footnote{\url{mailto:some.email@address.fi}} thing. There should be all kinds of\footnote{\url{ftp://ftp.spoogle.com/}}
links here. See ``Testing link abbreviations (\pageref{Testing.link.abbreviations})'' to get a hold of what
they all mean\footnote{\url{http://www.meaning.org/}}. \footnote{This is a longer footnote,
possibly spanning multiple lines.}.
There are longer links: \url{ftp://ftp.rfc-editor.org/in-notes/rfc1355.txt} and friends. The problem with
URIs (\pageref{RFC2396}) is that you\footnote{the reader} have hard time knowing where they stop.
Take for example \url{http://www.plt-scheme.org/}: why is the slash included but
the colon not\footnote{Don't tell me that properly quoted URI's won't have colons in
such positions. People never properly quote URI's, as required by
RFC2396 (\pageref{RFC2396}).}? (Another example is
\url{http://c2.com/cgi/quickChanges}, or \url{http://sange.fi/~atehwa/index.html}\ldots{})
Here is also an \includegraphics{pictures/zebra.eps}. Please jump back (\pageref{Second.paragraph}).
I want you to consider an unadored relative link\footnote{\url{./foo.html}} and
another\footnote{\url{../index.html}}.
Does the old-style footnote\footnote{such as this one here} work anymore in
the presence of link abbreviations\footnote{as tested by this file}?\footnote{And
does it work if broken into
multiple lines?}
\subsection*{\label{w.autolabel.Explicit.label...is.not.guaranteed.to.work.properly.}\label{Explicit.label}Explicit label (is not guaranteed to work properly)}
How do \label{multiple.labels}multiple labels in the same line \label{work}work?
What about \label{Second.label.conflicts}Second-label conflicts?
\begin{center}\rule{0.5\textwidth}{0.04em}\end{center}
This is an interesting way to produce a bibliography:
\label{RFC2396}RFC2396: Uniform Resource Identifies (URI): Generic Syntax
(\url{ftp://ftp.rfc-editor.org/in-notes/rfc2396.txt})
\end{document}
stx2any/regression/slide.param
--numbering off -Dw_s5url=
stx2any/regression/underscore.docbook-xml
day/month/year
version, written by Author Name.
Oh yes, go back to foo.
stx2any/regression/link-abbrev-toc.test
! foo
!! bar
!!! baz
! quux
!! buux
! zuux
Oh yes, go back to [foo].
stx2any/regression/comment-lines.stripped
--title: The Title
--doc_id: metadata-test
--author: Author Name
--date: day/month/year
--section: 8
--language: english
--char_coding: ascii
--
--! w_title -- introduction
--
--This is the "w_date" version, written by w_author.
stx2any/regression/markup.man
.TH "" "" "" ""
This should eventually end up in the ingress.
Thank you!
.RS 8
This should be the next thing in the ingress. ?
.RE
.SH NAME
foo \- a useful program
.br
\- a regression test document
.PP
foo begins
.br
foo in action, this is a link to heaven\&
.SM [1]
\&.
.br
foo ends
.br
\&
.SM [1]
\& http://heaven/
.br
.PP
.ce 10000
.PP
This should be a centered,
.br
compact,
.br
nice list.
.br
.ce 0
.PP
Normal running text, \fBstrong\fP stuff, \fIemphasised\fP stuff,
\fCliteral\fP stuff. See this?
.PP
Google\&
.SM [2]
\& - named link;
\fChttp://www.google.fi/\fP - unnamed link.
.br
\&
.SM [2]
\& http://www.google.fi/
.br
.RS 8
.PP
There are two kinds of common people:
.br
kind people, and people in common.
.br
\(em Frank D. Roosevelt,esq.
.RE
.PP
\&
.PSPIC "myimage.eps"
\&
testpackage used
.PP
Float 1.
.PP
\(em \(em \(em \(em \(em
.PP
Nothing here, actually.
.PP
.SM The first float.
.PP
\(em \(em \(em \(em \(em
.PP
Float 2.
Please see float 2 for details.
.br
.PP
\(em \(em \(em \(em \(em
.PP
Nothing here, either.
.PP
.SM The second float.
.PP
\(em \(em \(em \(em \(em
.PP
Float 3.
Float 4.
Good we had that sorted out, isn't it?
.br
.PP
\(em \(em \(em \(em \(em
.PP
Well, here is at least something:
\&
.PSPIC "marine_band.eps"
\&
.PP
.SM The third float.
.PP
\(em \(em \(em \(em \(em
.PP
\(em \(em \(em \(em \(em
.PP
Still nothing.
.PP
.SM The fourth float.
.PP
\(em \(em \(em \(em \(em
Note:
.RS 8
.PP
This is a note for your interest.
.RE
Go slow:
.RS 8
.PP
This admonition
will give gentle words of caution.
.RE
stx2any/regression/strip.stripped
content
content
content
content
content
content [empty line above]
/emphasis/
*strong emphasis*
line breaking //
"quoted text" AFAIK
multi-"line
span stuff"
foo /bar/
stx2any/regression/defn-nest.docbook-xml
Emigration is nice! | foo |
Immigration is even nicer! | bar |
account | name | miscellaneous notes |
pkalliok | Panu | not trustworthy, but a nice fellow overall. Good to get acquainted with. |
atehwa | Panu | the same person |
root | superuser | found in every Unix under the sky |
j | " | , |
L | J | |
` | _ | ' |
First paragraph. It does not have anything weird.
Second paragraph. It doesn't have anything weird, either. There's
just a line that begins with something that looks quite a lot like a
[1] block. Do we manage it?
[1] linkdata
Okay, now for the real thing. There should be all kinds of
links here. See Testing link abbreviations
to get a hold of what
they all mean. [2].
[2] This is a longer footnote,
possibly spanning multiple lines.
There are longer links: ftp://ftp.rfc-editor.org/in-notes/rfc1355.txt and friends. The problem with
URIs is that you[3] have hard time knowing where they stop.
Take for example http://www.plt-scheme.org/: why is the slash included but
the colon not[4]? (Another example is
http://c2.com/cgi/quickChanges, or http://sange.fi/~atehwa/index.html…)
[3] the reader
[4] Don't tell me that properly quoted URI's won't have colons in
such positions. People never properly quote URI's, as required by
RFC2396.
Here is also an . Please jump back.
I want you to consider an unadored relative link and
another.
Does the old-style footnote[5] work anymore in
the presence of link abbreviations[6]?[7]
[5] such as this one here
[6] as tested by this file
[7] And
does it work if broken into
multiple lines?
How do multiple labels in the same line work? What about Second-label conflicts?
This is an interesting way to produce a bibliography:
RFC2396: Uniform Resource Identifies (URI): Generic Syntax (ftp://ftp.rfc-editor.org/in-notes/rfc2396.txt)
stx2any/regression/link-abbrev-bug.html
BB
C
stx2any/regression/link-abbrev-bug.test
A
B
C
stx2any/regression/change-suffix.gather_stx_titles
-f test -t stx
stx2any/regression/link-abbrev-bug.latex
\documentclass[a4paper,notitlepage]{article}
\usepackage[T1]{fontenc}
\usepackage[latin1]{inputenc}
\newcommand{\strongemph}[1]{\textbf{#1}}
\newcommand{\litfmt}[1]{\texttt{#1}}
\title{}
\author{}
\date{}
\begin{document}
\maketitle
\tableofcontents
A
BB
C
\end{document}
stx2any/regression/link-abbrev-bug.param
--link-abbrevs --sed-preprocessor regression/link-abbrev-bug.sed
stx2any/regression/weird.gather_stx_titles
-p regr -f e.test
stx2any/regression/link-abbrev-toc.latex
\documentclass[a4paper,notitlepage]{article}
\usepackage[T1]{fontenc}
\usepackage[latin1]{inputenc}
\newcommand{\strongemph}[1]{\textbf{#1}}
\newcommand{\litfmt}[1]{\texttt{#1}}
\title{}
\author{}
\date{}
\begin{document}
\maketitle
\tableofcontents
\section{\label{foo}foo}
\subsection{\label{bar}bar}
\subsubsection{\label{baz}baz}
\section{\label{quux}quux}
\subsection{\label{buux}buux}
\section{\label{zuux}zuux}
Oh yes, go back to foo (\pageref{foo}).
\end{document}
stx2any/regression/link-abbrev-toc.param
--link-abbrevs --table-of-contents on
stx2any/regression/link-abbrev-toc.docbook-xml
quoted stuff
end of line, quoted stuff
, at the beginning of linelong quoted string
with
to try (
quotes
Quotes within
quotes
Quotes within
quotes
Quotes within
mark*ers Testing link abbreviations
to get a hold of what
they all
quote?quote?
quote?
— interlude —
final blow
it's alive
consider quoteback we go
They can have paragraphs
— interlude 2 —
This should be the next thing in the ingress. ?foo — a useful program
foo begins
foo in action, this is a link to heaven.
foo ends
This should be a centered,
compact,
nice list.
Normal running text, strong stuff, emphasised stuff, literal stuff. See this?
Google - named link; http://www.google.fi/ - unnamed link.
There are two kinds of common people:
kind people, and people in common.
— Frank D. Roosevelt,esq.
testpackage used
Float 1.
The first float.
Float 3.
The third float.
The fourth float.
Go slow:This is a note for your interest.
stx2any/regression/markup.test 0000644 0001750 0001750 00000003565 10471044066 020517 0 ustar pkalliok pkalliok 0000000 0000000 w_define_div(anglais)dnl w_define_env(foo,dnl `w_beg_para`'foo begins w_linebr`'w_nl',dnl `w_linebr`'w_nl`'foo ends`'w_nl')dnl w_def_in_fmt(html, Thank, )dnl w_man_desc(foo, a useful program) w_man_desc(a regression test document) w_beg(foo) foo in action, this is a link to w_link(http://heaven/, heaven). w_end(foo) w_begdiv(anglais)dnl This should eventually end up in the ingress. w_enddiv(anglais)dnl w_begdiv(ingr)dnl w_dumpdiv(anglais)dnl Thank you! w_enddiv(ingr)dnl w_beg(center) w_beg(compactlist) This should be a centered, compact, nice list. w_end(compactlist) w_end(center) w_beg(abstract) This should be the next thing in the ingress. ? w_end(abstract) Normal running text, w_strong(strong) stuff, w_emph(emphasised) stuff, w_literal(literal) stuff. w_beg(ifeq, foo, foo) See this? w_beg(ifeq, foo, bar) see this not? w_end(ifeq) w_end(ifeq) w_link(http://www.google.fi/, Google) - named link; w_url(http://www.google.fi/) - unnamed link. w_beg(citation, Frank D. Roosevelt, esq.) There are two kinds of common people: // kind people, and people in common. w_end(citation) w_img(myimage, A picture of a kaenggrakieppura posing) w_use(regression/testpackage)dnl w_use(regression/testpackage)dnl Float 1. w_beg(float, hnfm, The first float.) Nothing here, actually. w_end(float) Float 2. w_beg(float, nfm, w_label(f2, The second float.)) Nothing here, either. w_end(float) Please see w_refer(f2, float 2) for details. Float 3. w_beg(float, fmnh, The third float.) Well, here is at least something: w_img(marine_band, some drunken marines trying to produce music) w_end(float) Float 4. w_beg(float, mnh, The fourth float.) Still nothing. w_end(float) Good we had that sorted out, isn't it? w_beg(admonition, Note) This is a note for your interest. w_end(admonition) w_beg(admonition, Go slow) This admonition will give gentle words of caution. w_end(admonition) stx2any/regression/defn-nest.html 0000644 0001750 0001750 00000001747 10423671243 021070 0 ustar pkalliok pkalliok 0000000 0000000This admonition will give gentle words of caution.
hyvin toimii
huonosti toimii
Ja vielä:
foo
continuation
This should receive emphasis This might not receive emphasis This _ probably should receive emphasis Underscores _f nested b_ and between_words
Continuing multiple lines should work
Even now it should probably work indeed
foobar empha not_empha can't foobar empha not_empha can't
This is line: 20 in file: stdin
stx2any/regression/underscore.test 0000644 0001750 0001750 00000001060 10423671243 021355 0 ustar pkalliok pkalliok 0000000 0000000 title: underscore quoting tests char_coding: latin1 This should receive _emphasis_ This w_emph(might) not receive _emphasis_ This _ probably should receive _emphasis_ Underscores _f _nested_ b_ and between_words Continuing _multiple lines_ should work w_strong(Even now) it _should probably_ work indeed foobar _empha_ not_empha _can't_ `foobar' _empha_ not_empha _can't_ This is line: __line__ in file: __file__ In blockquote, how does an em dash -- even a little one -- work at the beginning of a line? (This has nothing to do with underscores.) stx2any/regression/run-tests 0000755 0001750 0001750 00000001664 10423671243 020207 0 ustar pkalliok pkalliok 0000000 0000000 #!/bin/sh #This file is under the license in ../LICENSE. export PAPERSIZE=a4 failed="" run_test() { param=regression/$1.param test=regression/$1.test ./scripts/stx2any `test -f $param && cat $param` -T $2 $test | \ diff -u regression/$1.$2 - || failed="$failed $1($2)" } for i in regression/*.test; do echo Running $i ... base=`basename $i .test` for fmt in html man latex docbook-xml; do run_test $base $fmt done done for i in regression/*.strip_stx; do echo Running $i ... base=`basename $i .strip_stx` ./scripts/strip_stx `cat $i` | \ diff -u regression/$base.stripped - || failed="$failed strip_stx($base)" done for i in regression/*.gather_stx_titles; do echo Running $i ... base=`basename $i .gather_stx_titles` ./scripts/gather_stx_titles `cat $i` regression/*.test | \ diff -u regression/$base.titles - || failed="$failed gather_stx_titles($base)" done test -n "$failed" && echo Failed tests: $failed test -z "$failed" stx2any/regression/markup.docbook-xml 0000644 0001750 0001750 00000005030 10471044066 021743 0 ustar pkalliok pkalliok 0000000 0000000In blockquote, how does an em dash — even a little one — work at the beginning of a line? (This has nothing to do with underscores.)
Frank D. Roosevelt,esq. There are two kinds of common people: kind people, and people in common.
quote?
consider quote
end of line, quoted stuff
end of line, emphasised stuff
end of line, strongly emphasised stuff
end of line, literal-formatted stuff.
quoted stuff
, at the beginning of line[1]
/emphasised / not, another try; nested emphasis for kiddies--
literal-formatted stuff
spanning multiple lines, emphasis
improperly nested emphasis markers
spanning multiple lines for fun
[1] this is a footnote
very long quoted string
with /inner/ emphasis and other
way *around* for them if you please and
goodies
to try (thank you)
Emphasis within
quotes
Quotes within
emphasis
Strongem within
quotes
Quotes within
strongem
Literal within
quotes
Quotes within
literal
Literal ''within'' emphasis
Emphasis _within_ literal
Literal ''within'' strongem
Strongem *within* literal
Quoted "quotes" for you
Quoted _emphasis_ for you
Quoted *strongem* for you
Quoted ''litfmt'' for you
unmatched end-of-emphasis/ marker "Different * emphasis / characters "" out '' of ''' context '''' j
proper /path/to/file and its literal cousin, /path/to/file
Some */random mixed** emphasis
mark*ers [2]
[2] and a footnote
that has
multiple lines
*strongemph*broken by* char /emphasis/broken by/ char literal'not broken by char ''literal''broken by'' char
literal'with quotes not broken anymore
literal'spanning multiple lines not broken by char
*strongemph * broken by* space-sep char /emphasis / broken by/ space-sep char One char c emph One char c strongemph
(underscores)
emphasis type 1 an underscore _ by itself; between_words double _nested emphasis for_ convenience This _should_not_ work
(line breaking)
// begin
in the // middle
end-line
end-paragraph
footnotes[3] should be
properly // numbered.[4] Does it not seem to be[5] so?
[3] footnote 1 for your pleasure
[4] footnote 2
[5] a word
with very many meanings
This[6] is bad practice
text
Paragraph followed by adjacent
Some text
normal text
Paragraph followed by
both section markers here are adjacent
Some text
Once more
{{{Block}}}
Another block:
content content content
And then:
content content content [empty line above]
followed by an explanation.
/emphasis/ *strong emphasis* line breaking // "quoted text" AFAIK multi-"line span stuff"
foo /bar/
Hello World!™ is a common program → stupid. stupid ← me, also.
This is a range in January–February of 2004. There are some special considerations—such as the em dash here. How many test do you need for changing a light bulb?—I guess 6–8.
stx2any/regression/abbrev.test 0000644 0001750 0001750 00000006047 10423671243 020457 0 ustar pkalliok pkalliok 0000000 0000000 ! headline 1 !! headline 2 !!! headline 3 !!!! headline 4 !! miscellaneous inline end of line, "quoted stuff" end of line, /emphasised stuff/ end of line, *strongly emphasised stuff* end of line, ''literal-formatted stuff''. "quoted stuff", at the beginning of line[[ this is a footnote ]] /emphasised / not, /another try/; /nested *emphasis* for kiddies/-- ''literal-formatted stuff // spanning multiple lines'', /emphasis/ improperly /nested *emphasis markers/ spanning multiple lines* for fun very "long quoted string with */inner/ emphasis* and other way /*around* for them/ if you please and goodies" to try (*thank you*) Emphasis "_within_" quotes Quotes _"within"_ emphasis Strongem "*within*" quotes Quotes *"within"* strongem Literal "''within''" quotes Quotes ''"within"'' literal Literal _''within''_ emphasis Emphasis ''_within_'' literal Literal *''within''* strongem Strongem ''*within*'' literal Quoted `"'quotes" for you Quoted `_'emphasis_ for you Quoted `*'strongem* for you Quoted '`''litfmt'' for you unmatched end-of-emphasis/ marker "Different * emphasis / characters "" out '' of ''' context '''' j proper /path/to/file and its literal cousin, ''/path/to/file'' Some */random mixed** "/emphasis"/ mark*ers [[ and a footnote that /has multiple lines/ ]] *strongemph*broken by* char /emphasis/broken by/ char ''literal'not broken by'' char ''literal''broken by'' char ''literal'with quotes'' `not broken anymore' ''literal'spanning multiple lines'' not broken by char *strongemph * broken by* space-sep char /emphasis / broken by/ space-sep char One char /c/ emph One char *c* strongemph (underscores) _emphasis_ type 1 an underscore _ by _itself_; between_words double _nested _emphasis_ for_ convenience This _should_not_ work (line breaking) // begin in the // middle end-line // end-paragraph // !! footnotes footnotes[[ footnote 1 for your pleasure ]] should be properly // numbered.[[ footnote 2 ]] Does it not seem to be[[ a word with very many meanings ]] so? This[[ footnote 4 ]] is bad practice !!! [[ footnote ]] in heading text !! section breaking Paragraph followed by adjacent !!! headline Paragraph in between !!! another headline Some text !!! headline followed by normal text !!! two adjacent !!! headlines Paragraph followed by -- section marker --- both section markers here are adjacent ---- Some text ----- Once more ------ ------- !! preformatted {{{Block}}} Another block: {{{ content content content }}} And then: {{{ content content content [empty line above] }}} followed by an explanation. - this one is within a list, I reckon: {{{ /emphasis/ *strong emphasis* line breaking // "quoted text" AFAIK multi-"line span stuff" }}} - embedded list and literal block: {{{ foo /bar/ }}} !! special characters Hello World!(tm) is a common program -> stupid. stupid <- me, also. This is a range in January--February of 2004. There are some special considerations -- such as the em dash here. How many test do you need for changing a light bulb? -- I guess 6--8. -- blockquote ends this thing -- stx2any/regression/table.man 0000644 0001750 0001750 00000000743 10423671243 020076 0 ustar pkalliok pkalliok 0000000 0000000 .TH "" "" "" "" \& .TS l c . T{ \&Emigration is nice! \& T} foo T{ \&Immigration is even nicer! \& T} bar \& .TE \& .TS l l l . account name T{ \& miscellaneous notes \& T} _ pkalliok Panu T{ \& not trustworthy, but a nice fellow overall. 
Good to \ get acquainted with. \& T} atehwa Panu T{ \& the same person \& T} root superuser T{ \& found in every Unix under the sky \& T} \& .TE \& .TS r c l . j " , L J ` _ ' \& .TE .PP .RS 4 .IP \(bu 4 \&still works? .RE stx2any/regression/prefix-strip.gather_stx_titles 0000644 0001750 0001750 00000000016 10423671243 024415 0 ustar pkalliok pkalliok 0000000 0000000 -p regression stx2any/regression/table.latex 0000644 0001750 0001750 00000001410 10423671243 020430 0 ustar pkalliok pkalliok 0000000 0000000 \documentclass[a4paper,notitlepage]{article} \usepackage[T1]{fontenc} \usepackage[latin1]{inputenc} \newcommand{\strongemph}[1]{\textbf{#1}} \newcommand{\litfmt}[1]{\texttt{#1}} \title{} \author{} \date{} \begin{document} \maketitle \tableofcontents \begin{tabular}{p{0.3\textwidth}c} Emigration is nice! & foo \\ Immigration is even nicer! & bar \\ \end{tabular} \begin{tabular}{llp{0.3\textwidth}} account & name & miscellaneous notes \\ \hline pkalliok & Panu & not trustworthy, but a nice fellow overall. Good to get acquainted with. \\ atehwa & Panu & the same person \\ root & superuser & found in every Unix under the sky \\ \end{tabular} \begin{tabular}{rcl} j&"&,\\ L& &J\\ `&_&'\\ \end{tabular} \begin{itemize} \item still works? \end{itemize} \end{document} stx2any/LICENSE 0000644 0001750 0001750 00000001152 10423671243 015132 0 ustar pkalliok pkalliok 0000000 0000000 ! stx2any - an implementation of conversion from Stx to multiple formats Authors:: - (c) 2003, 2004, 2005, 2006 Panu Kalliokoski This software is available free of charge for distribution, modification and use (by executing the program) as long as the following conditions are met: # Every work copied or derived from this software distributed in any form must come with this license; # The only permitted change to this license is adding one's name in the authors section when having modified the software. THE AUTHORS CANNOT BE HELD RESPONSIBLE FOR ANY DIRECT OR INDIRECT HARM THIS SOFTWARE MIGHT CAUSE. stx2any/latex/ 0000755 0001750 0001750 00000000000 10446213102 015232 5 ustar pkalliok pkalliok 0000000 0000000 stx2any/latex/make.lm4 0000644 0001750 0001750 00000010521 10424125205 016566 0 ustar pkalliok pkalliok 0000000 0000000 w_doc_id(s2aimpl.latex)dnl This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE w_use(examples/reflection-disclaimer) Definitions for LaTeX. LaTeX provides its own hack diversion, ''preamble''. There is also some neatism to its own name. {{{ w_define_div(preamble) define(`LaTeX', ``\LaTeX{}'') define(`@w_documentclass', `article') define(`w_documentclass', `w_set_or_get(`@w_documentclass', `$1')') }}} Some macros (URLs and inline graphics) need package declarations. We use this to only include them once. {{{ define(`w_ensure_pkg', `ifdef(`@w_has_pkg_$1',, `define(`@w_has_pkg_$1',t)w_begdiv(preamble)\usepackage{$1} w_enddiv(preamble)')') }}} Paragraphs and headings. We leave paragraphs to LaTeX itself. It has similar paragraph rules to ours. {{{ define(`w_paragraph',) define(`w_headl', `w_newindent(0)'dnl `\w_pickn($1, section, subsection, subsubsection, paragraph)`''dnl `ifelse(w_do_numbering,true,,*)'dnl `{ifelse(w_do_link_abbr,true,`w_autolabel(`$2')',`$2')}') }}} Block system environments. Finding definitions in lists is left to LaTeX. It has similar rules to ours. 
{{{ w_define_env(-, `\begin{itemize}w_nl`'', `\end{itemize}w_nl`'') w_define_env(#, `\begin{enumerate}w_nl`'', `\end{enumerate}w_nl`'') w_define_env(:, `\begin{description}w_nl`'', `\end{description}w_nl`'') w_define_env(q, `\begin{quotation}w_nl`'', `\end{quotation}w_nl`'') w_define_env(t,,) define(`w_listitem', `\item ') define(`w_defnterm', `\item[{$1}] ') }}} Other environments. {{{ w_define_env(`footnote', `\footnote{', `}') w_define_env(`litblock', `w_ensure_pkg(alltt)\begin{alltt}', `\end{alltt}') w_define_env(`center', `\begin{center}', `\end{center}') w_define_env(`abstract', `w_begdiv(ingr)\begin{abstract}`'w_nl', `\end{abstract}w_enddiv(ingr)`'w_nl') w_define_env(`comment', `pushdef(`w_softbr', `% ')pushdef(`w_softpara', `% ')% ', `w_nl`'popdef(`w_softbr')popdef(`w_softpara'){}') w_define_env(`w_float_n', `\begin{figure}[htb]', `\caption{$1}w_nl\end{figure}') w_define_env(`w_float_f', `\begin{figure}[tbp]', `\caption{$1}w_nl\end{figure}') w_define_env(`w_float_m', `\marginpar{', `\\ $1 }') }}} Emphasis. {{{ define(`w_literal', `\litfmt{$1}') define(`w_emph', `\emph{$1}') define(`w_strong', `\strongemph{$1}') }}} Other inlines. {{{ define(`w_linebr', `\\') define(`w_sectbreak', `w_nl\begin{center}\rule{0.5\textwidth}{0.0$1em}\end{center}w_nl') define(`w_link', `w_ensure_pkg(url)`'$2`'w_footnote(`\url{$1}')') define(`w_img', `w_ensure_pkg(graphicx)\includegraphics{w_file(`$1.'w_picture_suffix)}') define(`w_label', `\label{$1}$2') define(`w_refer', `$2 (\pageref{$1})') define(`w_url', `w_ensure_pkg(url)\url{$1}') }}} Slides. {{{ w_define_env(`slide', `w_ensure_slides\begin{slide}', `\end{slide}') define(`w_ensure_slides', `w_documentclass(seminar)'dnl `ifdef(`@w_slidestyle_done',, `define(`@w_slidestyle_done',t)undefine(`w_make_title')'dnl `define(`@w_doctype_parms', w_gather(patsubst(defn(`@w_doctype_parms'), a4paper, a4)))'dnl `ifelse(w_slideheader`'w_slidefooter,,,`w_slidestyle_setup')')') define(`w_slidestyle_setup', `w_begdiv(preamble)\newpagestyle{slidepage} {\hfill{}w_slideheader\hfill{}}{\hfill{}w_slidefooter\hfill{}} w_enddiv(preamble)w_begdiv(frontmatter)\slidepagestyle{slidepage} w_enddiv(frontmatter)') }}} Tables. {{{ define(`w_make_tablespec', `ifelse(`$*',,, `ifelse(`$1',p,`p{0.3\textwidth}',`$1')`'w_make_tablespec(shift($@))')') w_define_env(`w_table', `pushdef(`w_caption', `\caption{$1}')'dnl `\begin{tabular}{w_make_tablespec($@)}w_nl', `popdef(`w_caption')\end{tabular}w_nl') w_define_env(`w_row',,`undefine(`@w_in_row_flag')\\') w_define_env(`w_cell', `ifdef(`@w_in_row_flag',`&')define(`@w_in_row_flag',t)',) define(`w_table_rule', `\hline') }}} Special and quoted characters. 
{{{ define(`w_lt', `\ensuremath{<}') define(`w_gt', `\ensuremath{>}') define(`w_bs', `\ensuremath{\backslash}') define(`w_obr', `\{') define(`w_bar', `\ensuremath{|}') define(`w_cbr', `\}') define(`w_amp', `\&') define(`w_us', `\_') define(`w_ct', `\^{}') define(`w_td', `\~{}') define(`w_dol', `\$') define(`w_hs', `\#') define(`w_pct', `\%') define(`w_emdash', `---') define(`w_endash', `--') define(`w_ellipsis', `\ldots{}') define(`w_copyrightsign', `\copyright{}') define(`w_trademarksign', `\texttrademark{}') define(`w_larrow', `\ensuremath{\leftarrow{}}') define(`w_rarrow', `\ensuremath{\rightarrow{}}') }}} stx2any/latex/settings.lsh 0000644 0001750 0001750 00000000757 10424125242 017616 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE {{{ test -z "$NUMBERING" && NUMBERING=on test -z "$PAPERCONF" && PAPERCONF=/etc/papersize test -z "$PAPERSIZE" && test -f "$PAPERCONF" && \ PAPERSIZE=`sed '/^#/d' "$PAPERCONF"` test -z "$PAPERSIZE" && PAPERSIZE=a4 test -z "$LATEX_PARAMS" && LATEX_PARAMS="${PAPERSIZE}paper,notitlepage" M4OPTIONS="$M4OPTIONS -D@w_doctype_parms=$LATEX_PARAMS" test -z "$PIC_SUFFIX" && PIC_SUFFIX=eps }}} stx2any/latex/templ.lm4 0000644 0001750 0001750 00000001363 10424125211 016773 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE {{{ \documentclass[defn(`@w_doctype_parms')]{defn(`@w_documentclass')} ifdef(`@w_language',`\usepackage[defn(`@w_language')]{babel}',`dnl') \usepackage[T1]{fontenc} ifelse(defn(`@w_char_coding'),ascii,`dnl',`\usepackage[defn(`@w_char_coding')]{inputenc}') \newcommand{\strongemph}[1]{\textbf{#1}} \newcommand{\litfmt}[1]{\texttt{#1}} \title{defn(`@w_title')} \author{defn(`@w_author')} \date{defn(`@w_date')} w_dumpdiv(preamble)dnl \begin{document} w_dumpdiv(frontmatter)dnl ifelse(w_make_title,true,\maketitle,`dnl') w_dumpdiv(ingr)dnl ifelse(w_make_toc,true,\tableofcontents,`dnl') w_dumpdiv(body)dnl w_dumpdiv(backmatter)dnl \end{document} }}} stx2any/README 0000644 0001750 0001750 00000004265 10423671243 015015 0 ustar pkalliok pkalliok 0000000 0000000 w_title(Stx document tools distribution)dnl w_doc_id(stxreadme)dnl w_author(Panu A. Kalliokoski)dnl ! What is this? This software distribution is a bunch of tools to deal with structured text (Stx). Stx is a kind of plain text format with semantic markup. Stx is documented in: - [Stx-doc.html] - [Stx-ref.html] Of some interest may be also [stxaccred]. There are [a homepage][1] (currently in the author's wiki), and a distribution page at [2] for this project. You can contact the author at [mymail]. [1] http://sange.fi/~atehwa/cgi-bin/piki.cgi/stx2any [2] http://sange.fi/~atehwa/ [mymail] mailto:atehwa@sange.fi ! Included tools The following tools are included: [s2aman]:: This is the main tool, an utility to convert Stx into other formats. [ssman]:: A simple literal programming tool for document-programs written in Stx. [gstman]:: A utility for generating cross references between documents written in Stx. [eufsman]:: A utility for producing "Usage:" messages and manual pages from the same source. [h2sman]:: Conversion tool from HTML to Stx. This makes it easy to import documents written in other formats into Stx. Besides, you can tidy an HTML document by converting it first into Stx, then back into (X)HTML. ''stx-mode.el'':: An ''emacs'' mode for writing Stx documents. 
Provides syntax highlighting, paragraph filling, and simple interfaces to ''stx2any'' for those who cannot use the command line. ! Installation A traditional ''make && make install'' will do. You can override the ''make'' variable ''PREFIX'' to install somewhere else than ''/usr''. Other variables you might want to override include ''EMACSDIR'' and ''MANDIR''. Take a look at the Makefile[../Makefile]. The installation process of stx2any normally uses ''w3m'' to produce "usage" messages from Stx. If you don't want to install ''w3m'', you can give ''make'' the option ''BUILD_USAGE_WITH=plain'' to avoid using ''w3m''. The only consequence is that "usage" messages (the output of ''stx2any --help'', for instance) will look cruder. If you have a debian box, you can build a debian package directly by saying ''fakeroot debian/rules binary'' and installing the resulting package with ''dpkg -i''. stx2any/scripts/ 0000755 0001750 0001750 00000000000 10424125514 015611 5 ustar pkalliok pkalliok 0000000 0000000 stx2any/scripts/strip_stx 0000755 0001750 0001750 00000001537 10424125505 017604 0 ustar pkalliok pkalliok 0000000 0000000 #!/bin/sh # This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski # and released under the license in ../LICENSE BASE=. usage() { cat $BASE/messages/strip_stx.usage 1>&2 exit $1 } NOT_PROG=d SEP=`echo | tr '\012' '\001'` BEG_COMMENT=d END_COMMENT=d BEG_STR= END_STR= while true; do case "$1" in -c) NOT_PROG="s$SEP^$SEP$2$SEP" shift ;; -B) BEG_COMMENT="s$SEP$SEP$2$SEP" END_COMMENT="s$SEP$SEP$3$SEP" BEG_STR="$2" END_STR="$3" test "$NOT_PROG" = d && NOT_PROG=b shift shift ;; --help|-\?) usage 0 ;; --version|-V) cat $BASE/messages/version.msg exit 0 ;; *) break esac shift done ERRORS= test -n "$BEG_STR" && echo "$BEG_STR" sed -e "/^{{{$/,/^}}}$/! $NOT_PROG" \ -e "/^}}}\$/$BEG_COMMENT" \ -e "/^{{{\$/$END_COMMENT" $@ || ERRORS=yes test -n "$END_STR" && echo "$END_STR" test -z "$ERRORS" || usage 1 stx2any/scripts/html2stx 0000755 0001750 0001750 00000020532 10424125472 017331 0 ustar pkalliok pkalliok 0000000 0000000 #!/usr/bin/env python # This file is copyright (c) 2004 Aaron Swartz, copyright (c) 2004, 2005, 2006 Panu Kalliokoski # This file is released under the GNU General Public License (GPL), version 2. # Derived from html2text, version 2.11, by Aaron Swartz. """html2stx: Turn HTML into neat Stx source, stripping everything that cannot be expressed in Stx.""" __author__ = "Panu A. Kalliokoski" __copyright__ = "(C) 2004 Aaron Swartz; 2004, 2005 Panu Kalliokoski. GNU GPL 2." 
import re, sys, urllib, htmlentitydefs, codecs, StringIO import sgmllib sgmllib.charref = re.compile('([xX]?[0-9a-fA-F]+)[^0-9a-fA-F]') # Use Unicode characters instead of their ascii psuedo-replacements UNICODE_SNOB = 0 ### Entity Nonsense ### def name2cp(k): if k == 'apos': return ord("'") if hasattr(htmlentitydefs, "name2codepoint"): # requires Python 2.3 return htmlentitydefs.name2codepoint[k] else: k = htmlentitydefs.entitydefs[k] if k.startswith("") and k.endswith(";"): return int(k[2:-1]) # not in latin-1 return ord(codecs.latin_1_decode(k)[0]) unifiable = {'rsquo':"'", 'lsquo':"'", 'rdquo':'"', 'ldquo':'"', 'mdash':' -- ', 'ndash':'--'} unifiable_n = {} for k in unifiable.keys(): unifiable_n[name2cp(k)] = unifiable[k] def charref(name): if name[0] in ['x','X']: c = int(name[1:], 16) else: c = int(name) if not UNICODE_SNOB and c in unifiable_n.keys(): return unifiable_n[c] else: return unichr(c) def entityref(c): if not UNICODE_SNOB and c in unifiable.keys(): return unifiable[c] else: try: name2cp(c) except KeyError: return "&%s;" % c else: return unichr(name2cp(c)) def replaceEntities(s): s = s.group(1) if s[0] == "#": return charref(s[1:]) else: return entityref(s) r_unescape = re.compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));") def unescape(s): return r_unescape.sub(replaceEntities, s) def fixattrs(attrs): # Fix bug in sgmllib.py if not attrs: return [] newattrs = [] for attr in attrs: newattrs.append((attr[0], unescape(attr[1]))) return newattrs ### End Entity Nonsense ### def hn(tag): if not (tag[0] == 'h' and len(tag) == 2): return False try: return max(int(tag[1])-1, 1) except ValueError: return False class _html2text(sgmllib.SGMLParser): line_length = 72 begspace_re = re.compile('^( *)(.*)$') endspace_re = re.compile('^(.*)( +)$') def __init__(self, out=sys.stdout.write): sgmllib.SGMLParser.__init__(self) if out is None: self.out = self.outtextf else: self.out = out self.outtext = u'' self.quiet = [] self.p_p = 0 self.outcount = 0 self.list = [] self.space = '' self.start = 1 self.blockquote = 0 self.pre = 0 self.lastWasNL = 1 self.column = 0 self.charset = 'latin1' def outtextf(self, s): self.outtext += s def close(self): sgmllib.SGMLParser.close(self) self.pbr() self.o('', 0, 'end') return self.outtext def handle_charref(self, c): self.o(charref(c)) def handle_entityref(self, c): self.o(entityref(c)) def unknown_starttag(self, tag, attrs): self.handle_tag(tag, attrs, 1) def unknown_endtag(self, tag): self.handle_tag(tag, None, 0) def handle_tag(self, tag, attrs, start): attrs = dict(fixattrs(attrs)) if not start: self.space = '' if hn(tag): self.p() if start: self.pre = 1 self.o(hn(tag)*"!" 
+ ' ') else: self.pre = 0 if tag in ['p', 'div']: self.p() if tag == "br" and start: self.o("//") self.pbr() if tag == 'hr' and start: self.p() self.o('----') self.p() if tag in ["head", "style", "script"]: if start: self.quiet.append(1) else: self.quiet.pop() if tag == 'title': if start: self.quiet.append(0) self.o("w_title(") else: self.o(")dnl") self.begin_line() self.quiet.pop() if tag == 'meta' and start: name = attrs.get('name') or \ attrs.get('http-equiv') or '' content = attrs.get('content') if name.lower() == 'author': self.o("w_author(%s)dnl" % content, 0, 1) self.begin_line() elif name.lower() in ['date', 'last-modified']: self.o("w_date(%s)dnl" % content, 0, 1) self.begin_line() elif name.lower() == 'content-type': match = re.search('[Cc]harset=(.*)', content) if match: try: charset = { 'ISO-8859-1':'latin1', 'ISO-8859-15':'latin9', 'US-ASCII':'ascii', 'UTF-8':'utf8' }[match.group(1).upper()] except KeyError: charset = 'latin1' self.charset = charset self.o("w_char_coding(%s)dnl" % charset, 0, 1 ) self.begin_line() if tag == "dl": self.p() if tag == "dt": if start: self.pre = 1 self.pbr() else: self.o("::") self.pre = 0 self.pbr() if tag in ["blockquote", "dd"]: if start: if tag != "dd": self.p() self.blockquote += 1 else: self.blockquote -= 1 if tag == "dd": self.pbr() else: self.p() if tag in ['em', 'i', 'u']: self.o("_") if tag in ['var', 'cite', 'dfn']: self.o("/") if tag in ['kbd', 'samp', 'code', 'tt']: self.o("''") if tag == "q": self.o('"') if tag in ['strong', 'b']: self.o("*") if tag == "a": if start: tgt = attrs.get('href', '') lbl = attrs.get('name', '') if lbl: self.o("w_label(%s, " % lbl) elif tgt and tgt[0]=='#': self.o("w_refer(%s, " % tgt[1:]) else: self.o("w_link(%s, " % tgt) else: self.o(")") if tag == "img" and start: tgt = re.sub('\.(jpe?g|gif|png)$', '', attrs.get('src', '')) alt = attrs.get('alt', '') self.o("w_img(%s, %s)" % (tgt, alt)) if tag in ["ol", "ul"]: if start: self.list.append(tag) else: self.list.pop() self.p() if tag == 'li': if start: self.pbr() if self.list: li = self.list.pop() else: li = "ul" if li == "ul" and len(self.list)<=1: self.o("- ") elif li == "ul": self.o("* ") elif li == "ol": self.o("# ") self.list.append(li) else: self.pbr() if tag == 'table': if start: self.p() self.o('w_beg(table)') self.pbr() else: self.pbr() self.o('w_end(table)') self.p() if tag == 'tr' and not start: self.o("//") self.begin_line() if tag in ['td', 'th'] and not start: self.o("||") if tag == "pre": if start: self.p() self.pre = 1 self.o("{{{") else: if not self.lastWasNL: self.pbr() self.o("}}}") self.pre = 0 self.p() self.start = start def pbr(self): if self.p_p == 0: self.p_p = 1 def p(self): self.p_p = 2 def begin_line(self): self.out('\n') self.out('\t' * self.blockquote) self.out(' ' * len(self.list)) self.column = self.blockquote * 8 + len(self.list) * 2 self.lastWasNL = 1 def o(self, data, puredata=0, force=0): if self.quiet and self.quiet[-1] and not force: return if puredata and not self.pre: data = re.sub('\n[ \t]*', '\n', data) data = re.sub('\\s', ' ', data) sp, data = self.begspace_re.match(data).groups() self.space += sp if not data and not force: return space = self.space if force == 'end': # It's the end. 
self.p_p = 0 self.out("\n") space = '' if self.p_p: if self.p_p > 1 and not self.lastWasNL: self.out('\n') self.begin_line() space = '' if space and not self.start and not self.lastWasNL: if self.column > self.line_length: self.begin_line() else: self.out(space) if not self.pre: while len(data) + self.column > self.line_length: spl = self.split_line( data ) if not spl: break line, data = spl self.out(self.decode(line)) self.begin_line() self.p_p = 0 if puredata and not self.pre: match = self.endspace_re.match(data) if match: data, self.space = match.groups() else: self.space = '' else: self.space = '' self.out(self.decode(data)) self.column += len(data) if data: self.lastWasNL = data[-1] == '\n' self.outcount += 1 def split_line( self, line ): match = re.match('^(.{0,%d}) (.*)$' % max(self.line_length - self.column, 15), line) if not match: return None return match.groups() def decode(self, s): if type(s) is unicode: if self.charset == 'latin9': return s.encode('latin1') return s.encode(self.charset) return s def handle_data(self, data): self.o(data, 1) self.start = 0 def unknown_decl(self, data): pass def html2text_file(html, out=sys.stdout.write): h = _html2text(out) h.feed(html) h.feed("") return h.close() if __name__ == "__main__": try: data = open(sys.argv[1], 'r').read() except IndexError: data = sys.stdin.read() html2text_file(data) stx2any/scripts/gather_stx_titles 0000755 0001750 0001750 00000002440 10424125332 021271 0 ustar pkalliok pkalliok 0000000 0000000 #!/bin/sh # This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski # and released under the license in ../LICENSE BASE=. usage() { cat $BASE/messages/gather_stx_titles.usage 1>&2 exit $1 } while test -n "$1"; do case "$1" in -f) FROM_SUFFIX="$2" shift ;; -f*) FROM_SUFFIX=`echo $1 | cut -c3-` ;; -t) TO_SUFFIX="$2" shift ;; -t*) TO_SUFFIX=`echo $1 | cut -c3-` ;; -p) PREFIX="$2" shift ;; -p*) PREFIX=`echo $1 | cut -c3-` ;; --help|-\?) usage 0 ;; --version|-V) cat $BASE/messages/version.msg exit 0 ;; *) break esac shift done for i in $@; do echo -n "define(\`@w_file_exists_" echo -n "$i" | sed -e "s#$FROM_SUFFIX\$#$TO_SUFFIX#" -e "s#^$PREFIX/*##" | tr -d '\012\015' echo "', t)dnl" done ( cat << DEF divert(-1) define(\`w_quotewrap', \`\`\`\$1''') define(\`w_makesubst', \`patsubst(patsubst(\`\$1', \`$FROM_SUFFIX\$', \`$TO_SUFFIX'), \`^$PREFIX/*',)') define(\`w_file', \`w_quotewrap(w_makesubst(w_real_file))') define(\`w_title', \`ifelse(\`\$1',,,\`divert(0)dnl') \`define'(\`\`@w_title_of_''w_file, \`\`\$1'')\`dnl' divert(-1)') define(\`w_doc_id', \`divert(0)dnl \`define'(\`\`@w_filename_of_\$1'', w_file)\`dnl' divert(-1)') DEF for i in $@; do echo "define(\`w_real_file', \`$i')" sed -f $BASE/common/header.sed $i done ) | m4 || usage 1 stx2any/scripts/stx2any 0000755 0001750 0001750 00000007205 10424125512 017151 0 ustar pkalliok pkalliok 0000000 0000000 #!/bin/sh # This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski # and released under the license in ../LICENSE BASE=. 
usage() { cat $BASE/messages/stx2any.usage 1>&2 exit $1 } DEVICE=html TEMPLDEVICE= SECURE= QUOTE_AFTER= NUMBERING= TABLE_OF_CONTENTS= MAKE_TITLE=on LINKABBREVS= EMDASH_SEP=" " LATEX_PARAMS= HTML_PARAMS= PIC_SUFFIX= PREPROC="" POSTPROC=cat M4OPTIONS="-I$BASE/site-packages" SOURCES= while test -n "$1"; do case "$1" in -T) DEVICE="$2" shift ;; -T*) DEVICE=`echo $1 | cut -c3-` ;; --quote) QUOTE_AFTER="-f $BASE/common/quote.sed" ;; --quote-me-harder) PREPROC="$PREPROC -f $BASE/common/quote_us.sed" ;; --more-secure) SECURE="$BASE/common/secure.m4" ;; --numbering) NUMBERING="$2" shift ;; --table-of-contents) TABLE_OF_CONTENTS="$2" shift ;; --make-title) MAKE_TITLE="$2" shift ;; --link-abbrevs) PREPROC="$PREPROC -f $BASE/common/linking.sed" M4OPTIONS="$M4OPTIONS -Dw_do_link_abbr=true" LINKABBREVS=on ;; --no-template) TEMPLDEVICE=common ;; --symmetric-crossrefs) M4OPTIONS="$M4OPTIONS -Dw_symmetric_crossrefs=t" ;; --latex-params) LATEX_PARAMS="$2" shift ;; --html-params) HTML_PARAMS="$2" shift ;; --picture-suffix) PIC_SUFFIX="$2" shift ;; --no-emdash-separate) EMDASH_SEP= ;; --sed-preprocessor) PREPROC=" -f \"$2\" $PREPROC" shift ;; --help|-\?) usage 0 ;; --version|-V) cat $BASE/messages/version.msg exit 0 ;; -) SOURCES="$SOURCES -" ;; -*) M4OPTIONS="$M4OPTIONS \"$1\"" ;; *) SOURCES="$SOURCES $1" esac shift done if test -n "$SECURE"; then if echo "$M4OPTIONS" | grep '[;|\\`&>]' >/dev/null; then echo "Unsecure characters in the options: $M4OPTIONS" exit 1 fi if echo "$PREPROC" | grep '[;|\\`&>]' >/dev/null; then echo "Unsecure characters in the preprocessor script: $M4OPTIONS" exit 1 fi fi case "$DEVICE" in ps) POSTPROC='groff -Tps -t -man' DEVICE=man ;; text) POSTPROC='w3m -dump -T text/html -cols 80' DEVICE=html ;; xhtml) POSTPROC='tidy -q -asxml' DEVICE=html ;; esac if test ! -d "$BASE/$DEVICE"; then echo "unknown output format: $DEVICE" 1>&2 exit 1 fi . 
$BASE/$DEVICE/settings.sh test -z "$TEMPLDEVICE" && TEMPLDEVICE="$DEVICE" test -z "$TABLE_OF_CONTENTS" && TABLE_OF_CONTENTS="$NUMBERING" set_onoff_option() { case "$1" in y*|on) M4OPTIONS="$M4OPTIONS -D$2=true" ;; n*|off) ;; *) echo "Unknown $3 setting: $1" 1>&2 exit 1 esac } set_onoff_option "$NUMBERING" w_do_numbering numbering set_onoff_option "$TABLE_OF_CONTENTS" w_make_toc table-of-contents set_onoff_option "$MAKE_TITLE" w_make_title make-title test -z "$SOURCES" && SOURCES=- trap 'echo "Subprocess m4 exited with an error" 1>&2 && echo "Try \"$0 --help\" for help on command line options" 1>&2' 13 PARENT=$$; export PARENT for i in $SOURCES; do echo -n "define(\`w_real_file', \`$i')" echo "define(\`w_line_base', not yet in file)dnl" test -n "$LINKABBREVS" -a "X$i" != "X-" && \ sed -f $BASE/common/gatherlinkmap.sed "$i" echo "define(\`w_line_base', __line__)dnl" cat "$i" done | \ sed -f $BASE/common/quote_quotes.sed | \ eval sed -f $BASE/common/header.sed -f $BASE/common/wrapcalls.sed $PREPROC | \ sed -f $BASE/common/emphasis.sed | \ sed -f $BASE/common/inline.sed -f $BASE/common/block.sed $QUOTE_AFTER | \ ( eval m4 "\"-Dw_outputfmt=$DEVICE\"" "\"-Dw_picture_suffix=$PIC_SUFFIX\"" \ "\"-Dw_emdash_separate=$EMDASH_SEP\"" "\"-D@w_bodytag_params=$HTML_PARAMS\"" \ "$M4OPTIONS" $SECURE \ $BASE/common/common.m4 \ $BASE/common/markup.m4 \ $BASE/$DEVICE/make.m4 \ $BASE/common/begin.m4 - $BASE/common/end.m4 \ $BASE/$TEMPLDEVICE/templ.m4 \ $BASE/common/cleanup.m4 || kill -13 $PARENT ) | \ $POSTPROC stx2any/scripts/extract_usage_from_stx 0000755 0001750 0001750 00000001143 10424125325 022315 0 ustar pkalliok pkalliok 0000000 0000000 #!/bin/sh # This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski # and released under the license in ../LICENSE BASE=. usage() { cat $BASE/messages/extract_usage_from_stx.usage 1>&2 exit $1 } case "$1" in --help|-\?) usage 0 ;; --version|-V) cat $BASE/messages/version.msg exit 0 ;; esac sed -n \ -e '/^! *SYNOPSIS/,/^!/{' \ -e '/^$/,/./{' \ -e 's#^[^ !]#Usage: ' \ -e '}' \ -e '/^!/!p' \ -e '}' \ -e '/^! *OPTIONS/,/^!/{' \ -e 's#^! *OPTIONS.*#Options:#p' \ -e '/^!/d' \ -e '/^[^ ].*::$/,/\./{' \ -e 's#\. .*#.#' \ -e 'p' \ -e '}' \ -e '}' $@ || usage 1 stx2any/common/ 0000755 0001750 0001750 00000000000 10471044066 015416 5 ustar pkalliok pkalliok 0000000 0000000 stx2any/common/quote.lsed 0000644 0001750 0001750 00000000710 10424125234 017415 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Quoting of possible special characters. {{{ s#<#`'w_lt`'#g s#>#`'w_gt`'#g s#`'w_amp`'#g s#^\.#w_bldot`'# s#^'#w_blap`'# s#\\#`'w_bs`'#g s#{#`'w_obr`'#g s#|#`'w_bar`'#g s#}#`'w_cbr`'#g s#\^#`'w_ct`'#g s#~#`'w_td`'#g s/\([^,]\)#/\1`'w_hs`'/g s#%#`'w_pct`'#g }}} The special case of ''w_hs'' is because numbered lists put it in the call of w_item. stx2any/common/wrapcalls.lsed 0000644 0001750 0001750 00000000661 10424125237 020260 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Prepare input so that every direct macro call can be checked against definedness. We protect against meddling with defines, because sometimes the users will want to legitimately redefine stx2any macros. 
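As a reading aid before the actual sed rules below, here is a rough Python sketch of the same rewrite. It is illustrative only and not part of stx2any; the regular expressions and the w_invoke name are taken from this file, while the function name and the sample line are made up.
{{{
import re

def wrap_calls(line):
    # Skip lines that touch define() or that already contain m4 quoting,
    # then wrap direct w_* calls in w_invoke(`...') so that calls to
    # undefined macros can be reported later instead of passing through.
    if 'define' in line or '`' in line:
        return line
    line = re.sub(r"([^A-Za-z0-9])(w_[a-z_]+)", r"\1w_invoke(`\2')", line)
    line = re.sub(r"^(w_[a-z_]+)", r"w_invoke(`\1')", line)
    return line

print(wrap_calls("see w_crosslink(foo) for details"))
# -> see w_invoke(`w_crosslink')(foo) for details
}}}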
{{{ /define/!{ /`/!{ s#\([^A-Za-z0-9]\)\(w_[a-z_][a-z_]*\)#\1w_invoke(`\2')#g s#^w_[a-z_][a-z_]*#w_invoke(`&')# } } }}} stx2any/common/quote_us.lsed 0000644 0001750 0001750 00000001130 10424125236 020123 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Quoting underscores and dollar signs... First, quote-quote underscores that should _not_ be quoted. {{{ s#__line#UNDERSCORETHINGYUNDERSCORETHINGYline#g s#__file#UNDERSCORETHINGYUNDERSCORETHINGYfile#g s#w_#wUNDERSCORETHINGY#g t moreus : moreus s#\(UNDERSCORETHINGY[a-z]*\)_#\1UNDERSCORETHINGY#g t moreus }}} Then, do the actual quoting and quote-quote unquoting. {{{ s#_#`'w_us`'#g s#UNDERSCORETHINGY#_#g }}} Dollar signs are also problematic. {{{ /define/!{ /`/!{ s#\$#`'w_dol`'#g } } }}} stx2any/common/inline.lsed 0000644 0001750 0001750 00000002665 10424125230 017545 0 ustar pkalliok pkalliok 0000000 0000000 w_doc_id(s2aimpl.inl)dnl This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Inline elements may happen anywhere in text. However, they are different from emphasis in that they are not constrained by paragraph boundaries and / or don't require both an opening and closing mark to be present. Footnotes are transformed directly into the corresponding environment: {{{ s#\[\[$#`'w_beg(footnote)#g s#^\( *\)\]\]#\1w_end(footnote)`'#g s#\[\[[- ]#`'w_beg(footnote)`'#g s#[- ]\]\]#`'w_end(footnote)`'#g }}} Several formatting rules do not apply within a literal block. Because of this, we branch past the rest of this stuff _and_ all blocks: {{{ /^{{{$/,/^}}}$/ { s#^}}}$#w_end(litblock)# s#^{{{$#w_beg(litblock)# b end } }}} Special characters. {{{ s#^\( *\)-- #\1w_emdash`'w_emdash_separate`'# s# -- #`'w_emdash_separate`'w_emdash`'w_emdash_separate`'#g s# --$#`'w_emdash_separate`'w_emdash# s#\([A-Za-z0-9])*\)--\((*[A-Za-z0-9]\)#\1`'w_endash`'\2#g s#\.\.\.\([] ,.;:?!)}>"-]\)#`'w_ellipsis\1#g s#\.\.\.$#`'w_ellipsis# s#\([ ([{<"-]\)([cC])\([ :1-9]\)#\1w_copyrightsign\2#g s#^([cC])\([ :1-9]\)#w_copyrightsign\1#g s#\([ ([{<"-]\)([cC])$#\1w_copyrightsign# s#([tT][mM])#`'w_trademarksign`'#g s# -> # w_rarrow #g s#^-> #w_rarrow # s# ->$# w_rarrow# s# <- # w_larrow #g s#^<- #w_larrow # s# <-$# w_larrow# }}} Some constructs that may occur most anywhere: {{{ s#||#`'w_horizbr`'#g s#//$#`'w_linebr# }}} stx2any/common/end.lm4 0000644 0001750 0001750 00000001113 10424125113 016565 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE End a document. Try to make sure everything is closed. (This is not perfect; for example the sectioning system of docbook-xml is not invoked. Docbook-xml sections don't play nicely with diversions anyway.) {{{ w_newindent(0)`'w_setblocktype(n)`'w_softpara`'dnl }}} Close the default environment begun in begin.m4 and check everything went as should. Then, prepare for the actual output (the template). {{{ w_end(n)`'w_check_env`'w_enddiv(body)`'w_check_div`'divert(0)dnl }}} stx2any/common/markup.lm4 0000644 0001750 0001750 00000041315 10471044066 017337 0 ustar pkalliok pkalliok 0000000 0000000 w_title(stx2any -- low-level and common markup facilities)dnl w_doc_id(s2aimpl.markup)dnl w_author(Panu A. 
Kalliokoski)dnl This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE w_use(examples/reflection-disclaimer) Included herein are macros that transform "low-level" annotations made in the ''sed'' phase into "high-level" macro calls to be defined specific to output format. Actually, this border is very fuzzy: some output formats don't need these helpers (e.g. LaTeX doesn't need footnote facilities), and may redefine some markup on a lower level. Because overriding definitions made here is easy, this file is also used for providing some sensible (output format independent) defaults for several macros. But the first thing to do is to disable the ''m4'' comment mechanism. It is the single most common source of syntax mistakes in ''m4'' files. Because traditional ''m4'' won't let us disable it entirely, let's make it something you're relatively unlikely to run into. {{{ changecom(`#%$bo',`ob$#%') }}} ! Defaults The default character set. {{{ w_char_coding(latin1) }}} Defaults for some simple macros. {{{ define(`w_horizbr', `||') define(`w_apo', ') define(`w_eline',) define(`w_section',) define(`w_url', `w_literal(`$1')') define(`w_man_desc', `ifelse(`$2',, `w_begdiv(ingr)w_emph(`$1')`'w_linebr`'w_nl`'w_enddiv(ingr)dnl', `$1 w_emdash $2 w_linebr')') define(`w_techemph', `w_emph(`$1')') define(`w_quotation', ``'w_bq`'w_bq`'$1`'w_apo`'w_apo`'') define(`w_emdash', `--') define(`w_endash', `--') define(`w_ellipsis', `...') define(`w_copyrightsign', `(c)') define(`w_sectbreak', `w_paragraph`'w_emdash w_emdash w_emdash`''dnl `ifelse(eval(`$1>4'),1,` w_emdash w_emdash`'')w_softopen') }}} Both itemised list types should have the same effect. In most situations, list items have the same effect, too. {{{ w_derive_env(*, -, 0,,,,) w_derive_env(*i, i, 0,,,,) w_derive_env(-i, i, 0,,,,) w_derive_env(#i, i, 0,,,,) }}} These environments have sensible defaults, but are overridden in some output formats when more appropriate markup is available. {{{ w_define_env(`compactlist', `pushdef(`w_eline', `w_linebr')', `popdef(`w_eline')') w_derive_env(`citation', q, 0,,,`w_linebr`'w_nl w_emdash $*`'w_nl',) w_derive_env(`abstract', q, 0,`w_begdiv(ingr)',,,`w_enddiv(ingr)') w_derive_env(`admonition', q, 1,`$1:w_nl`'',,,) define(`w_slideheader', `w_set_or_get(`@w_slideheader', `$*')') define(`w_slidefooter', `w_set_or_get(`@w_slidefooter', `$*')') w_define_env(`slide', `w_paragraph`'w_slideheader`'w_nl`'w_sbreak(3)`'w_softopen`'w_nl`'', `w_sbreak(3) w_paragraph`'w_slidefooter`'w_nl`'w_sbreak(5)`'w_softopen`'w_nl`'') }}} Quoted characters. We suppose most of these characters don't have any special meaning and let output format specific definitions override those that do. {{{ define(`w_lt', `<') define(`w_gt', `>') define(`w_amp', `&') define(`w_bldot', `.') define(`w_blap', ') define(`w_bs', `\') define(`w_obr', `{') define(`w_bar', `|') define(`w_cbr', `}') define(`w_us', `_') define(`w_ct', `^') define(`w_td', `~') define(`w_dol', `$') define(`w_hs', `#') define(`w_pct', `%') }}} ! Sectioning These macros just do section numbering for output formats that do not support it natively. They transform the low-level `w_headl' to the high-level `w_headline'. 
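The chained counters below carry the actual numbering; as an aside, the following Python model shows the intended behaviour: stepping a level resets everything deeper, and the section number is the dotted join of the levels stepped so far. The class and its names are invented for illustration, only the level names appear in the macros.
{{{
class Numbering:
    # chapter, section, subsection, subsubsection: one slot per heading depth
    def __init__(self):
        self.count = [0, 0, 0, 0]

    def heading(self, depth, title):
        # depth is 1-based, like the number of '!' marks in stx
        self.count[depth - 1] += 1
        for i in range(depth, len(self.count)):
            self.count[i] = 0
        return '%s %s' % ('.'.join(str(n) for n in self.count[:depth]), title)

n = Numbering()
print(n.heading(1, 'Sectioning'))    # 1 Sectioning
print(n.heading(2, 'Counters'))      # 1.1 Counters
print(n.heading(2, 'Diversions'))    # 1.2 Diversions
print(n.heading(1, 'Block system'))  # 2 Block system
}}}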
{{{ w_newcounter(subsubsection) w_newcounter(subsection, subsubsection) w_newcounter(section, subsection) w_newcounter(chapter, section) define(`w_headl', `w_newindent(0)'dnl `w_stepcounter(w_pickn(`$1', chapter, section, subsection, subsubsection))'dnl `w_headline(`$1', `w_maybe_tocline(w_number_of(`$1'), `$2')')') define(`w_maybe_tocline', `ifelse(w_make_toc, true, `w_index(toc, `$1', `$2')', w_do_link_abbr, true, `$1`'w_autolabel(`$2')', `$1`'$2')') define(`w_number_of', `ifelse(w_do_numbering, true, `w_sectionmark(`$1', chapter, section, subsection, subsubsection) ')') define(`w_sectionmark', `w_counter_arabic(`$2')`'ifelse(`$1',1,, `.w_sectionmark(decr(`$1'),shift(shift($@)))')') }}} ! Diversions Diversions common for every output format are declared here. {{{ w_define_div(`frontmatter') w_define_div(`ingr') w_define_div(`body') w_define_div(`backmatter') ifelse(w_make_toc,true,`w_define_div(`toc')',`w_define_trashcan(`toc')') }}} ''defs'' is a genuine trashcan diversion. The others serve as defaults for diversions that are not used for most output formats. {{{ w_define_trashcan(`defs') w_define_trashcan(`metas') w_define_trashcan(`preamble') }}} ! Footnotes By default, we gather footnotes in a diversion that can be dumped upon request. `w_footnote' is meant for end users; the environment is the real thing, used directly by abbreviations. {{{ w_define_div(`footnote') define(`w_footnote', ``'w_beg(footnote)`'$1`'w_end(footnote)`'') w_newcounter(footnote) w_define_env(footnote, `define(`@w_footnote_flag',t)'dnl `w_stepcounter(footnote)w_footnotemark(w_counter_arabic(footnote))`''dnl `w_begdiv(footnote)w_footnotemark(w_counter_arabic(footnote)) ', `w_linebr`'w_nl`'w_enddiv(footnote)') define(`w_dump_footnotes', `ifelse(defn(`@w_footnote_flag'),,,`$1`'w_dumpdiv(footnote)$2')'dnl `undefine(`@w_footnote_flag')') }}} ! Link abbreviation support The link abbreviations use two macros of their own, `w_generic_link' and `w_autolabel'. The label part is easy, though we have to provide infrastructure for making labels: {{{ w_newcounter(autolabel) define(`w_genlabel', `ifdef(`@w_label_used_$1', `w_stepcounter(autolabel)w_genlabel(`$1'w_counter_arabic(autolabel))', `define(`@w_label_used_$1',t)`$1'')') define(`w_tidystring', `patsubst(``$1'',`[^0-9A-Za-z`']',`.')') define(`w_make_autolabel', `define(`@w_label_of_$1',w_genlabel(w_tidystring(`$1')))') define(`w_autolabel', `ifdef(`@w_label_of_$1',,`w_make_autolabel(`$1')')'dnl `w_label(defn(`@w_label_of_$1'), `$1')') define(`w_autorefer', `w_refer(ifdef(`@w_label_of_$1',`defn(`@w_label_of_$1')',`w_tidystring(`$1')'), ifelse(`$2',,``$1'',``$2''))') }}} Now, generic links are quite a beast. They can become: # cross references (if the label exists), # cross links (if the document is known), # inline images or ordinary links (if it seems like we have a URL), # footnotes (if everything else fails) in this order of preference. They can link directly or indirectly (via a link data block). 
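As a sketch of the preference order above, before the real definition below: the dictionaries stand in for the m4 symbol tables (@w_linkdata_of_*, @w_label_of_*, @w_filename_of_* and friends), and the returned strings are only placeholders for the w_refer, w_crosslink, w_img, w_link and w_footnote calls that the macro actually emits.
{{{
def is_url(target):
    # same prefixes as w_is_url below
    return target.startswith(('http://', 'https://', 'ftp://', 'gopher://',
                              'file:/', 'nntp://', 'mailto:', 'news:',
                              './', '../'))

def generic_link(target, text, linkdata, labels, known_files):
    if target in linkdata:                      # indirect: follow the link data block
        return generic_link(linkdata[target], text, linkdata, labels, known_files)
    if target in labels:                        # 1. cross reference within the document
        return 'refer(%s, %s)' % (labels[target], text or target)
    if target in known_files:                   # 2. cross link to a known document
        return 'crosslink(%s, %s)' % (target, text)
    if target.startswith('img:'):               # 3. inline image ...
        return 'img(%s, %s)' % (target[4:], text)
    if is_url(target):                          # ... or ordinary link
        return 'link(%s, %s)' % (target, text) if text else 'url(%s)' % target
    return '%s footnote(%s)' % (text, target)   # 4. fall back to a footnote
}}}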
{{{ define(`w_generic_link', `ifdef(`@w_linkdata_of_$1', `w_generic_link(defn(`@w_linkdata_of_$1'),`$2')', `ifdef(`@w_label_of_$1', `w_autorefer(`$1', `$2')', `ifdef(`@w_filename_of_$1', `w_crosslink(`$1', `$2')', `ifdef(`@w_file_exists_$1', `w_crosslink(`$1', `$2')', `ifelse(index(`$1',img:),0, `w_img(substr(`$1', 4), `$2')', `ifelse(w_is_url(`$1'),t,`ifelse(`$2',,`w_url(`$1')',`w_link(`$1',`$2')')', `$2`'w_footnote(`$1')')')')')')')') define(`w_is_url', `ifelse(index(`$1',http://),0,t,index(`$1',https://),0,t,index(`$1',ftp://),0,t, index(`$1',gopher://),0,t,index(`$1',file:/),0,t,index(`$1',nntp://),0,t, index(`$1',mailto:),0,t,index(`$1',news:),0,t, index(`$1',./),0,t,index(`$1',../),0,t)') }}} ! End-user markup These definitions don't have anything to do with anything else and are included here because they are output format independent. They are expected to be invoked by the author of the document directly. These didn't fit anywhere else. {{{ define(`w_def_in_fmt', `ifelse(defn(`w_outputfmt'), `$1', `define(`$2', `$3')')') define(`w_invoke', `ifdef(`$1',`$1',`w_warning(`Unknown macro "$1" called')w_void')') define(`w_use', `ifdef(`@w_included_$1',, `define(`@w_included_$1',t)include(`$1.m4')`'')') }}} !! Indexes and cross-links Indexing something currently simply puts the same text both in the index diversion and in the current text, cross-referencing them. Some indexes should probably be lexicographically ordered, but this needs more careful designing. As it stands, this system is quite sufficient for lists of pictures and the like. {{{ define(`w_index', `ifelse(`$3',,`w_index(`$1',,`$2')', `w_make_autolabel(`$3')`$2'w_autolabel(`$3')`''dnl `w_begdiv(`$1')`$2'w_autorefer(`$3')`'w_linebr`'w_nl`'w_enddiv(`$1')')') define(`w_indexword', `define(`$2', `w_index(`$1', ``$2'')')') }}} Cross links are links between documents. To make a cross link properly to another document, a document needs to know something about the other document. w_crosslink(gstman) and `w_crosslink' together provide a way to keep track of this information. There are seven cases: # document contains both title and id, referenced by id # document contains both title and id, referenced by filename # document contains only title, referenced by filename # document contains only id, referenced by id # document contains only id, referenced by filename # document contains neither title nor id, referenced by filename # document unknown or doesn't exist Cases 1--3 produce a link to the file with text of title. Cases 4--6 produce a link with text of the filename, case 7 just whatever the document happened to be referenced by and a warning. A second argument, if present, will override the visible text produced by this macro. {{{ define(`w_file', `ifelse(w_is_url(`$1'),t,,ifdef(`w_base',defn(`w_base')/))`$1'') define(`w_crosslink', `ifdef(`@w_filename_of_$1', `w_crosslink(defn(`@w_filename_of_$1'),`$2')', `ifdef(`@w_file_exists_$1', `w_link(w_file(`$1'), ifelse(`$2',,`ifdef(`@w_title_of_$1', `defn(`@w_title_of_$1')',``$1'')',``$2''))', `w_warning(`Unknown cross link to "$1"')ifelse( `$2',,`$1',`$2')')')') }}} !! Some environments {{{ w_define_env(`text',,) w_define_env(`ifeq', `ifelse(`$1', `$2',, `w_begdiv(defs)')', `ifelse(`$1', `$2',, `w_enddiv(defs)')') }}} Floats and their infrastructure. 
{{{ w_define_env(`float', `w_beg(w_some_float_env(`$1'), shift($@))', `w_end(w_some_float_env(`$1'), shift($@))') define(`w_some_float_env', `ifelse(`$1',,`w_float_default', `w_ifdef_env(`w_float_'substr(`$1',0,1), ``w_float_'substr(`$1',0,1)', `w_some_float_env(substr(`$1',1))')')') w_define_env(`w_float_h', `w_sbreak(5)`'w_nl', `w_paragraph`'ifelse(`$1',,,w_caption(`$1')`'w_nl`')w_sbreak(5)`'w_nl') w_derive_env(`w_float_n', `w_float_h', 0, `define(`@w_footnote_flag',t)w_begdiv(footnote)',,, `w_enddiv(footnote)undefine(`@w_para_flag')') w_derive_env(`w_float_default', `w_float_n', 0,,,,) define(`w_caption', `$1') }}} ! Table infrastructure Common helpers used by both table environments. {{{ define(`w_begin_row', `w_reinit_list(columns)w_stepcounter(row)'dnl `w_beg(w_row, n, w_counter_arabic(row))`'') define(`w_begin_cell', `w_stepcounter(column)w_beg(w_cell, n, w_next_in_list(columns))') }}} Generic table environment. These transform low-level `w_horizbr' and `w_linebr' into high-level environment calls. The environment is based on `w_table', to be defined output-format-specifically. {{{ w_derive_env(`table', `w_table', 0, `w_setup_list(columns, $@)w_newcounter(column)w_newcounter(row,column)'dnl `pushdef(`w_pending_block_hook', `w_begin_row`'w_begin_cell`'')'dnl `pushdef(`w_linebr', `w_end(w_cell)`'w_end(w_row)`'define(`w_pending_block_hook', `w_begin_row`'w_begin_cell`'')')'dnl `pushdef(`w_horizbr', `w_end(w_cell)`'w_begin_cell`'')'dnl `pushdef(`w_sectbreak', `w_table_rule(w_length_list(columns))')', `undefine(`@w_para_flag')', , `popdef(`w_linebr')popdef(`w_horizbr')popdef(`w_sectbreak')'dnl `popdef(`w_pending_block_hook')w_delcounter(column)w_delcounter(row)'dnl `w_unsetup_list(columns)') }}} List tables. {{{ w_derive_env(`listtable', `w_table', 0, `w_setup_list(columns, $@)w_newcounter(column)w_newcounter(row,column)'dnl `pushdef(`w_sectbreak', `w_table_rule(w_length_list(columns))')'dnl `w_push_env(*)w_derive_env(*,w_listtable_level,0,,,,)'dnl `w_newcounter(w_listtable)w_push_env(*i)undefine(`@w_para_flag')',,, `w_pop_env(*i)w_pop_env(*)w_delcounter(w_listtable)popdef(`w_sectbreak')'dnl `w_unsetup_list(columns)w_delcounter(column)w_delcounter(row)') w_define_env(`w_listtable_level', `w_stepcounter(w_listtable)'dnl `ifelse(w_counter_arabic(w_listtable),1,`w_derive_env(*i,w_row,0,,,,)', w_counter_arabic(w_listtable),2,`w_derive_env(*i,w_cell,0,,,,)', `w_error(`Hm, trying to make three-dimensional tables?')')', `w_stepcounter(w_listtable,-1)'dnl `ifelse(w_counter_arabic(w_listtable),1,`w_derive_env(*i,w_row,0,,,,)', w_counter_arabic(w_listtable),0,`w_define_env(*i,,)')') }}} ! Paragraphs These transform low-level `w_para' calls into high-level `w_paragraph' calls. {{{ define(`w_softopen', `define(`@w_para_flag',t)') define(`w_para', `w_softopen`'w_softpara') define(`w_beg_para', `ifelse(defn(`@w_para_flag'),t,`w_paragraph`'')'dnl `undefine(`@w_para_flag')') }}} Hooks for dealing with breaks (`w_softpara' is a hook for those who want to do something for raw `w_para'). {{{ define(`w_softbr',) define(`w_softpara',) }}} Pending blocks. This hook is meant to be invoked for opening a block when (or if) any text is forthcoming. Used by tables (sometimes) and definition lists. {{{ define(`w_pending_block_hook',) define(`w_pending_block', `w_pending_block_hook`'define(`w_pending_block_hook',)') }}} ! Block system !! Block infrastructure This is the real thing. 
These macros provide the infrastructure for transforming indents and dedents (produced by the indentation system in w_crosslink(s2aimpl.common)) into block structure. Blocks are environments which are opened upon indent and closed upon dedent. If the block type changes, the old block is closed and a new one opened. There is a pseudo block, ''n'', which kind of means "no block at all". It is used because when we get a new indent, we don't know the forthcoming block type (because the indentation system is independent of block types). We store previous block type because sometimes the type of a new block depends on the type of the enclosing block. {{{ w_define_env(n,,) define(`@w_block_type',n) define(`w_indent', `pushdef(`@w_prev_block_type', defn(`@w_block_type'))'dnl `pushdef(`@w_block_type',n)w_beg(n)`'w_indent_hook`'') define(`w_dedent', `w_dedent_hook`'w_end(defn(`@w_block_type'))`''dnl `popdef(`@w_prev_block_type')popdef(`@w_block_type')') }}} Hooks for direct users of the indent system. {{{ define(`w_indent_hook',) define(`w_dedent_hook',) }}} Change block type within indent level. {{{ define(`w_setblocktype', `ifelse(defn(`@w_block_type'),`$1',, `w_end(defn(`@w_block_type'))`'define(`@w_block_type',`$1')'dnl `w_beg(defn(`@w_block_type'))')') }}} !! Block markup glue These definitions transform low-level `w_sbreak', `w_bline', `w_item' and `w_term' into high-level `w_sectbreak', environment invocations, `w_paragraph', `w_listitem' and `w_defnterm'. They use the paragraph system and block system above as well as the indentation system to achieve this. The block types have the following meaning: ''n'':: "no block yet" ''text'':: ordinary text ''q'':: block quote ''-'', ''*'', ''#'', '':'':: itemised, itemised, numbered, and definition lists, respectively. Note that these are _not_ the blocks of the list items, but of the lists themselves. ''-i'', ''*i'', ''#i'', ''t'':: list items. ''t'' is the type of a definition in definition lists (terms in definition lists are not considered blocks at all). Pending block hook usually contains some opening element, if anything. We try to invoke it at an appropriate place: after everything has been closed (so the environments have time to cancel it), but before the elements that were possibly supposed to be within the pending block. Okay, on with the definitions. Section breaks. {{{ define(`w_sbreak', `w_newindent(0)`'w_sectbreak(`$1')') }}} Ordinary text lines. These only induce one level of indentation, whose type is normal text in top level and inside list items, blockquote elsewhere. Kind of like saying, it can remain normal if it has a good reason to be indented so, otherwise it becomes a block quote. {{{ define(`w_bline', `w_newindent(`$1')`'w_onlyindent`''dnl `w_pending_block`'w_beg_para`'w_softbr') define(`w_onlyindent', `ifelse(index(`t-i*i#i',defn(`@w_prev_block_type')),-1, `w_setblocktype(q)',`w_setblocktype(text)')') w_define_env(i,`w_listitem',) }}} Different kinds of list items. These imply the presence of a list; if we were already in a list, the first indent level closes the pending list item. We also mark the indent level of the item text, so we can tell if the next line is a block quote (indented more). {{{ define(`w_item', `w_newindent(`$1',1)`'w_pending_block`'w_setblocktype(`$3')`''dnl `w_newindent(`$1',2)`'w_setblocktype(`$3i')`''dnl `w_newindent(`$2',0)`'w_setblocktype(text)`'undefine(`@w_para_flag')w_softbr') }}} Terms of definition list. These imply the presence of a definition list and a forthcoming definition. 
However, as the definition text does not begin on this line, we don't set up an indent level for it but just the ''t'' block. {{{ define(`w_term', `w_newindent(`$1',1)`'w_pending_block`'w_setblocktype(:)`''dnl `w_defnterm(`$2')`''dnl `w_newindent(`$1',2)`'w_setblocktype(t)`'undefine(`@w_para_flag')') }}} stx2any/common/emphasis.lsed 0000644 0001750 0001750 00000004214 10424125222 020071 0 ustar pkalliok pkalliok 0000000 0000000 w_doc_id(s2aimpl.emph)dnl This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Different emphasis-like constructs. These must not cross paragraph boundaries and are only recognised in the presence of both an opening and closing marker, so we handle them one paragraph at a time. Literal blocks should be passed untouched. If a literal block was met when gathering a paragraph's lines (see below), it is processed now. {{{ x /^{{{$/ { G x s#.*## } x /^{{{$/,/^}}}$/ b /^{{{\n/,/^}}}$/ b }}} First, gather lines until we reach an empty line or literal block to process emphasis as paragraphs. End of file has to be made a special case. {{{ : gather s# $## $ b nogather N /\n$/ b nogather /\n{{{$/ { s### x s#.*#{{{# x b nogather } b gather : nogather }}} Normalise whitespace. Spaces inserted at the beginning and end of line are taken away later. {{{ s#^# # s#$# # }}} Literal formatting. We previously supported non-quoted versions but the new quote-protection scheme should avoid them altogether. This is quite a regexp because it's complicated to say, in regular expressions, "a span of text not containing the string /X/" (where len(/X/) w_gt 1). {{{ s#\([ \n([{<"-]\)`'w_apo`'`'w_apo`'\([^ `]\(\([^`]\(`'w_apo`'[^`]\)*\(`'w_us`'\)*\(`'w[^_]\)*\(`'[^w]\)*\)*[^ `]\)*\)`'w_apo`'`'w_apo`'\([] \n,.;:?!)}>"-]\)#\1w_literal(`\2')\9#g }}} Underscores have two forms, depending on whether they are already quoted. {{{ s#\([ \n([{<"'-]\)`'w_us`'\([^ `]\(\([^`]\(`'w_apo`'\)*\(`'w[^_]\)*\(`'[^w]\)*\)*[^ `]\)*\)`'w_us`'\([] \n,.;:?!)}>"'-]\)#\1w_emph(`\2')\8#g s#\([ \n([{<"'-]\)_\([^ _]\(\([^_]\(w_\)*\)*[^ _]\)*\)_\([] \n,.;:?!)}>"'-]\)#\1w_emph(`\2')\6#g }}} Other kinds of emphasis. {{{ s#\([ \n([{<"'-]\)/\([^ /]\([^/]*[^ /]\)*\)/\([] \n,.;:?!)}>"'-]\)#\1w_techemph(`\2')\4#g s#\([ \n([{<"'-]\)\*\([^ *]\([^*]*[^ *]\)*\)\*\([] \n,.;:?!)}>"'-]\)#\1w_strong(`\2')\4#g }}} Quotations are processed last to make it possible to have something emphasised within quotes. {{{ s#\([ \n([{<'`-]\)"\([^ "']\([^"]*[^ "]\)*\)"\([] \n,.;:?!)}>'-]\)#\1w_quotation(`\2')\4#g }}} Strip whitespace inserted above. {{{ s#^ ## s# $## }}} stx2any/common/linking.lsed 0000644 0001750 0001750 00000003552 10424125232 017720 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE This file has definitions for link abbreviations. This is the second phase of processing link abbreviations: the link data has (hopefully) already been gathered by w_crosslink(s2aimpl.gatherlink). The logic of this script is weird. We have complicated rules for deleting link data blocks. The basic difficulty is that both the blocks' beginning and end are marked with empty lines. Whether it is a link data block depends on the line that comes _after_ that. Remove link data blocks. {{{ /^$/,/./{ : del3 /^\[[A-Za-z0-9]\{1,\}\] /,/^$/d /^%-%/b proceed } }}} Generic link syntaces that can be confused with link data blocks. Because labels have not yet been processed, protect against ''+'' as the last character. 
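For readers who find the escaped sed hard to follow, here is a hypothetical Python rendering of the first two rewrites below, the ones for the [text][target] form; the replacement strings mimic the w_generic_link calls the sed rules emit, and the function name and sample line are invented.
{{{
import re

def rewrite_reference_links(line):
    # [text][target] anywhere on the line, then a bare [target] at line start
    line = re.sub(r"\[([^][]*)\]\[([^][]*[^][`+])\]",
                  r"`'w_generic_link(`\2', `\1')", line)
    line = re.sub(r"^\[([^][]*[^][`+])\]",
                  r"w_generic_link(`\1')", line)
    return line

print(rewrite_reference_links("read the [manual][stxman] first"))
# -> read the `'w_generic_link(`stxman', `manual') first
}}}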
{{{ s#\[\([^][]*\)\]\[\([^][]*[^][`+]\)\]#`'w_generic_link(`\2', `\1')#g s#^\[\([^][]*[^][`+]\)\]#w_generic_link(`\1')#g }}} Now we don't have anything that could be mistaken for beginning of link data block, so we can jump and see whether this line was actually meant to be deleted (because of the range it's in). {{{ s#^#%-%# b del3 : proceed s#^%-%## }}} Labels. {{{ s#\[+\([^][]*\)+\]#`'w_autolabel(`\1')#g }}} Rest of generic link syntaces. {{{ s#\([^ ,.;:!?"'-]*[^[ ,.;:!?"'-]\)\[\([^][]*[^][`]\)\]#`'w_generic_link(`\2', `\1')#g s#\([ ({<"'.:-]\)\[\([^][]*[^][`]\)\]#\1w_generic_link(`\2')#g }}} URLs. {{{ s#^# # s#\([ ([{<"'-]\)\(https*://[^ ]*[A-Za-z0-9_/]\)#\1w_url(`\2')#g s#\([ ([{<"'-]\)\(ftps*://[^ ]*[A-Za-z0-9_/]\)#\1w_url(`\2')#g s#\([ ([{<"'-]\)\(gopher://[^ ]*[A-Za-z0-9_/]\)#\1w_url(`\2')#g s#\([ ([{<"'-]\)\(nntp://[^ ]*[A-Za-z0-9_/]\)#\1w_url(`\2')#g s#\([ ([{<"'-]\)\(file:/[^ ]*[A-Za-z0-9_]\)#\1w_url(`\2')#g s#\([ ([{<"'-]\)\(mailto:[^ ]*[A-Za-z0-9_]\)#\1w_url(`\2')#g s#\([ ([{<"'-]\)\(news:[^ ]*[A-Za-z0-9_]\)#\1w_url(`\2')#g s#^ ## }}} stx2any/common/gatherlinkmap.lsed 0000644 0001750 0001750 00000003012 10424125224 021103 0 ustar pkalliok pkalliok 0000000 0000000 w_doc_id(s2aimpl.gatherlink)dnl This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE This is a (scary) preprocessor that gathers link data from documents. Link abbreviation processing is two-phase: this is the first phase, before actually processing the documents, in which we just gather enough information for the second phase and ditch everything else. The link data is supposed to be in blocks. Outside blocks, all we have to gather is labels. {{{ /^\[[A-Za-z0-9][A-Za-z0-9]*\] /,/^$/!{ /./!d }}} I don't care to process more than one label per line. If somebody uses two explicit labels on the same line, (s)he can't have a good reason to do so. We read text in whole paragraphs so as to skip blocklike-looking constructs in the middle of a paragraph. {{{ : gulp s#^.*\[+\([^]]*\)+\].*$#w_make_autolabel(`\1')dnl#p s#^!!* *\(.*\)$#w_make_autolabel(`\1')dnl#p $!{ N /\n$/!b gulp } d } }}} End-block processing. {{{ /^$/{ x s#$#`'')dnl#p d } }}} From here on, we are within a link data block. During that, we keep the previous line in hold space. Empty hold space means no line. {{{ s#'#`'w_apo`'#g }}} The case that we have a new datum: see whether there is a line to finish. {{{ /^\[\([A-Za-z0-9][A-Za-z0-9]*\)\] /{ s##define(`@w_linkdata_of_\1',`# x /./!d s#$#`'')dnl#p d } }}} The case that we don't: just store the new one, dump the old one. (Whitespace is stripped to allow indenting line continuations on the same level as the link data marker.) {{{ s#^[ ]*## x }}} stx2any/common/secure.lm4 0000644 0001750 0001750 00000001047 10424125134 017316 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Attempt to kill all possibly dangerous commands. It would be nice to "partially kill" `include' and `undivert', but that does not seem to be possible. Provide some sensible information on what went wrong. {{{ define(`w_gag_cmd', `undefine(`$1')'dnl `define(`$1', `w_warning(`attempt to use "$1" in secure environment')`$1'($@)')') }}} These are the commands to kill. 
{{{ w_gag_cmd(`builtin') w_gag_cmd(`syscmd') w_gag_cmd(`esyscmd') }}} stx2any/common/block.lsed 0000644 0001750 0001750 00000003337 10424125220 017355 0 ustar pkalliok pkalliok 0000000 0000000 w_doc_id(s2aimpl.block)dnl This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Block rules. These are quite simple: we process line at a time, classifying them by the kind of block-level element they begin. Grouping these into logical entities is done on the ''m4'' side. First, because we want to jump to the end immediately after first matching classification, clear the ''t'' flag of ''sed''. {{{ t foo : foo }}} Basically, there need not be special support for `w_beg' and `w_end'. However, if we process them as their own line type, they won't leave irritating markup in the output (most importantly, no `w_bline') and there is less need for whitespace-related quirks in the implementing ''m4'' code. {{{ s#^\( *\)\(w_invoke(`w_beg')(.*)\) *$#w_newindent(len(`\1'))\2`'dnl# t end s#^\( *\)\(w_invoke(`w_end')(.*)\) *$#w_newindent(len(`\1'))\2`'dnl# t end }}} Next, there are the syntaxes which take the whole line. These don't need a trailing `w_eline'. {{{ s#^\(!!*\) *\(.*\)$#w_headl(len(\1),`\2')# t end s#^ *$#w_para# t end s#^---*$#w_sbreak(len(&))# t end s#^\( *\)\(.*\)::$#w_term(len(`\1'),`\2')# t end }}} Otherwise, add the end-of-line marker. Because this is not a classification, clear the ''t'' flag again. {{{ s#$#`'w_eline# t bar : bar }}} Lists are recognised from the beginning of the line. They induce two indent levels (one for the list, one for the item text) so we report both. {{{ s#^\(\( *\)\* *\)#w_item(len(`\2'),len(`\1'),*)`'# t end s#^\(\( *\)- *\)#w_item(len(`\2'),len(`\1'),-)`'# t end s#^\(\( *\)\# *\)#w_item(len(`\2'),len(`\1'),\#)`'# t end }}} Everything else is an ordinary text line. End of classification. {{{ s#^ *#w_bline(len(`&'))`'# : end }}} stx2any/common/begin.lm4 0000644 0001750 0001750 00000000456 10424125042 017115 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE This is stuff we do just before entering the document(s). Currently not much: just set up the default environment for the document. {{{ w_begdiv(body)dnl w_beg(n)`'w_setblocktype(text)dnl }}} stx2any/common/cleanup.lm4 0000644 0001750 0001750 00000000350 10424125064 017455 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE This is called last. Clear up any unused diversions (otherwise ''m4'' dumps them upon exiting). {{{ divert(-1)undivert }}} stx2any/common/header.lsed 0000644 0001750 0001750 00000000372 10424125226 017515 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Process header lines into stx2any metadata calls. {{{ 1,/^$/{ /^\([a-z][a-z_]*\): /s/,/`,'/g s/^\([a-z][a-z_]*\): \(.*\)$/w_\1(\2)dnl/ } }}} stx2any/common/common.lm4 0000644 0001750 0001750 00000027362 10424125107 017330 0 ustar pkalliok pkalliok 0000000 0000000 w_title(stx2any -- common m4 facilities)dnl w_doc_id(s2aimpl.common)dnl w_author(Panu A. 
Kalliokoski)dnl This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE w_use(examples/reflection-disclaimer) This source file deals with definitions that are generally useful, not directly related to ''stx2any''. You might well want to use these in any ''m4'' based project. First, ignore output. We only want to make definitions; output belongs to (format-specific) templates. {{{ divert(-1) }}} Undefine "format". I wonder what the GNU gurus were thinking about when they defined this extension to be recognised without parameters. {{{ undefine(`format') }}} ! Infrastructure These macros are simple computation or "exception" facilities. !! Error reporting There are 2 error "levels", fatal and non-fatal (ta-da!). Both should report the error location in standard Unix format, because some tools, like ''emacs'', can read this output and directly jump to the error locations. There are two complications: # block-related errors come in pairs, as mismatching begin and end conditions. The "error", which is usually an omission, is at neither place but usually somewhere in between. So, we must report both places. Currently, only the end-place is reported in the standard format. # because the actual input is streamed to ''m4'' through ''sed'', we don't know the actual file names. Fear not: we define the macro `w_real_file' upon entering each input file so we can report errors properly. {{{ define(`w_stdin_p', `ifelse(__file__,stdin,t,__file__,-,t)') define(`w_current_location', `ifelse(w_stdin_p,t,w_real_file,__file__):'dnl `ifelse(w_stdin_p,t,`eval(__line__ - w_line_base)',__line__)') define(`w_warning', `errprint(ifelse(`$2',,`w_current_location',``$2'')`: stx2any: $1'w_nl)') define(`w_error', `w_warning($@)m4exit(1)') }}} !! Output twiddling `w_nl' is indispensable if you want to write neat code. The whitespace dependence in ''m4'' is bad enough as it is. For producing backquotes, we need to override quotes both upon definition and upon invocation. The code looks weird but works. {{{ define(`w_nl',` ') define(`w_void',) define(`w_bq', changequote([[,]])dnl [[changequote(.,.)`changequote(`,')]]dnl changequote(`,')) }}} !! Quote facilities These are needed for storing parameter lists.[[-Single parameters are conveniently handled by simple pushdef / popdef.-]] Combining packs many parameters in a single string; dequoting removes one level of quotes for a string, effectively unpacking the parameter list (parameters are still quoted separately). Actually, ''foo'' is equal to `w_dequote'(`defn'(''foo'')), but only for strings that are valid macro names. For internal variables beginning with ''@'', defn + dequoting is the only way. {{{ define(`w_combine', ``$@'') define(`w_gather', ``$*'') define(`w_dequote', `$1') }}} !! List facilities {{{ define(`w_pickn', `ifelse(`$1',1,`$2',`w_pickn(decr(`$1'),shift(shift($@)))')') define(`w_listlen', `ifelse(`$*',,0,`incr(w_listlen(shift($@)))')') }}} ! Counter system (à la LaTeX) Reentrant newcounter and delcounter. {{{ define(`w_newcounter', `pushdef(`@w_counter_$1',0)'dnl `pushdef(`@w_refcounter_$1',`$2')') define(`w_delcounter', `popdef(`@w_counter_$1')popdef(`@w_refcounter_$1')') }}} All changes to counters are done via setcounter, in order to deal with reference counters. 
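To make the reference-counter cascade concrete before the definitions below, a small Python model (names invented for illustration): each counter may name a dependent counter that is zeroed whenever the counter changes, and the zeroing cascades in turn.
{{{
counters = {}    # name -> current value
dependents = {}  # name -> counter to reset whenever this one changes

def newcounter(name, dependent=None):
    counters[name] = 0
    dependents[name] = dependent

def setcounter(name, value):
    counters[name] = value
    if dependents.get(name):
        setcounter(dependents[name], 0)   # the cascade, as in w_setcounter

def stepcounter(name, by=1):
    setcounter(name, counters[name] + by)

newcounter('subsection')
newcounter('section', 'subsection')
stepcounter('section'); stepcounter('subsection'); stepcounter('subsection')
print(counters)   # {'subsection': 2, 'section': 1}
stepcounter('section')
print(counters)   # {'subsection': 0, 'section': 2}
}}}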
{{{ define(`w_setcounter', `define(`@w_counter_$1',`$2')'dnl `ifelse(defn(`@w_refcounter_$1'),,, `w_setcounter(defn(`@w_refcounter_$1'),0)')') define(`w_getcounter', `defn(`@w_counter_$1')') define(`w_stepcounter', `w_setcounter(`$1',ifelse(`$2',,`incr(',`eval($2+')w_getcounter(`$1')))') }}} Counter value in different formats. {{{ define(`w_counter_arabic', `w_getcounter(`$1')') define(`w_counter_alpha', `substr(_abcdefghijklmnopqrstuvwxyz,w_getcounter(`$1'),1)') define(`w_counter_Alpha', `substr(_ABCDEFGHIJKLMNOPQRSTUVWXYZ,w_getcounter(`$1'),1)') }}} ! Diversion system Diversions are for rearranging input. These are just a thin wrapper around the native diversions of ''m4'', providing nesting, error reporting, and names for diversions. I think naming would be reason enough to use these. Diversions are somewhat hard to understand, because they don't do anything to the way ''m4'' processes macros, they only say where the output goes _when_ there is some output. But in ''m4'', expansions are reread until they don't expand any more; so it's not that simple to tell when there will be output. Stated differently: diversions are _side effects_, so make sure (by quoting) that they won't take effect before you want them to. Another important point to realise is that other side effects (e.g. definitions) are not affected by diversions. {{{ define(`w_begdiv', `ifdef(`@w_div_$1',,`w_error(`unknown diversion "$1"')')'dnl `pushdef(`@w_divlocstack', w_current_location)'dnl `pushdef(`@w_divstack',$1)divert(defn(`@w_div_$1'))') define(`w_enddiv', `ifdef(`@w_divstack',,`w_error(`diversion stack empty')')'dnl `ifelse(`$1',,,`$1',defn(`@w_divstack'),, `w_warning("defn(`@w_divstack')`" begins here...', defn(`@w_divlocstack'))' `w_error(`diversion "'defn(`@w_divstack')`" closed by "$1"')')'dnl `popdef(`@w_divlocstack')popdef(`@w_divstack')'dnl `ifdef(`@w_divstack',`divert(defn(`@w_div_'defn(`@w_divstack')))')') define(`w_check_div', `ifdef(`@w_divstack', `w_error(`unclosed diversion "'defn(`@w_divstack')", defn(`@w_divlocstack'))')') define(`w_dumpdiv', `undivert(defn(`@w_div_$1'))') }}} Diversions are actually numbers. Give some way to map names to those numbers. {{{ w_newcounter(`w_n_avail_div') define(`w_define_div', `w_stepcounter(`w_n_avail_div')'dnl `define(`@w_div_$1', w_getcounter(`w_n_avail_div'))') define(`w_define_trashcan', `define(`@w_div_$1', -1)') }}} ! Environment system (à la LaTeX) Environments are meant for "big" things, where it would be ugly and/or unwieldy to use a single macro. For example, I wouldn't like it if I had to wrap a whole block quote in a macro call. Macros are more sensitive to syntax errors with parentheses and quotes and provide less information about what went wrong. On the other hand, environments can't read and process the included text,[[-unless you are perverse enough to have the environment expand to a big macro call, in which case the problems of macros apply.-]] so the effect of environments is limited to output upon opening and closing, and indirect effects like redefining hooks. That said, environments are a relatively thin wrapper around macro calls, as they are in LaTeX. They provide error reporting, saving of arguments until the end of the environment, and a separate namespace. To allow environments to call other environments, we define many /layers/ of environment variables. Always when we are executing an environment definition, we increase the layer. 
This ensures that the arguments of the _calling_ environment won't mess with the arguments of the _called_ environment. But this imposes a restriction: environments must always be closed at the same layer where they are opened. {{{ w_newcounter(`w_layer') define(`w_layervar', ``w_layer_'w_getcounter(`w_layer')`_$1'') define(`w_sublayer', `w_stepcounter(`w_layer')$1`'w_stepcounter(`w_layer',-1)') define(`w_define_env', `define(`@w_begin_$1', `$2')define(`@w_end_$1', `$3')') define(`w_ifdef_env', `ifdef(`@w_begin_$1', `$2', `$3')') define(`w_beg', `w_ifdef_env(`$1',, `w_error(`unknown environment "$1"')')'dnl `pushdef(w_layervar(env), `$1')'dnl `pushdef(w_layervar(params), w_combine(shift($@)))'dnl `pushdef(w_layervar(loc), w_current_location)'dnl `w_sublayer(`indir(`@w_begin_$1',shift($@))')') define(`w_end', `ifdef(w_layervar(env),,`w_error(`environment stack empty')')'dnl `ifelse(`$1',,,`$1',defn(w_layervar(env)),, `w_warning("defn(w_layervar(env))`" begins here...', defn(w_layervar(loc)))' `w_error(`environment "'defn(w_layervar(env))`" closed by "$1" in layer 'w_counter_arabic(`w_layer'))')'dnl `w_sublayer(`indir(`@w_end_''defn(w_layervar(env))`,' defn(w_layervar(params))`)')'dnl `popdef(w_layervar(loc))popdef(w_layervar(env))popdef(w_layervar(params))') define(`w_check_env1', `ifdef(w_layervar(env), `w_error(`unclosed environment "'defn(w_layervar(env))`" in layer 'w_counter_arabic(`w_layer'), defn(w_layervar(loc)))')') define(`w_check_env', `w_sublayer(`w_sublayer(`w_check_env1')w_check_env1')w_check_env1') define(`w_push_env', `pushdef(`@w_begin_$1',)pushdef(`@w_end_$1',)') define(`w_pop_env', `popdef(`@w_begin_$1')popdef(`@w_end_$1')') define(`w_make_param_shifter', `ifelse(`$1',0,``$'@',``shift('w_make_param_shifter(decr(`$1'))`)'')') define(`w_derive_env', `w_define_env(`$1', `$4`'w_beg(`$2','w_make_param_shifter(`$3')`)`'$5', `$6`'w_end(`$2','w_make_param_shifter(`$3')`)`'$7')') }}} ! Indentation system (à la Python) The indentation system forms the basis of the block system, because indentation determines the nesting of various elements. Actually, the indents are at least partially virtual. If an element takes a specific indentation, it means that that element wants anything with a greater indentation to be inside it, and with less or equal indentation, outside. All the indentation system does is to translate an indentation level into some or none `w_dedent's possibly followed by a `w_indent'. The work of translating these into element openings and closings is the job of w_crosslink(s2aimpl.markup). We dedent until we can find an enclosing or equal indentation level; then, if we have an enclosing level, we indent onto the requested level. The indentation level consists of two parts: an indent column and a "sub-character" level. Sub-character levels are needed because some constructs may need to open many blocks but only have one sensible column to mark them at. Besides, some constructs (like body text) are outside some others (like lists) even if they begin in the same column. {{{ define(`w_newindent', `ifelse(`$2',,`w_new_indents(`$1',0)', `w_new_indents(`$1',`$2')')') define(`w_new_indents', `w_compare_indent(`$1', `$2', w_dequote(defn(`@w_indstack')), `pushdef(`@w_indstack',`$1,$2')w_indent`'', `popdef(`@w_indstack')w_dedent`'w_new_indents(`$1',`$2')',)') define(`w_compare_indent', `ifelse(eval(`$1>$3'),1,`$5',eval(`$1<$3'),1,`$6', eval(`$2>$4'),1,`$5',eval(`$2<$4'),1,`$6',`$7')') define(`@w_indstack',`0,0') }}} ! 
List helpers These are facilities to iterate through lists (possibly many times). They are used by some table environments to track column types. {{{ define(`w_setup_list', `pushdef(`@w_list_len_$1', w_listlen(shift($@)))'dnl `pushdef(`@w_list_save_$1', w_combine(shift($@)))'dnl `pushdef(`@w_list_$1', defn(`@w_list_save_$1'))') define(`w_unsetup_list', `popdef(`@w_list_$1')popdef(`@w_list_save_$1')popdef(`@w_list_len_$1')') define(`w_reinit_list', `define(`@w_list_$1', defn(`@w_list_save_$1'))') define(`w_next_in_list', `w_pickn(1,w_dequote(defn(`@w_list_$1')))`''dnl `define(`@w_list_$1',w_combine(shift(w_dequote(defn(`@w_list_$1')))))') define(`w_length_list', `defn(`@w_list_len_$1')') }}} ! Title and other metadata These are not related to any specific markup and are thus defined here. {{{ define(`w_set_or_get', `ifelse(`$2',,`defn(`$1')',`define(`$1', `$2')')') define(`w_doc_id',) define(`w_documentclass',) define(`w_title', `w_set_or_get(`@w_title', `$1')') define(`w_gettitle', `w_title') define(`w_author', `w_set_or_get(`@w_author', `$1')') define(`w_date', `w_set_or_get(`@w_date', `$1')') define(`w_getdate', `w_date') define(`w_language', `define(`@w_language', `$1')'dnl `define(`@w_iso_language', ifelse(`$2',,`substr(`$1',0,2)',`$2'))') define(`w_char_coding', `define(`@w_char_coding', `$1')'dnl `define(`w_long_charset_name', ifelse(`$2',,`w_long_charset_name_for(`$1')',`$2'))') define(`w_long_charset_name_for', `ifelse(`$1',latin9,ISO-8859-15, `$1',ascii,US-ASCII, `$1',utf8,utf-8,ISO-8859-1)') }}} stx2any/common/quote_quotes.lsed 0000644 0001750 0001750 00000001002 10424125235 021011 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE Do apostrophe quoting before we start doing anything. {{{ t morequ : morequ s#`\([^']*\)'#REALOPENQUOTE\1REALCLOSEQUOTE#g t morequ /`/{ $i\ w_error(Unmatched open quote) N b morequ } s#'#`'w_apo`'#g s#REALOPENQUOTE#`#g s#REALCLOSEQUOTE#'#g }}} Convert tabs into spaces. This is best done in as early a phase as possible, so we don't have to write tabs in our formatting regexps. {{{ s# # #g }}} stx2any/common/templ.lm4 0000644 0001750 0001750 00000000315 10424125157 017153 0 ustar pkalliok pkalliok 0000000 0000000 This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE {{{ w_dumpdiv(frontmatter)dnl w_dumpdiv(ingr)dnl w_dumpdiv(body)dnl w_dumpdiv(backmatter)dnl }}} stx2any/stx-mode.el 0000644 0001750 0001750 00000013621 10424125264 016211 0 ustar pkalliok pkalliok 0000000 0000000 ;;; This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski ;;; and released under the license in ../LICENSE (defun stx-bracket-word-with (str) (forward-word 1) (insert str) (backward-word 1) (insert str)) (defun stx-make-bold () "Put in syntax for boldfacing the current word." (interactive) (stx-bracket-word-with "*")) (defun stx-make-italic () "Put in syntax for italicising the current word." (interactive) (stx-bracket-word-with "/")) (defun stx-make-literal () "Put in syntax for making the current word literal." (interactive) (stx-bracket-word-with "''")) (defun stx-make-underline () "Put in syntax for underlining the current word." (interactive) (stx-bracket-word-with "_")) (defun stx-make-heading () "Put in syntax for making a section heading." (interactive) (beginning-of-line) (insert "! 
")) (defvar stx-stx2any-args "" "Additional arguments to give to stx2any.") (defun stx-transform-buffer (fmt) "Process the buffer via stx2any. Possible formats are those supported by stx2any, namely: html, man, latex, docbook-xml, xhtml, text, (ps)." (interactive "sOutput format: ") (shell-command-on-region (point-min) (point-max) (concat "stx2any " stx-stx2any-args " -T " fmt) "*stx2any-output*") (switch-to-buffer-other-window "*stx2any-output*")) (defvar stx-preview-command "groffer" "Command to use for previewing postscript.") (defun stx-preview-buffer-as-webpage () "Preview the buffer as converted to a web page, via browse-url." (interactive) (let ((myfile (make-temp-name "/tmp/stx2any"))) (shell-command-on-region (point-min) (point-max) (concat "stx2any " stx-stx2any-args " -T html >" myfile)) (browse-url (concat "file://" myfile)))) ;;;###autoload (defun stx-preview-buffer () "Preview the buffer as it would be printed by stx-print-buffer. The actual command used for previewing can be set by the variable stx-preview-command." (interactive) (stx-send-buffer "man" stx-preview-command)) ;;;###autoload (defun stx-print-buffer () "Print the buffer via stx2any, groff and lpr. The actual command used for printing can be set by the variable lpr-command." (interactive) (stx-send-buffer "ps" lpr-command)) (defun stx-send-buffer (fmt command) "Helper function for stx-preview-buffer and stx-print-buffer." (shell-command-on-region (point-min) (point-max) (concat "stx2any " stx-stx2any-args " -T " fmt " | " command))) (defvar stx-mode-map (let ((mymap (make-sparse-keymap))) (define-key mymap "\C-c\C-c" 'stx-transform-buffer) (define-key mymap "\C-c\C-p" 'stx-preview-buffer) (define-key mymap "\C-cp" 'stx-print-buffer) (define-key mymap "\C-cb" 'stx-make-bold) (define-key mymap "\C-ci" 'stx-make-italic) (define-key mymap "\C-cl" 'stx-make-literal) (define-key mymap "\C-cu" 'stx-make-underline) (define-key mymap "\C-ch" 'stx-make-heading) (define-key mymap [menu-bar stx] (cons "Stx" (make-sparse-keymap "Stx"))) (define-key mymap [menu-bar stx stx-make-bold] '(menu-item "Make a word bold" stx-make-bold)) (define-key mymap [menu-bar stx stx-make-italic] '(menu-item "Make a word italic" stx-make-italic)) (define-key mymap [menu-bar stx stx-make-literal] '(menu-item "Make a word literal" stx-make-literal)) (define-key mymap [menu-bar stx stx-make-underline] '(menu-item "Underline a word" stx-make-underline)) (define-key mymap [menu-bar stx stx-make-heading] '(menu-item "Make current line a heading" stx-make-heading)) (define-key mymap [menu-bar stx stx-preview-buffer-as-webpage] '(menu-item "Preview as web page" stx-preview-buffer-as-webpage)) (define-key mymap [menu-bar stx stx-print-buffer] '(menu-item "Print buffer" stx-print-buffer)) (define-key mymap [menu-bar stx stx-preview-buffer] '(menu-item "Print preview" stx-preview-buffer)) (define-key mymap [menu-bar stx stx-transform-buffer] '(menu-item "Convert buffer" stx-transform-buffer)) mymap) "Keymap for Stx major mode.") (defvar stx-list-marker-regexp "^ *[-*#] ") (defvar stx-hard-divisor-regexp "^\\(---*\\|{{{\\|}}}\\)$") (defvar stx-paragraph-separate (concat "[ \t]*$\\|" (substring stx-hard-divisor-regexp 1) "\\|!\\|.*::$") "Regexp to match paragraph separators in Stx.") (defvar stx-paragraph-start (concat stx-paragraph-separate "\\|" (substring stx-list-marker-regexp 1)) "Regexp to match paragraph starts or separators in Stx.") (defvar stx-font-lock-keywords (append (list (cons stx-list-marker-regexp 'font-lock-builtin-face) (cons 
stx-hard-divisor-regexp 'font-lock-builtin-face)) '(("w_[a-z_]*\\|\\(un\\)?define\\|dnl" . font-lock-keyword-face) ("\\[\\[[- ]\\|[- ]\\]\\]\\| -- " . font-lock-builtin-face) ("[A-Za-z0-9)]\\(--\\)[(A-Za-z0-9]" 1 font-lock-builtin-face) ("\\(//\\|::\\)$" . font-lock-builtin-face) ("\\(^\\|[ (\"'-]\\)/\\([^ /][^/]*\\)/\\($\\|[ ,.;:?!)\"'-]\\)" 2 font-lock-type-face) ("\\(^\\|[ (\"'-]\\)\\*\\([^ *][^*]*\\)\\*\\($\\|[ ,.;:?!)\"'-]\\)" 2 font-lock-comment-face) ("\\(^\\|[ (\"'-]\\)_\\([^ _][^_]*\\)_\\($\\|[ ,.;:?!)\"'-]\\)" 2 font-lock-type-face) ("\\(^\\|[ (\"-]\\)''\\([^ '][^']*\\)''\\($\\|[ ,.;:?!)\"-]\\)" 2 font-lock-string-face) ("`\\([^']\\)'" . font-lock-constant-face) ("^\\(!!*\\)\\(.*\\)$" (1 font-lock-builtin-face) (2 font-lock-comment-face)))) "Faces for Stx fontification.") (defvar stx-mode-hook '() "Hooks to run upon entering Stx major mode.") ;;;###autoload (defun stx-mode () "A major mode for editing Stx (structured text) documents." (interactive) (kill-all-local-variables) (use-local-map stx-mode-map) (make-local-variable 'font-lock-defaults) (make-local-variable 'paragraph-start) (make-local-variable 'paragraph-separate) (setq major-mode 'stx-mode mode-name "Stx" font-lock-defaults '(stx-font-lock-keywords t) paragraph-start stx-paragraph-start paragraph-separate stx-paragraph-separate) (turn-on-font-lock) (auto-fill-mode 1) (run-hooks 'stx-mode-hook)) stx2any/docbook-xml/ 0000755 0001750 0001750 00000000000 10446213102 016333 5 ustar pkalliok pkalliok 0000000 0000000 stx2any/docbook-xml/make.lm4 0000644 0001750 0001750 00000013263 10424125170 017676 0 ustar pkalliok pkalliok 0000000 0000000 w_doc_id(s2aimpl.docbook-xml)dnl This file is copyright (c) 2004,2005,2006 by Panu Kalliokoski and released under the license in ../LICENSE w_use(examples/reflection-disclaimer) Definitions for DocBook XML. XML requires well-formed paragraphs, so we have our own paragraph system here. We put a paragraph inside most everything "just to be sure". Here are the macros to keep the result well-formed. {{{ define(`w_paragraph', `w_close—blockquote ends this thing—
w_open',`w_closew_softopen') w_define_env(:, `w_close
w_softopen') w_define_env(`w_float_n', `w_close$* w_open', `w_close
$1') }}} Other inlines. {{{ define(`w_link', `