antlr-2.7.7/0000755000175000017500000000000010522212350012575 5ustar twernertwerner
antlr-2.7.7/Makefile.in0000644000175000017500000001305010522211616014645 0ustar twernertwerner
## do not change this value
subdir=.

## get standard variables from autoconf - autoconf will replace
## this variable with content of "scripts/config.vars".
@stdvars@

## By default, make will jump into any subdirectory containing
## a file named "Makefile". This is done in the order implied by
## /bin/ls. You can override this by using variable SUBDIRS. For
## example, if not set, then make behaves as if
## SUBDIRS = antlr doc examples lib
## has been set.
SUBDIRS = antlr lib doc

## When using stdmake before any other rule, then the default
## rule is "all" and the behaviour of make is first to make all
## subdirectories and then all "local" targets with name all,
## clean, install, test etc.
## Sometimes it's useful to make the local target first and then
## the subdirs. To enforce this, just list the target in question
## before "stdmake". By doing so, your target will become the
## default. This is usually not what you want. Therefore introduce
## a pseudo rule (like this one) to tell make about the default target.
this : all

## get standard make rules from autoconf
@stdmake@

test clean distclean install ::
	@ if test -f examples/Makefile ; then \
	    @MAKE@ -C examples $@ ; \
	  fi

# Rule to remove all objects, cores, ANTLR generated,
# configure generated, etc. This is not a recursive rule
# because distclean removes files unconditionally
# included by subdirectory Makefiles.
#
maintainer-clean: distclean
	-@RMF@ -f configure

#
# Rule to make a tarball excluding all kinds of fluff
#
TIMESTAMP = $(shell date +%Y%m%d)
TAR_DIR = $(versioneddir)
TAR_FILE = $(versioneddir).tar

_tar:
	-@RMF@ $(TAR_DIR)
	ln -s $(srcdir) $(TAR_DIR)
	$(TAR) cfh $(TAR_FILE) \
	  --exclude CVS \
	  --exclude *.pyc \
	  --exclude *.o \
	  --exclude *.d \
	  --exclude *.lo \
	  --exclude *.a \
	  --exclude *.la \
	  --exclude *.lai \
	  --exclude *.so \
	  --exclude *.class \
	  --exclude .deps \
	  --exclude .depend \
	  --exclude config.cache \
	  --exclude config.status \
	  --exclude Config.make \
	  --exclude antlr-config \
	  --exclude run-antlr \
	  --exclude *~ \
	  --exclude core \
	  --exclude dmalloc.log \
	  --exclude .gdb_history \
	  --exclude ".nfs*" \
	  --exclude "$(TAR_DIR)/gen_doc/html" \
	  --exclude "$(TAR_DIR)/gen_doc/html/*" \
	  --exclude Makefile \
	  $(TAR_DIR)
	@CHMOD@ 660 $(TAR_FILE)
	@RMF@ $(TAR_DIR)

tar backup : _tar
	gzip -f --best $(TAR_FILE)

## When building a release, it's crucial that time stamps are up-to-date and that
## files have proper permission bits set. Since $(srcdir) might be under the
## control of Perforce (or another versioning system), I'm going to unpack the
## tar file again in a local directory and update time stamps as well as
## permissions.
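## Usage sketch (illustrative only; the target names come from this file,
## while the invocation itself assumes a tree where ./configure has already
## generated this Makefile and @ANTLR_JAR@ can be built):
##     make tar        -- pack $(versioneddir).tar via the _tar rule and gzip it
##     make release    -- re-pack together with @ANTLR_JAR@, with normalized
##                        permissions and fresh time stamps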
release : @ANTLR_JAR@ _tar
	@TAR@ xpf $(TAR_FILE)
	@CP@ @ANTLR_JAR@ $(TAR_DIR)
	find $(TAR_DIR) -type f | xargs @CHMOD@ 644
	find $(TAR_DIR) -type d | xargs @CHMOD@ 755
	@CHMOD@ 777 $(TAR_DIR)/configure
	find $(TAR_DIR) | xargs @TOUCH@
	find $(TAR_DIR)/antlr -name \*.java | xargs @TOUCH@
	@TAR@ cf $(TAR_FILE) $(TAR_DIR)
	gzip -f --best $(TAR_FILE)
	gzip -t -v $(TAR_FILE).gz
	@RMF@ $(TAR_DIR)

## This one for RK:
new_version antlr/Version.java:
	@@ECHO@ "Rebuilding Version.java"
	@@ECHO@ "package antlr;" > antlr/Version.java
	@@ECHO@ "public class Version {" >> antlr/Version.java
	@@ECHO@ " public static final String version = \"$(VERSION)\";" >> antlr/Version.java
	@@ECHO@ " public static final String subversion = \"$(SUBVERSION)\";" >> antlr/Version.java
	@@ECHO@ " public static final String patchlevel = \"$(PATCHLEVEL)\";" >> antlr/Version.java
	@@ECHO@ " public static final String datestamp = \"$(TIMESTAMP)\";" >> antlr/Version.java
	@@ECHO@ " public static final String project_version = \"$(VERSION).$(SUBVERSION).$(PATCHLEVEL) ($(TIMESTAMP))\";" >> antlr/Version.java
	@@ECHO@ "}" >> antlr/Version.java

## Installation is delegated to subdirectories - as configured.
## Here we just create a bin directory that should contain
## scripts to mess with ANTLR. Other things to do?
docdir = $(datadir)/doc/$(versioneddir)
extradir = $(datadir)/$(versioneddir)

install ::
	$(MKDIR) -p "$(bindir)"
	$(MKDIR) -p "$(extradir)"
	$(MKDIR) -p "$(docdir)"
	$(INSTALL) -m 755 scripts/run-antlr "$(bindir)/antlr"
	$(INSTALL) -m 755 scripts/antlr-config "$(bindir)/antlr-config"
	$(INSTALL) -m 444 @abs_top_srcdir@/extras/antlr-mode.el "$(extradir)"
	$(INSTALL) -m 444 @abs_top_srcdir@/extras/antlr-jedit.xml "$(extradir)"
	$(INSTALL) -m 444 @abs_top_srcdir@/LICENSE.txt "$(docdir)"
	$(INSTALL) -m 444 @abs_top_srcdir@/README.txt "$(docdir)"
	$(INSTALL) -m 444 @abs_top_srcdir@/INSTALL.txt "$(docdir)"

install ::
	@ECHO@ "installation done"

# Make sure that all generated files are removed
distclean_obj = \
	config.* \
	Makefile \
	scripts/antlr-config \
	scripts/antlr.sh \
	scripts/antlr.spec \
	scripts/config.deps \
	scripts/config.make \
	scripts/config.vars \
	scripts/cpp.sh \
	scripts/csc.sh \
	scripts/c.sh \
	scripts/cxx.sh \
	scripts/jar.sh \
	scripts/javac.sh \
	scripts/java.sh \
	scripts/lib.sh \
	scripts/link.sh \
	scripts/pyantlr.sh \
	scripts/python.sh \
	scripts/run-antlr \
	$(eof)

distclean :: clean
	@RMF@ $(distclean_obj)
	Q=`find . -name Makefile` && test -n "$$Q" && @RMF@ $${Q}

### phony targets - make these targets even if a file with the same name exists.
.PHONY: bootstrap backup maintainer-clean

##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
@stddeps@
##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
antlr-2.7.7/examples/0000755000175000017500000000000010522211616014417 5ustar twernertwerner
antlr-2.7.7/examples/Makefile.in0000644000175000017500000000052510522211616016466 0ustar twernertwerner
###############################################################################
# $Id:$
###############################################################################

## do not change this value
subdir=examples

## get configured variables
@stdvars@

## get configured (standard) rules
@stdmake@

## get configured dependencies
@stddeps@
antlr-2.7.7/examples/python/0000755000175000017500000000000010522211616015740 5ustar twernertwerner
antlr-2.7.7/examples/python/ASTsupport/0000755000175000017500000000000010522211616020024 5ustar twernertwerner
antlr-2.7.7/examples/python/ASTsupport/Makefile.in0000644000175000017500000000353110522211616022073 0ustar twernertwerner
## This file is part of ANTLR (http://www.antlr.org). Have a
## look into LICENSE.txt for license details. This file has
## been written by (C) Wolfgang Haefelinger, 2004.

## do not change this value
subdir=examples/python/ASTsupport

## get configured (standard) variables - checkout or modify
## scripts/config.vars[.in] for details.
@stdvars@

### how to get rid of the damned DOS line endings and -- almost
### equally bad -- stupid tab characters:
### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g'
dos2unix = :

### when running python we invoke python like ..
python = /bin/sh @abs_this_builddir@/scripts/python.sh

## get configured rules
@stdmake@

## By default we compile class files so we are ready to carry
## out a test. Note that deps have been set up in such a way
## that you can do a 'make compile' without having made
## antlr.jar before.
this : compile
all :: compile

g_FILES = \
	$(_srcdir)/astsupport.g \
	$(eol)

g_py_FILES = \
	ASTsupportParser.py \
	$(eol)

compile : $(g_py_FILES)

%.py : $(_srcdir)/%.py
	@-@RMF@ $@ && @CP@ $< $@

test :: test1

test1_deps = \
	astsupport.py \
	$(g_py_FILES) \
	$(buildtree)/scripts/python.sh \
	$(eol)

test1_cmd = \
	$(python) astsupport.py \
	$(eol)

test1 : $(test1_deps)
	@ $(test1_cmd)

$(g_py_FILES) : $(g_FILES)
	@ @RMF@ $(g_py_FILES)
	@ @ANTLR_COMPILE_CMD@ $(g_FILES)
	@ $(dos2unix) $(g_py_FILES)

$(g_py_FILES) : @ANTLR_JAR@
$(g_py_FILES) : $(buildtree)/scripts/antlr.sh

### cleanup astsupport
clean ::
	@@ECHO@ cleaning astsupport ...
	@ -@RMF@ $(g_py_FILES)
	@ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes

### get configured dependencies - for example, just list
### autoconf variable ANTLR_JAR as reference and it will
### be done automatically as stddeps contains the appropriate
### rule.
For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/ASTsupport/astsupport.py0000644000175000017500000000462410522211616022630 0ustar twernertwernerimport sys import antlr version = sys.version.split()[0] if version < '2.2.1': False = 0 if version < '2.3': True = not False class MyAST(antlr.CommonAST): pass class ASTType49(antlr.CommonAST): pass def testDefaultCreate(): t =factory.create() return checkNode(t, antlr.CommonAST, antlr.INVALID_TYPE) def testSpecificHomoCreate() : ### class names as strings not supported factory.setASTNodeClass(MyAST) t = factory.create() factory.setASTNodeClass(antlr.CommonAST) return checkNode(t, MyAST, antlr.INVALID_TYPE) def testDynamicHeteroCreate() : factory.setTokenTypeASTNodeType(49,ASTType49) t = factory.create(49) a = checkNode(t, ASTType49, 49) u = factory.create(55) b = checkNode(u,antlr.CommonAST, 55) v = factory.create(49,"",MyAST) c = checkNode(v, MyAST, 49) factory.setTokenTypeASTNodeType(49,None) return a and b and c def testNodeDup() : t = factory.create() a = t.equals(antlr.dup(t,factory)) b = not t.equals(None) u = factory.create(49,"",ASTType49) c = checkNode(antlr.dup(u,factory),ASTType49, 49) d = u.equals(antlr.dup(u,factory)) return a and b and c and d def testHeteroTreeDup() : x = factory.create(1,"[type 1]",MyAST) ## will be root y = factory.create(2,"[type 2]",MyAST) z = factory.create(3,"[type 3]",MyAST) sub = factory.create(49,"[type 49]",ASTType49) sub.addChild(factory.create(3,"[type 3 #2]",MyAST)) t = antlr.make(x,y,sub,z) dup_t = antlr.dupList(t,factory) ## check structure a = dup_t.equalsList(t) ## check types b = equalsNodeTypesList(t,dup_t) return a and b def checkNode(t,c, tokenType) : if not t: return False if t.__class__ != c: return False if t.getType()!=tokenType: return False return True def equalsNodeTypesList(this, t) : return antlr.cmptree(this,t,partial=False) def error(test) : print "Test "+test+" FAILED" def success(test) : print "Test "+test+" succeeded" if __name__ == "__main__" : factory = antlr.ASTFactory() funcs = [ testDefaultCreate, testSpecificHomoCreate, testDefaultCreate, testSpecificHomoCreate, testNodeDup, testHeteroTreeDup ] for f in funcs: if f(): success(f.__name__) else: error(f.__name__) import ASTsupportParser P = ASTsupportParser.Parser() P.main() antlr-2.7.7/examples/python/ASTsupport/astsupport.g0000644000175000017500000001322010522211616022416 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language=Python; } class ASTsupportParser extends Parser; options { buildAST = true; } /* Test the equals, equalsSubtree, and findAll methods plus AST enumeration. 
* The output should be: t is ( = a 1 ) u is ( = b ) v is ( = 4 ) w is = t.equalsTree(t) is true t.equalsTree(u) is false t.equalsTree(v) is false t.equalsTree(w) is false t.equalsTree(null) is false t.equalsTreePartial(t) is true t.equalsTreePartial(u) is false t.equalsTreePartial(v) is false t.equalsTreePartial(w) is true t.equalsTreePartial(null) is true a is ( A ( B C ( A B ) ) ( A B ) ( F ( A B ) ) ( A ( A B ) ) ) J A---------------------J | B-----A-----F----A | | | | C--A B A A | | | B B B x is second sibling of upperleftmost A: ( A B ) ( F ( A B ) ) ( A ( A B ) ) y is child B of x: B x.equalsTree(#(A B)) is true x.equalsList(#(A B)) is false x.equalsListPartial(#(A B)) is true a.equalsTree(#(A B)) is false a.equalsTreePartial(#(A B)) is true y.equalsList(#[B]) is true y.equalsListPartial(#[B]) is true a.findAllPartial(#(A B)): ( A ( B C ( A B ) ) ( A B ) ( F ( A B ) ) ( A ( A B ) ) ) J ( A B ) ( A B ) ( F ( A B ) ) ( A ( A B ) ) ( A B ) ( A B ) a.findAllPartial(#[A])): ( A ( B C ( A B ) ) ( A B ) ( F ( A B ) ) ( A ( A B ) ) ) J ( A B ) ( A B ) ( F ( A B ) ) ( A ( A B ) ) ( A B ) ( A ( A B ) ) ( A B ) a.findAll(#(A B)): ( A B ) ( A B ) ( F ( A B ) ) ( A ( A B ) ) ( A B ) ( A B ) Test results: equalsTree is ok equalsTreePartial is ok equalsList is ok equalsListPartial is ok */ { def main(self): astFactory = antlr.ASTFactory() t = #([ASSIGN,"="], [ID,"a"], [INT,"1"]) print("t is " + t.toStringList()) u = #([ASSIGN,"="], [ID,"b"]) print("u is " + u.toStringList()) v = #([ASSIGN,"="], [INT,"4"]) print("v is " + v.toStringList()) w = #[ASSIGN,"="] print("w is " + w.toStringList()) print("") r1=t.equalsTree(t);print "t.equalsTree(t) is ",r1 r2=t.equalsTree(u);print "t.equalsTree(u) is ",r2 r3=t.equalsTree(v);print "t.equalsTree(v) is ",r3 r4=t.equalsTree(w);print "t.equalsTree(w) is ",r4 r5=t.equalsTree(None);print "t.equalsTree(None) is ",r5 print("") r6=t.equalsTreePartial(t);print "t.equalsTreePartial(t) is ",r6 r7=t.equalsTreePartial(u);print "t.equalsTreePartial(u) is ",r7 r8=t.equalsTreePartial(v);print "t.equalsTreePartial(v) is ",r8 r9=t.equalsTreePartial(w);print "t.equalsTreePartial(w) is ",r9 r10=t.equalsTreePartial(None);print "t.equalsTreePartial(None) is ",r10 print("") a = #(None, ([A,"A"], ([B,"B"], [C,"C"], ([A,"A"],[B,"B"])), ([A,"A"],[B,"B"]), ([F,"F"], #([A,"A"], [B,"B"])), ([A,"A"], #([A,"A"], [B,"B"]))), [J,"J"]) print("a is "+a.toStringList()+"\n") print(" A---------------------J") print(" |") print(" B-----A-----F----A") print(" | | | |") print(" C--A B A A") print(" | | |") print(" B B B\n") x = a.getFirstChild().getNextSibling() print("x is second sibling of upperleftmost A: "+x.toStringList()) y = a.getFirstChild().getNextSibling().getFirstChild() print("y is child B of x: "+y.toStringList()) r11=x.equalsTree(#([A,"A"],[B,"B"]));print "x.equalsTree(#(A B)) is ",r11 r12=x.equalsList(#([A,"A"],[B,"B"]));print "x.equalsList(#(A B)) is ",r12 r13=x.equalsListPartial(#([A,"A"],[B,"B"]));print "x.equalsListPartial(#(A B)) is ",r13 r14=a.equalsTree(#([A,"A"],[B,"B"]));print "a.equalsTree(#(A B)) is ",r14 r15=a.equalsTreePartial(#([A,"A"],[B,"B"]));print "a.equalsTreePartial(#(A B)) is ",r15 r16=y.equalsList(#[B,"B"]);print "y.equalsList(#[B]) is ",r16 r17=y.equalsList(#[B,"B"]);print "y.equalsListPartial(#[B]) is ",r17 print("\na.findAllPartial(#(A B)):") enum = a.findAllPartial(#([A,"A"],[B,"B"])) for e in enum: print (e.toStringList()) print("\na.findAllPartial(#[A])):") enum = a.findAllPartial(#[A,"A"]) for e in enum: print (e.toStringList()) print("\na.findAll(#(A 
B)):") enum = a.findAll(#([A,"A"],[B,"B"])) for e in enum: print (e.toStringList()) print("\nTest results:") if r1 and not r2 and not r3 and not r4 and \ not r5 and r11 and not r14: print("equalsTree is ok") else: print("equalsTree is bad") if r6 and not r7 and not r8 and r9 and r10: print("equalsTreePartial is ok") else: print("equalsTreePartial is bad") if not r12 and r16: print("equalsList is ok") else : print("equalslist is bad") if r13 and r17: print("equalsListPartial is ok") else : print("equalslistPartial is bad") } defTokenTypes : ID INT ASSIGN PLUS A B C D E F G H I J K ; antlr-2.7.7/examples/python/heteroAST/0000755000175000017500000000000010522211616017576 5ustar twernertwernerantlr-2.7.7/examples/python/heteroAST/hetero.in0000644000175000017500000000001310522211616021406 0ustar twernertwerner3+4*5+21; antlr-2.7.7/examples/python/heteroAST/Makefile.in0000644000175000017500000000362510522211616021651 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/heteroAST ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/hetero.g \ $(eol) g_py_FILES = \ hetero_l.py \ hetero_p.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ hetero.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) hetero_l.py < $(_srcdir)/hetero.in ; \ $(python) hetero.py < $(_srcdir)/hetero.in ; \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup hetero clean :: @@ECHO@ cleaning hetero ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/heteroAST/hetero.g0000644000175000017500000000203210522211616021231 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ header { import hetero } options { language=Python; } /** This example demonstrates the heterogeneous tree construction * mechanism. Compare this example to examples/calc/calc.g * to see that I use tree node methods not a tree walker to compute * the result. */ class hetero_p extends Parser; options { buildAST = true; // uses CommonAST by default } // define a bunch of specific AST nodes to build. // can override at actual reference of tokens in grammar // below. 
tokens { PLUS; STAR; } expr : mexpr (PLUS^ mexpr)* SEMI! ; mexpr : atom (STAR^ atom)* ; atom: INT // could have done in tokens{} section ; class hetero_l extends Lexer; WS : (' ' | '\t' | '\n' | '\r') { _ttype = SKIP; } ; LPAREN: '(' ; RPAREN: ')' ; STAR: '*' ; PLUS: '+' ; SEMI: ';' ; protected DIGIT : '0'..'9' ; INT : (DIGIT)+ ; antlr-2.7.7/examples/python/heteroAST/hetero.py0000644000175000017500000000703210522211616021440 0ustar twernertwernerimport sys import antlr class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") class CalcAST(antlr.BaseAST): def __init__(self,*args): antlr.BaseAST.__init__(self) class BinaryOperatorAST(CalcAST): def __init__(self,*args): CalcAST.__init__(self,*args) def left(self): return self.getFirstChild() def right(self): t = self.left(); if not t: return None return t.getNextSibling() def c2(self): t = self.left() if t: t = t.getNextSibling() assert t return t ### A simple node to represent PLUS operation class PLUSNode(BinaryOperatorAST): def __init__(self,*args): BinaryOperatorAST.__init__(self,*args) ### Compute value of subtree; this is heterogeneous part :) def value(self): left = self.left() assert self r = self.c2() assert r return left.value() + r.value() def toString(self): return " +"; def __str__(self): return self.toString() def __repr__(self): return str(self) ### A simple node to represent MULT operation class MULTNode(BinaryOperatorAST): def __init__(self,*args): BinaryOperatorAST.__init__(self,*args) # Compute value of subtree; this is heterogeneous part :) def value(self): return self.left().value() * self.c2().value() def toString(self): return " *"; def __str__(self): return self.toString() def __repr__(self): return str(self) ### A simple node to represent an INT class INTNode(CalcAST): def __init__(self,*args): CalcAST.__init__(self,*args) self.v = 0 if args and isinstance(args[0],antlr.Token): self.v = int(args[0].getText()) # Compute value of subtree; this is heterogeneous part :) def value(self): return self.v def toString(self): return " " + str(self.v) def main(): import hetero_l import hetero_p L = hetero_l.Lexer() P = hetero_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression try: P.expr() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." 
import sys sys.exit(1) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" ### compute value and return r = ast.value() print "value is", r if __name__ == "__main__": main() antlr-2.7.7/examples/python/tinyc/0000755000175000017500000000000010522211616017066 5ustar twernertwernerantlr-2.7.7/examples/python/tinyc/Makefile.in0000644000175000017500000000354310522211616021140 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/tinyc ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/tinyc_l.g \ $(_srcdir)/tinyc_p.g \ $(eol) g_py_FILES = \ tinyc_l.py tinyc_p.py \ $(eol) compile : $(g_py_FILES) test :: test1 test1_deps = \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) tinyc_l.py < $(_srcdir)/tinyc.in ; \ $(python) tinyc_p.py < $(_srcdir)/tinyc.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup tinyc clean :: @@ECHO@ cleaning tinyc ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/tinyc/tinyc_l.g0000644000175000017500000000276510522211616020711 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ /* * Make sure to run antlr.Tool on the lexer.g file first! */ options { mangleLiteralPrefix = "TK_"; language=Python; } class tinyc_l extends Lexer; options { k=2; exportVocab=TinyC; charVocabulary = '\3'..'\377'; } tokens { "int"; "char"; "if"; "else"; "while"; } WS : (' ' | '\t' | '\n' { $newline;} | '\r') { _ttype = SKIP; } ; SL_COMMENT : "//" (~'\n')* '\n' { _ttype = Token.SKIP; $newline; } ; ML_COMMENT : "/*" ( { self.LA(2) != '/' }? 
'*' | '\n' { $newline; } | ~('*'|'\n') )* "*/" { $setType(SKIP); } ; LPAREN options { paraphrase="'('"; } : '(' ; RPAREN options { paraphrase="')'"; } : ')' ; LCURLY: '{' ; RCURLY: '}' ; STAR: '*' ; PLUS: '+' ; ASSIGN : '=' ; SEMI: ';' ; COMMA : ',' ; CHAR_LITERAL : '\'' (ESC|~'\'') '\'' ; STRING_LITERAL : '"' (ESC|~'"')* '"' ; protected ESC : '\\' ( 'n' | 'r' | 't' | 'b' | 'f' | '"' | '\'' | '\\' | '0'..'3' ( options { warnWhenFollowAmbig = false; } : DIGIT ( options { warnWhenFollowAmbig = false; } : DIGIT )? )? | '4'..'7' ( options { warnWhenFollowAmbig = false; } : DIGIT )? ) ; protected DIGIT : '0'..'9' ; INT : (DIGIT)+ ; ID options { testLiterals = true; paraphrase = "an identifier"; } : ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'_'|'0'..'9')* ; antlr-2.7.7/examples/python/tinyc/tinyc_p.g0000644000175000017500000000332710522211616020710 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ /* * Make sure to run antlr.Tool on the lexer.g file first! */ header "tinyc_p.__main__" { import tinyc_l import tinyc_p L = tinyc_l.Lexer() P = tinyc_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression try: P.program() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." sys.exit(0) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" } options { mangleLiteralPrefix = "TK_"; language=Python; } class tinyc_p extends Parser; options { importVocab=TinyC; } program : ( declaration )* EOF ; declaration : (variable) => variable | function ; declarator : id:ID | STAR id2:ID ; variable : type declarator SEMI ; function : type id:ID LPAREN (formalParameter (COMMA formalParameter)*)? RPAREN block ; formalParameter : type declarator ; type: ( TK_int | TK_char | id:ID ) ; block : LCURLY ( statement )* RCURLY ; statement : (declaration) => declaration | expr SEMI | TK_if LPAREN expr RPAREN statement ( TK_else statement )? | TK_while LPAREN expr RPAREN statement | block ; expr: assignExpr ; assignExpr : aexpr (ASSIGN assignExpr)? ; aexpr : mexpr (PLUS mexpr)* ; mexpr : atom (STAR atom)* ; atom: ID | INT | CHAR_LITERAL | STRING_LITERAL ; antlr-2.7.7/examples/python/tinyc/tinyc.in0000644000175000017500000000023610522211616020545 0ustar twernertwernerint i; int *i; int f(char c, char *d) { int f; c = '\033'+'\47'+'\''+'\\'; d = " \" '\\' foo"; i = c+3*f; if ( i ) { f = c; } else { f = 1; } } antlr-2.7.7/examples/python/unicode.IDENTs/0000755000175000017500000000000010522211616020413 5ustar twernertwernerantlr-2.7.7/examples/python/unicode.IDENTs/Makefile.in0000644000175000017500000000361410522211616022464 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/unicode.IDENTs ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. 
python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/ident.g \ $(eol) g_py_FILES = \ ident_l.py ident_p.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ && @CP@ $< $@ test :: test1 test1_deps = \ ident.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) ident_l.py < $(_srcdir)/ident.in \ $(python) ident.py < $(_srcdir)/ident.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup ident clean :: @@ECHO@ cleaning ident ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/unicode.IDENTs/ident.g0000755000175000017500000000566710522211616021707 0ustar twernertwernerheader { /* common code to all generated files */ def println(*args): if args: import sys enc = sys.getdefaultencoding() for arg in args[0:-1]: print arg.encode(enc,"replace"), print args[-1].encode(enc,"replace") } header "__main__" { // the main header pass } header "ident_l.__main__" { import sys,codecs def warn(msg): print >>sys.stderr,"warning:",msg sys.stderr.flush() def error(msg): print >>sys.stderr,"error:",msg sys.stderr.flush() try: sys.stdin = codecs.lookup("Shift-JIS")[-2](sys.stdin) except: warn("Japanese codecs required - please install.") sys.exit(0) L = Lexer() for token in L: // I'm being conservative here .. print token.__str__().encode("ascii","replace") } header "__init__" { // init - for all classes } header "ident_p.__init__" { // init - for ident_l } header "ident_l.__init__" { // init - for ident_p } options { language=Python; } /* ** Unicode example ** written by Matthew Ford (c)2000 Forward Computing and Control Pty. Ltd. ** email matthew.ford@forward.com.au ** ** The UnicodeLexer is the interesting part */ class ident_p extends Parser; options { buildAST = false; // skip the tree building defaultErrorHandler = false; // Don't generate parser error handlers } program : (statement)* // perhaps none EOF ; protected statement {exprToken=None} : lhs:IDENT ASSIGNS rhs:IDENT SEMI! { println(" Found statement: ",lhs.getText(),":=",rhs.getText() ); } | tt:TOTAL_TIME SEMI! { println(" Found TOTAL_TIME statement: ",tt.getText()); } | SEMI! {println(" Found empty statement"); } ; class ident_l extends Lexer; options { charVocabulary = '\u0000'..'\uFFFE'; // allow all possiable unicodes except -1 == EOF testLiterals = false; // in general do not test literals caseSensitiveLiterals=false; caseSensitive=false; defaultErrorHandler = false; // pass error back to parser k = 2; // two character lookahead for // versus /* } tokens { TOTAL_TIME = "\u5408\u8A08\u6642\u9593"; // total_time } // an identifier. Note that testLiterals is set to true! 
This means // that after we match the rule, we look in the literals table to see // if it's a literal or really an identifer // NOTE: any char > \u0080 can start an Ident // may need to restrict this more in some cases // \uFFFF is EOF so do not include it here, stop at \uFFFE IDENT options {testLiterals=true; paraphrase = "an identifier";} : ('a'..'z'|'_'|'$'|'\u0080'..'\uFFFE') ('a'..'z'|'_'|'0'..'9'|'$'|'\u0080'..'\uFFFE')* ; ASSIGNS options {paraphrase = ":=";} : ":=" ; SEMI options {paraphrase = ";";} : ';'; // white space is skipped by the parser WS : ( ' ' | '\t' | '\r'('\n')? {self.newline();} | '\n' {self.newline();} ) {$setType(Token.SKIP);} // way to set token type ; antlr-2.7.7/examples/python/unicode.IDENTs/ident.in0000755000175000017500000000013610522211616022051 0ustar twernertwernerEnglish := word; Japanise := 和英辞典【わえいじてん】; テスティング := testing; 合計時間; antlr-2.7.7/examples/python/unicode.IDENTs/ident.py0000755000175000017500000000220610522211616022073 0ustar twernertwernerimport sys import antlr import codecs import ident_l,ident_p def warn(msg): print >>sys.stderr,"warning:",msg sys.stderr.flush() def error(msg): print >>sys.stderr,"error:",msg sys.stderr.flush() sys.exit(1) ### Unicode handling depends very much on whether ### your terminal can handle (print) unicode chars. ### To be sure about it, just create a non ASCII ### letter and try to print it. If that is not going ### to work, we create an alternative method which ### maps non printable chars to '?'. c = u"\N{LATIN SMALL LETTER O WITH ACUTE}" try: print c except: warn("terminal can't display unicode chars.") sys.stderr.flush() ## I'm just going to redefine 'unicode' to return ## a ASCII string. def unicode(x): return x.__str__().encode("ascii","replace") ### Now for the input. This should ideally be done ### in the lexer .. ### replace stdin with a wrapper that spits out ### unicode chars. 
try: sys.stdin = codecs.lookup('Shift-JIS')[-2](sys.stdin) except: warn("Japanese codecs required - please install.") sys.exit(0) L = ident_l.Lexer() P = ident_p.Parser(L) P.program() antlr-2.7.7/examples/python/preserveWhiteSpace/0000755000175000017500000000000010522211616021550 5ustar twernertwernerantlr-2.7.7/examples/python/preserveWhiteSpace/keepws.py0000644000175000017500000000522310522211616023422 0ustar twernertwernerimport sys import antlr class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") stream = None def setstream(st): import keepws keepws.stream = st def getstream(): assert stream return stream ### referenced by treewalker def write(*args): import sys sys.stdout.write(*args) sys.stdout.flush() ### walk list of hidden tokens in order, printing them out def dumpHidden(t): assert stream while t: write(t.getText()) t = stream.getHiddenAfter(t) def pr(p): write(p.getText()) dumpHidden(p.getHiddenAfter()) def main(): import keepws_l import keepws_p import keepws_w L = keepws_l.Lexer() ### change token class L.setTokenObjectClass(antlr.CommonHiddenStreamToken) ### create new token stream - referenced by parser ### global stream st = antlr.TokenStreamHiddenTokenFilter(L); st.hide(keepws_p.WS); st.hide(keepws_p.SL_COMMENT); setstream(st) ### create parser with my stream P = keepws_p.Parser(st) P.setFilename(L.getFilename()) ### use this kind of AST nodes P.setASTNodeClass(antlr.CommonASTWithHiddenTokens) ### Parse the input expression try: P.slist() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." import sys sys.exit(1) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" W = keepws_w.Walker() W.slist(ast) print "Ast tree walked without problems." if __name__ == "__main__": main() antlr-2.7.7/examples/python/preserveWhiteSpace/keepws.g0000644000175000017500000000477410522211616023232 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ header { import keepws } options { language=Python; } class keepws_p extends Parser; options { buildAST = true; k=2; } tokens { CALL; // define imaginary token CALL } slist : ( stat )+ ; stat: LBRACE^ (stat)+ RBRACE | "if"^ expr "then" stat ("else" stat)? | ID ASSIGN^ expr SEMI | call ; expr : mexpr (PLUS^ mexpr)* ; mexpr : atom (STAR^ atom)* ; atom: INT | ID ; call: ID LPAREN (expr)? RPAREN SEMI {#call = #(#[CALL,"CALL"], #call);} ; class keepws_l extends Lexer; options { charVocabulary = '\3'..'\377'; } WS : (' ' | '\t' | ('\n'|'\r'('\n')?) {$newline;} )+ ; // Single-line comments SL_COMMENT : "//" (~('\n'|'\r'))* ('\n'|'\r'('\n')?) 
{$newline;} ; LBRACE: '{' ; RBRACE: '}' ; LPAREN: '(' ; RPAREN: ')' ; STAR: '*' ; PLUS: '+' ; SEMI: ';' ; ASSIGN : '=' ; protected DIGIT : '0'..'9' ; INT : (DIGIT)+ ; ID : ('a'..'z')+ ; class keepws_w extends TreeParser; slist : {keepws.dumpHidden(keepws.getstream().getInitialHiddenToken());} (stat)+ ; stat: #(LBRACE {keepws.pr(#LBRACE);} (stat)+ RBRACE {keepws.pr(#RBRACE);}) | #(i:"if" {keepws.pr(i);} expr t:"then" {keepws.pr(t);} stat (e:"else" {keepws.pr(e);} stat)?) | #(ASSIGN ID {keepws.pr(#ID); keepws.pr(#ASSIGN);} expr SEMI {keepws.pr(#SEMI);} ) | call ; expr : #(PLUS expr {keepws.pr(#PLUS);} expr) | #(STAR expr {keepws.pr(#STAR);} expr) | INT {keepws.pr(#INT);} | ID {keepws.pr(#ID);} ; call: { self.callDumpInstrumentation(#call); } #(CALL ID {keepws.pr(#ID);} LPAREN {keepws.pr(#LPAREN);} (expr)? RPAREN {keepws.pr(#RPAREN);} SEMI { keepws.write(#SEMI.getText()) keepws.write("}") keepws.dumpHidden(#SEMI.getHiddenAfter()) } ) ; /** Dump instrumentation for a call statement. * The reference to rule expr prints out the arg * and then at the end of this rule, we close the * generated called to dbg.invoke(). */ callDumpInstrumentation : #(CALL id:ID {keepws.write("{dbg.invoke(\""+id.getText()+"\", \"");} LPAREN (e:expr)? RPAREN SEMI {keepws.write("\"); ");} ) ; antlr-2.7.7/examples/python/preserveWhiteSpace/Makefile.in0000644000175000017500000000365510522211616023626 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/preserveWhiteSpace ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/keepws.g \ $(eol) g_py_FILES = \ keepws_l.py \ keepws_p.py \ keepws_w.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ keepws.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) keepws_l.py < $(_srcdir)/keepws.in ;\ $(python) keepws.py < $(_srcdir)/keepws.in ;\ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup keepws clean :: @@ECHO@ cleaning keepws ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. 
For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/preserveWhiteSpace/keepws.in0000644000175000017500000000020410522211616023372 0ustar twernertwerner// start comment // another comment a = 2; g(); { a=3*4; } if 3 then f(4 + b); else { b =a; // assign } // final antlr-2.7.7/examples/python/Makefile.in0000644000175000017500000000062010522211616020003 0ustar twernertwerner## This file is part of PyANTLR. See LICENSE.txt for license ## details..........Copyright (C) Wolfgang Haefelinger, 2004. ## subdir=examples/python @stdvars@ SUBDIRS_NOT = inherit.tinyc ### Couple of examples to be run and tested. Almost all of ### them are standard ANTLR 2.7.5 examples - just modified ### to have Python code instead Java and some minor changes. @stdmake@ @stddeps@ antlr-2.7.7/examples/python/unicode/0000755000175000017500000000000010522211616017366 5ustar twernertwernerantlr-2.7.7/examples/python/unicode/Makefile.in0000644000175000017500000000352610522211616021441 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/unicode ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/unicode.g \ $(eol) g_py_FILES = \ unicode_l.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ unicode.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) unicode.py < $(_srcdir)/unicode.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup unicode clean :: @@ECHO@ cleaning unicode ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/unicode/unicode.py0000644000175000017500000000216210522211616021367 0ustar twernertwernerimport sys import antlr import codecs import unicode_l def warn(msg): print >>sys.stderr,"warning:",msg sys.stderr.flush() def error(msg): print >>sys.stderr,"error:",msg sys.stderr.flush() ### Unicode handling depends very much on whether ### your terminal can handle (print) unicode chars. ### To be sure about it, just create a non ASCII ### letter and try to print it. If that is not going ### to work, we create an alternative method which ### maps non printable chars to '?'. 
c = u"\N{LATIN SMALL LETTER O WITH ACUTE}" try: print c except UnicodeEncodeError,e: warn("terminal can't display unicode chars.") sys.stderr.flush() ## I'm just going to redefine 'unicode' to return ## a ASCII string. def unicode(x): return x.__str__().encode("ascii","replace") ### Now for the input. This should ideally be done ### in the lexer .. ### replace stdin with a wrapper that spits out ### unicode chars. sys.stdin = codecs.lookup('latin1')[-2](sys.stdin) try: for token in unicode_l.Lexer(): print unicode(token) except antlr.TokenStreamException, e: error(str(e)) antlr-2.7.7/examples/python/unicode/unicode.g0000644000175000017500000000150310522211616021163 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language = Python; } class unicode_l extends Lexer; options { // Allow any char but \uFFFF (16 bit -1) charVocabulary='\u0000'..'\uFFFE'; } { done = False def uponEOF(self): done=True def another(self): pass } ID : ID_START_LETTER ( ID_LETTER )* ; WS : (' '|'\n') {$skip} ; protected ID_START_LETTER : '$' | '_' | 'a'..'z' | '\u0080'..'\ufffe' ; protected ID_LETTER : ID_START_LETTER { // got a LETTER_ID // handle it // whatever } | '0'..'9' ; // ANTLR should actually allow this here. Would enable me to write // something like: //{ // if __name__ == '__main__' : // ## test lexer // //} antlr-2.7.7/examples/python/unicode/unicode.in0000644000175000017500000000005310522211616021342 0ustar twernertwerner some test letters cool鉞ニヤ ハ antlr-2.7.7/examples/python/calc/0000755000175000017500000000000010522211616016642 5ustar twernertwernerantlr-2.7.7/examples/python/calc/calc.g0000644000175000017500000001310610522211616017715 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ header { /* import calc module - need to import my AST Nodes */ import calc // comment pass // another comment } /* no javadoc comments on file level except for classes and rules */ options { language="Python"; //language="Java"; //<- should at least generate a warning } /** go ahead with jdoc comments here ..*/ class CalcParser extends Parser("calc.Parser"); options { k=3; buildAST = true; // contrary to original is this going to change the default // node type from CommonAST to calc.Tnode. ASTLabelType = "calc.TNode"; exportVocab=Calc; } tokens { BODY; STMT; EXPR; TOK01; UNARY_MINUS; UNARY_PLUS; } { /* a sample function */ def hello(self): // just print hello .. print "hello, world" // end of sample function } body : "begin"! (expr ";"!)+ "end"! ; id : VALID | TYPID | CLSID ; expr : expr1 { ## = #(#[EXPR,"expr"],##); } ; expr1 : expr2 ((MINUS^|PLUS^) expr2)* ; expr2 : expr3 ((MULT^|DIV^) expr3)* ; expr3 : ( p:PLUS { #p.setType(UNARY_PLUS) } | m:MINUS { #m.setType(UNARY_MINUS) } )* expr4 ; expr4 : NUMBER | LPAREN^ expr1 RPAREN! ; /** My tree parser ..*/ class CalcWalker extends TreeParser("calc.Walker"); options { //ASTLabelType = "TNode"; } { /* sample function */ def hello(self): print "Hello, world" } body returns [s = 0] { r = 0 } : ( #(e:EXPR { // comment // // // s = self.expr(e) r = s + r // comment -> you should see print e.toStringTree() /* comment */ print e.toStringTree(), /* a comment spanning three lines. */ /* some comment that spans more than one line */ print "=>",s // comments work fine but there some limitations when having // C style comments (/* .. */). 
In Python a comment ends with // the end of line. Therefore I need to force a '\n' on // encountering '*/'. That works fine usually but be aware // something like // // x /* comment /* = /* comment /* 1 // // is legal in C/C++/Java but would end up in Python as // // x // # comment // = // # comment // 1 // // which is of course not valid. } ) )* ; expr returns [r = 0] : #(EXPR r=expr0()) ; expr0 returns [a = 0] { b = 0 } : UNARY_MINUS a=expr0() { a=-a } | UNARY_PLUS a=expr0() | j:NUMBER { a = self.tofloat(j) } | #(PLUS a=expr0() b=expr0()) { a = a + b } | #(MINUS a=expr0() b=expr0()) { a = a - b } | #(MULT a=expr0() b=expr0()) { a = a * b } | #(DIV a=expr0() b=expr0()) { a = a / b } | #(LPAREN a=expr0()) ; /** a javdoc comment */ nullp { i = 1; if #nullp == None : return print(" error in parser tree .. ") print(#nullp.toStringTree()) if i > 0: return } : #(INT INT) { } ; /** standard lexer - not of further interest here */ class CalcLexer extends Lexer; options { // className = "Scanner"; } { /* sample function */ def hello(self): print "Hello, world" } /** ws is supposed to be skipped as usual. * note that you need to write self.newline(). * You could also write $newline instead. Trailing * ';' are not harmful. */ WS : ( ' ' | '\t' | '\n' { $newline } | "\r\n" { $newline } | '\r' { $newline } ) { $skip } ; LPAREN : '(' ; RPAREN : ')' ; MULT : '*' ; DIV : '/' ; PLUS : '+' ; MINUS : '-' ; SEMI : ';' ; AND : '&' ; OR : '|' ; NOT : '!' ; EQ : '=' ; protected DIGIT : '0'..'9' ; protected INT : (DIGIT)+ ; NUMBER : INT ("." INT)? ; protected LOWER : 'a'..'z' ; protected UPPER : 'A'..'Z' ; protected LETTER : UPPER | LOWER ; ID : LOWER (LETTER|DIGIT|'-')* { $setType(VALID) } | (UPPER (UPPER|DIGIT|'-')*) => UPPER (UPPER|DIGIT|'-')* { $setType(CLSID) } | (UPPER (UPPER|LOWER|DIGIT|'-')*) { buffer = $getText $setType(TYPID) } ; antlr-2.7.7/examples/python/calc/calc.py0000644000175000017500000001116510522211616020122 0ustar twernertwerner## This file is part of PyANTLR. See LICENSE.txt for license ## details..........Copyright (C) Wolfgang Haefelinger, 2004. 
## ## $Id$ import sys import antlr class Parser(antlr.LLkParser): def __init__(self,*args): super(Parser,self).__init__(*args) self.num_err = 0 self.num_wrn = 0 def reportError(self,err): self.num_err += 1 super(Parser,self).reportError(err) def reportWarning(self,err): self.num_wrn += 1 super(Parser,self).reportWarning(err) class Walker(antlr.TreeParser): def __init__(self,*args): super(Walker,self).__init__(*args) self.depth = 0 def tofloat(self,ast): s = ast.getText() return float(s) def howmanysiblings(self,ast): if ast == None: return -1 r = 0 ast = ast.getNextSibling() while(ast != None) : r += 1 ast = ast.getNextSibling() return r def traceIn(self,s,ast) : self.depth += 1 print ">" * self.depth print " " + s + "( `" if (ast==None): print("()") else: print(ast.toStringList()) print " ') | siblings:",self.howmanysiblings(ast) def traceOut(self,s,ast): self.depth += 1 print(">" * self.depth) print(" " + s + "( `") if (ast==None): print("()") else: print(ast.toStringList()) print(" ')") self.depth -= 1 class TNode(antlr.CommonAST): def __init__(self,token=None): antlr.CommonAST.__init__(self,token) ### change printing style def toStringTree(self): ts = "" kid = self.getFirstChild() if kid: ts += "{" ts += " " + self.toString() if kid: ts += kid.toStringList() ts += "}" return ts class BodyNode(antlr.CommonAST): def __init__(self,token=None): antlr.CommonAST.__init__(self,token) ### change printing style def toStringTree(self): ts = "BODY: " kid = self.getFirstChild() if kid: ts += "{" ts += " " + self.toString() if kid: ts += kid.toStringList() ts += "}" return ts class ExprNode(antlr.CommonAST): def __init__(self,token=None): antlr.CommonAST.__init__(self,token) ### change printing style def toStringTree(self): ts = "EXPR: " kid = self.getFirstChild() if kid: ts += "{" ts += " " + self.toString() if kid: ts += kid.toStringList() ts += "}" return ts class NumberNode(antlr.CommonAST): def __init__(self,token=None): antlr.CommonAST.__init__(self,token) ### change printing style def toStringTree(self): ts = "NUMBER: " kid = self.getFirstChild() if kid: ts += "{" ts += " " + self.toString() if kid: ts += kid.toStringList() ts += "}" return ts class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") def main(): import CalcLexer import CalcParser import CalcWalker L = CalcLexer.Lexer() P = CalcParser.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression P.body() if(P.num_err>0): print "*** " + P.num_err + " error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." 
import sys sys.exit(1) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" W = CalcWalker.Walker() s = W.body(ast) print "*sum =>",s if __name__ == "__main__": main() antlr-2.7.7/examples/python/calc/Makefile.in0000644000175000017500000000403510522211616020711 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/calc ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/calc.g \ $(eol) g_py_FILES = \ CalcLexer.py \ CalcParser.py \ CalcWalker.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test2 test1_deps = \ calc.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test2_deps = \ $(test1_deps) \ calc.py \ $(eol) test1_cmd = \ $(python) CalcLexer.py < $(_srcdir)/calc.in \ $(eol) test2_cmd = \ $(python) calc.py < $(_srcdir)/calc.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) test2 : $(test2_deps) @ $(test2_cmd) calc : test $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup calc clean :: @@ECHO@ cleaning calc ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. 
For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/calc/calc.in0000644000175000017500000000015210522211616020072 0ustar twernertwernerbegin 1+2; 4+5; --1; ++1; +-1; -+1; -1; +1; -0; +0; 1+-2; -1+--2; 1+2*3; (1+2)*3; end antlr-2.7.7/examples/python/lexerTester/0000755000175000017500000000000010522211616020246 5ustar twernertwernerantlr-2.7.7/examples/python/lexerTester/LT5test.py0000644000175000017500000000072210522211616022125 0ustar twernertwerner import sys import antlr import SimpleLexer5 import LexerTester if __name__ == '__main__': try: lexer = SimpleLexer5.Lexer(sys.stdin) parser = LexerTester.Parser(lexer) parser.setFilename('') # Parse the input expression parser.source_text() except antlr.TokenStreamException, e: sys.stderr.write('exception: ' + str(e) + '\n') except antlr.RecognitionException, e: sys.stderr.write('exception: ' + str(e) + '\n') antlr-2.7.7/examples/python/lexerTester/test.in0000644000175000017500000000000510522211616021550 0ustar twernertwernerAAA antlr-2.7.7/examples/python/lexerTester/Makefile.in0000644000175000017500000000450410522211616022316 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/lexerTester ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/lexertester.g \ $(_srcdir)/simplelexer1.g \ $(_srcdir)/simplelexer2.g \ $(_srcdir)/simplelexer3.g \ $(_srcdir)/simplelexer4.g \ $(_srcdir)/simplelexer5.g \ $(eol) g_py_FILES = \ LexerTester.py \ SimpleLexer1.py \ SimpleLexer2.py \ SimpleLexer3.py \ SimpleLexer4.py \ SimpleLexer5.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ LT1test.py \ LT2test.py \ LT3test.py \ LT4test.py \ LT5test.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) LT1test.py < $(_srcdir)/test.in; \ $(python) LT2test.py < $(_srcdir)/test.in; \ $(python) LT3test.py < $(_srcdir)/test.in; \ $(python) LT4test.py < $(_srcdir)/test.in; \ $(python) LT5test.py < $(_srcdir)/test.in; \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup filter clean :: @@ECHO@ cleaning filter ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. 
For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/lexerTester/LT3test.py0000644000175000017500000000072210522211616022123 0ustar twernertwerner import sys import antlr import SimpleLexer3 import LexerTester if __name__ == '__main__': try: lexer = SimpleLexer3.Lexer(sys.stdin) parser = LexerTester.Parser(lexer) parser.setFilename('') # Parse the input expression parser.source_text() except antlr.TokenStreamException, e: sys.stderr.write('exception: ' + str(e) + '\n') except antlr.RecognitionException, e: sys.stderr.write('exception: ' + str(e) + '\n') antlr-2.7.7/examples/python/lexerTester/LT2test.py0000644000175000017500000000072210522211616022122 0ustar twernertwerner import sys import antlr import SimpleLexer2 import LexerTester if __name__ == '__main__': try: lexer = SimpleLexer2.Lexer(sys.stdin) parser = LexerTester.Parser(lexer) parser.setFilename('') # Parse the input expression parser.source_text() except antlr.TokenStreamException, e: sys.stderr.write('exception: ' + str(e) + '\n') except antlr.RecognitionException, e: sys.stderr.write('exception: ' + str(e) + '\n') antlr-2.7.7/examples/python/lexerTester/simplelexer2.g0000644000175000017500000000070610522211616023034 0ustar twernertwerner// This is -*- ANTLR -*- code header { import sys } options { language = "Python"; } //---------------------------------------------------------------------------- // The lexer //---------------------------------------------------------------------------- class SimpleLexer2 extends Lexer; options { k = 1; // A lookahead depth of 1 codeGenDebug = true; } A : ( 'A' ) ; //NL : ( '\r' ) { self.newline() } // ; antlr-2.7.7/examples/python/lexerTester/shiplist0000644000175000017500000000023110522211616022024 0ustar twernertwernerlexertester.g simplelexer1.g simplelexer2.g simplelexer3.g simplelexer4.g simplelexer5.g LT1test.py LT2test.py LT3test.py LT4test.py LT5test.py test1.in antlr-2.7.7/examples/python/lexerTester/simplelexer1.g0000644000175000017500000000067610522211616023041 0ustar twernertwerner// This is -*- ANTLR -*- code header { import sys } options { language = "Python"; } //---------------------------------------------------------------------------- // The lexer //---------------------------------------------------------------------------- class SimpleLexer1 extends Lexer; options { k = 1; // A lookahead depth of 1 codeGenDebug = true; } A : 'A' ; //NL : '\r' { self.newline() } // ; antlr-2.7.7/examples/python/lexerTester/LT4test.py0000644000175000017500000000072210522211616022124 0ustar twernertwerner import sys import antlr import SimpleLexer4 import LexerTester if __name__ == '__main__': try: lexer = SimpleLexer4.Lexer(sys.stdin) parser = LexerTester.Parser(lexer) parser.setFilename('') # Parse the input expression parser.source_text() except antlr.TokenStreamException, e: sys.stderr.write('exception: ' + str(e) + '\n') except antlr.RecognitionException, e: sys.stderr.write('exception: ' + str(e) + '\n') antlr-2.7.7/examples/python/lexerTester/simplelexer3.g0000644000175000017500000000065610522211616023041 0ustar twernertwerner// This is -*- ANTLR -*- code header { import sys } options { language = "Python"; } //---------------------------------------------------------------------------- // The lexer //---------------------------------------------------------------------------- class SimpleLexer3 extends Lexer; options { k = 1; // A lookahead depth of 1 codeGenDebug = true; } A : 'A' { print "found an 'A'!" 
} ; antlr-2.7.7/examples/python/lexerTester/LT1test.py0000644000175000017500000000072210522211616022121 0ustar twernertwerner import sys import antlr import SimpleLexer1 import LexerTester if __name__ == '__main__': try: lexer = SimpleLexer1.Lexer(sys.stdin) parser = LexerTester.Parser(lexer) parser.setFilename('') # Parse the input expression parser.source_text() except antlr.TokenStreamException, e: sys.stderr.write('exception: ' + str(e) + '\n') except antlr.RecognitionException, e: sys.stderr.write('exception: ' + str(e) + '\n') antlr-2.7.7/examples/python/lexerTester/simplelexer4.g0000644000175000017500000000066210522211616023037 0ustar twernertwerner// This is -*- ANTLR -*- code header { import sys } options { language = "Python"; } //---------------------------------------------------------------------------- // The lexer //---------------------------------------------------------------------------- class SimpleLexer4 extends Lexer; options { k = 1; // A lookahead depth of 1 codeGenDebug = true; } A : ( 'A' ) { print "found an 'A'!" } ; antlr-2.7.7/examples/python/lexerTester/simplelexer5.g0000644000175000017500000000066310522211616023041 0ustar twernertwerner// This is -*- ANTLR -*- code header { import sys } options { language = "Python"; } //---------------------------------------------------------------------------- // The lexer //---------------------------------------------------------------------------- class SimpleLexer5 extends Lexer; options { k = 1; // A lookahead depth of 1 codeGenDebug = true; } A : ( 'A' { print "found an 'A'!" } ) ; antlr-2.7.7/examples/python/lexerTester/lexertester.g0000644000175000017500000000144010522211616022763 0ustar twernertwerner// This is -*- ANTLR -*- code header { import sys } options { language = "Python"; } //---------------------------------------------------------------------------- // The lexertester parser //---------------------------------------------------------------------------- class LexerTester extends Parser; options { k = 1; // A lookahead depth of 1 buildAST = false; // no AST required } // This is a simple rule that can be used to test the Lexer. It will output // every token it sees using a complete description (including file, line // and column info). source_text : ( token:. { sys.stdout.write("lexertester: " + \ self.getFilename() + ':' + \ str(token) + '\n') } )* ; antlr-2.7.7/examples/python/filter/0000755000175000017500000000000010522211616017225 5ustar twernertwernerantlr-2.7.7/examples/python/filter/Makefile.in0000644000175000017500000000342310522211616021274 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/filter ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. 
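Looking back at the lexerTester example above: the five LT*test.py drivers are identical except for the SimpleLexerN module they import. A hedged sketch that exercises all five lexers against the same input in one loop is shown below (the test.in path is an assumption taken from the Makefile's test1_cmd; the real harness keeps the five separate scripts).

import sys
import antlr
import LexerTester
import SimpleLexer1, SimpleLexer2, SimpleLexer3, SimpleLexer4, SimpleLexer5

for module in (SimpleLexer1, SimpleLexer2, SimpleLexer3,
               SimpleLexer4, SimpleLexer5):
    try:
        # Each generated lexer feeds the same LexerTester parser, which just
        # prints every token it receives (see the source_text rule).
        lexer = module.Lexer(open("test.in"))
        parser = LexerTester.Parser(lexer)
        parser.setFilename("test.in")
        parser.source_text()
    except antlr.ANTLRException, e:
        sys.stderr.write("exception: " + str(e) + "\n")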
this : compile all :: compile g_FILES = \ $(_srcdir)/filter.g \ $(eol) g_py_FILES = \ filter_l.py \ $(eol) compile : $(g_py_FILES) test :: test1 test1_deps = \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) filter_l.py < $(_srcdir)/filter.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup filter clean :: @@ECHO@ cleaning filter ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/filter/filter.g0000644000175000017500000000136010522211616020662 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ header "Lexer.__main__" { // main - create and run lexer from stdin if __name__ == "__main__": import sys import antlr import filter_l // create lexer - shall read from stdin L = filter_l.Lexer() try: token = L.nextToken() while not token.isEOF(): print token token = L.nextToken() except antlr.TokenStreamException, e: print "error: exception caught while lexing:", e // end of main } options { language="Python"; } class filter_l extends Lexer; options { k=2; filter=true; } P : "
<p>" ;
BR: "<br>
" ; antlr-2.7.7/examples/python/filter/filter.in0000644000175000017500000000266210522211616021050 0ustar twernertwerner A test file

ANTLR 2.xx Meta-Language

ANTLR 2.0 accepts three types of grammar specifications -- parsers, lexers, and tree-parsers (also called tree-walkers). Because ANTLR 2.0 uses LL(k) analysis for all three grammar variants, the grammar specifications are similar, and the generated lexers and parsers behave similarly.

Note: in this document, the word "parser" usually includes tree-parsers as well as token stream parsers, except where noted.

Meta-Language Vocabulary


Whitespace. Spaces, tabs, and newlines are separators in that they can separate ANTLR vocabulary

Download ANTLR 2.4.0.

ANTLR 2.4.0 release notes

ANTLR Meta-Language

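The text above is filter.in, the input for the filter example. Because filter.g declares filter=true, the generated filter_l lexer silently discards everything that does not match the P or BR rules, so running it over this file reports only the paragraph and line-break tags. The grammar's Lexer.__main__ header already contains a driver; the condensed sketch below restates it only to make the flow explicit (the filter.in path is assumed).

import antlr
import filter_l

# With filter=true, nextToken() only returns tokens for rules that matched;
# all other input is skipped rather than reported as an error.
lexer = filter_l.Lexer(open("filter.in"))
try:
    token = lexer.nextToken()
    while not token.isEOF():
        print token
        token = lexer.nextToken()
except antlr.TokenStreamException, e:
    print "error: exception caught while lexing:", e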
antlr-2.7.7/examples/python/multiParser/0000755000175000017500000000000010522211616020247 5ustar twernertwernerantlr-2.7.7/examples/python/multiParser/multiparser.in0000644000175000017500000000001510522211616023142 0ustar twernertwernerA B C C B A antlr-2.7.7/examples/python/multiParser/parser2.g0000644000175000017500000000051510522211616021776 0ustar twernertwerner/* This grammar demonstrates the use of two parsers sharing a token * vocabulary with a single lexer. */ header { # empty header } options { language="Python"; } class SimpleParser2 extends Parser; options { k=3; importVocab=Simple; } simple : (x)+; x : (a | b); a : C B A; b : D B A; antlr-2.7.7/examples/python/multiParser/Makefile.in0000644000175000017500000000364210522211616022321 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/filter ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ## two abbrevs to make shorten things _srcdir = @abs_top_srcdir@/examples/python/filter thisdir = @abs_this_builddir@/examples/python/filter ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/filter.g \ $(eol) g_py_FILES = \ filter_l.py \ $(eol) compile : $(g_py_FILES) test :: test1 test1_deps = \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) filter_l.py < $(_srcdir)/filter.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup filter clean :: @@ECHO@ cleaning filter ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/multiParser/parser1.g0000644000175000017500000000053210522211616021774 0ustar twernertwerner/* This grammar demonstrates the use of two parsers sharing a token * vocabulary with a single lexer. */ header { # empty header } options { language="Python"; } class SimpleParser extends Parser; options { k=1; importVocab=Simple; } simple: ( x )+ ; x: (a) => a | b ; a : A B C; b : A B D; antlr-2.7.7/examples/python/multiParser/lexer.g0000644000175000017500000000066610522211616021546 0ustar twernertwerner/* This grammar demonstrates the use of two parsers sharing a token * vocabulary with a single lexer. 
*/ header { # empty header } options { language="Python"; } class SimpleLexer extends Lexer; options { exportVocab=Simple; } WS_ : (' ' | '\t' { self.tab() } | '\n' { self.newline() } | '\r') { $setType(Token.SKIP) } ; A : 'a' | 'A' ; B : 'b' | 'B' ; C : 'c' | 'C' ; D : 'd' | 'D' ; antlr-2.7.7/examples/python/multiParser/multiparser.py0000644000175000017500000000236410522211616023175 0ustar twernertwerner import sys import traceback import antlr import SimpleLexer import SimpleParser import SimpleParser2 lexer = None parser = None parser2 = None class Main: global lexer, parser, parser2 def __init__(self): try: lexer = SimpleLexer.Lexer(sys.stdin); # Invoke first parser sys.stdout.write("first parser" + '\n') parser = SimpleParser.Parser(lexer) parser.simple(); # Now we need to get the inputState from the first parser # this includes data about guessing and stuff like it. # If we don't do this and create the second parser # with just the lexer object we might (doh! will!) miss tokens # read for lookahead tests. self.inputstate = parser.getInputState() # When first parser runs out, invoke secnond parser sys.stdout.write("second parser" + '\n') parser2 = SimpleParser2.Parser(self.inputstate) parser2.simple() except antlr.ANTLRException, e: sys.stderr.write("exception: " + str(e) + '\n') #apply(traceback.print_exception, sys.exc_info()) except Exception, e: sys.stderr.write("exception: " + str(e) + '\n') #apply(traceback.print_exception, sys.exc_info()) if __name__ == '__main__': Main() antlr-2.7.7/examples/python/linkChecker/0000755000175000017500000000000010522211616020162 5ustar twernertwernerantlr-2.7.7/examples/python/linkChecker/LinkChecker.py0000644000175000017500000001711710522211616022725 0ustar twernertwerner import sys import os, os.path import string import traceback import antlr version = sys.version.split()[0] if version < '2.2.1': False = 0 if version < '2.3': True = not False import LinkExtractor class LinkListener: def hrefReference(self, target, line): raise NotImplementedError def imageReference(self, imageFileName, line): raise NotImplementedError class LinkChecker(LinkListener): ### Record which files we have seen so that we don't get into an # infinite loop and for efficiency. The absolute path is stored here # to uniquely identify the files. That is, a file can be arrived # at from many different locations such as help.html from . # and ../help.html from a directory below. # # This table is shared by all instances of LinkChecker. # visited = {} ### A table of the images visited by any document; a cache of correctness imgVisited = {} recursionDepth = 0 separator = "/" # not OS sensitive in HTML localSeparator = None def __init__(self, document): self.document = document self.directory = "." LinkChecker.localSeparator = os.sep def checkLinkRules(self, fName, line): # Check case of path (check for UNIX compatibility on a PC)! 
offensive = LinkChecker.offensivePathMember(self.directory + separator + fName) if offensive: file_ = '' try: f = file(offensive) file_ = os.path.normcase(offensive) self.error("Case mismatch in reference " + fName + ":" + os.sep + "\treal name is " + os.path.basename(file_) + os.sep + "\treal absolute path is " + file_, line); return False except Exception, e: self.error("internal error: cannot get canonical name for " + offensive, line); if LinkChecker.pathIsAbsolute(fName): self.error("Reference to " + fName + " with absolute path", line); return False; return True def doCheck(self): if self.document[-5:] != ".html": return # prevent infinite recursion to this file if LinkChecker.isVisited(self.document): return LinkChecker.visit(self.document) LinkChecker.recursionDepth += 1 f = file(self.document) lexer = LinkExtractor.Lexer(f) lexer.addLinkListener(self) # this will parse whole file since all tokens are skipped lexer.nextToken() LinkChecker.recursionDepth -= 1 def error(self, err, line): d = "" try: # f = file(self.document) d = os.path.normcase(self.document) except Exception, e: sys.stderr.write("internal error: cannot find file that has error\n") sys.exit(0) sys.stderr.write(d + ":" + str(line) + ": error: " + err + '\n') def pathIsAbsolute(path): return path[0] == '/' or path[1] == ':' pathIsAbsolute = staticmethod(pathIsAbsolute) def fileProtocolURL(target): return target.find("://") == -1 and \ not target[:7] == "mailto:" and \ not target[:5] == "news:" fileProtocolURL = staticmethod(fileProtocolURL) def getParent(path): return os.path.join(os.path.split(path)[:-1]) getParent = staticmethod(getParent) def hrefReference(self, target, line): sys.stdout.write(self.document + ":" + str(line) + ": href to " + target + '\n') # recursively check the target document unless non-file ref if LinkChecker.fileProtocolURL(target): # prune off any #name reference on end of file pound = target.find('#') path = target if pound != -1: path = target[:pound] # rip off #name on end, leave file if not len(path): return # ref to name in this file # first check existence on disk f = self.directory + os.sep + path if not os.path.exists(f): self.error("Reference to missing file " + path, line) return # check the case self.checkLinkRules(path, line); try: # Link is ok, now follow the link chk = LinkChecker.Lexer(self.directory + os.sep + path) chk.doCheck() except Exception, e: self.error("Document does not exist: " + target, line) def imageLinkIsOk(file_): # f = file(file_) f = os.path.normcase(file_) b = f in LinkChecker.imgVisited.keys() if b: return True return False imageLinkIsOk = staticmethod(imageLinkIsOk) def imageReference(self, imageFileName, line): # first check if we have seen this exact file try: if LinkChecker.imageLinkIsOk(self.directory + os.sep + imageFileName): return f = self.directory + os.sep + imageFileName if not os.path.exists(f): self.error("Reference to missing file " + imageFileName, line); return; if self.checkLinkRules(imageFileName, line): LinkChecker.visitImage(self.directory + os.sep + imageFileName) except Exception, e: sys.stderr.write("internal error: " + str(e) + '\n') ### # Given a path to a file or dir, is the case of the reference # the same as the actual path on the disk? This is only # meaningful on a PC which is case-insensitive (not a real # file system). # # Returns null if there is nothing offensive and the file exists. # Returns offending file/dir if it does not exist or # it has there is a case mismatch for it. 
The last file is checked # first followed by the parent directory, recursively, all the way # to the absolute or relative path root in that String; i.e., we parse # from right to left. # # Because the File object won't actually go get the real filename # from the disk so we can compare, we must get a directory listing # of the directory and then look for the referenced file or dir. # For example, for "./images/logo.gif" we would check "./images" dir # listing for "logo.gif" with the appropriate case and then check # directory "." for a dir called images with the right case. When # no parent exists, we can stop looking for case problems. def offensivePathMember(fName): sys.stdout.write("caseMismatch(" + fName + ")\n"); # have we reached the root? (stopping condition) if not fName or not LinkChecker.getParent(fName): return None parent = LinkChecker.getParent(fName) fName = os.path.basename(fName) # f = file(parent) parentFiles = os.path.split(parent) sys.stdout.write("checking dir " + parent + " for " + fName + '\n') # handle weird stuff like "c:/doc/../foo"; skip this parent dir if fName == "..": return LinkChecker.offensivePathMember(LinkChecker.getParent(parent)) for i in range(len(parentFiles)): sys.stdout.write("is it " + parentFiles[i] + "?\n") if string.lower(parentFiles[i]) == fName: if not parentFiles[i] == fName: sys.stdout.write("case mismatch " + fName + " in " + parent + '\n') return parent + LinkChecker.separator + fName # found a match, verify parent is ok return LinkChecker.offensivePathMember(parent) sys.stdout.write("can't find " + fName + " in " + parent + '\n') return parent + LinkChecker.separator + fName offensivePathMember = staticmethod(offensivePathMember) def visit(file_): # f = file(file_) f = os.path.normcase(file_) LinkChecker.visited[f] = True visit = staticmethod(visit) def isVisited(file_): # f = file(file_) f = os.path.normcase(file_) return f in LinkChecker.visited.keys() isVisited = staticmethod(isVisited) def visitImage(file_): # f = file(file_) f = os.path.normcase(file_) sys.stdout.write("caching image " + f + '\n') LinkChecker.imgVisited[f] = True visitImage = staticmethod(visitImage) class Main: def __init__(self): chk = LinkChecker(sys.argv[1]) try: chk.doCheck() except Exception, e: sys.stderr.write("Exception: " + str(e) + '\n'); apply(traceback.print_exception, sys.exc_info()) if __name__ == "__main__": Main() antlr-2.7.7/examples/python/linkChecker/Makefile.in0000644000175000017500000000354010522211616022231 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/linkChecker ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. 
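For reference before the linkChecker build rules: the LinkChecker class above never reads HTML itself; it registers as a listener on the LinkExtractor lexer generated from links.g (further below), and a single nextToken() call scans the whole file because every rule is skipped, firing hrefReference and imageReference callbacks along the way. A minimal hypothetical listener that only prints what the lexer finds might look like this (the test.html path is an assumption).

import antlr
import LinkExtractor

class PrintingListener:
    # Same callback interface as the LinkListener class defined above.
    def hrefReference(self, target, line):
        print "line %d: href -> %s" % (line, target)
    def imageReference(self, src, line):
        print "line %d: img  -> %s" % (line, src)

lexer = LinkExtractor.Lexer(open("test.html"))
lexer.addLinkListener(PrintingListener())
lexer.nextToken()   # one call scans the entire file since all tokens are skipped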
this : compile all :: compile g_FILES = \ $(_srcdir)/links.g \ $(eol) g_py_FILES = \ LinkExtractor.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ LinkChecker.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) LinkChecker.py $(_srcdir)/test.html \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup filter clean :: @@ECHO@ cleaning filter ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/linkChecker/links.g0000644000175000017500000000540710522211616021460 0ustar twernertwerner header { import sys import string } header "LinkExtractor.__init__" { self.listener = None } options { language = "Python"; } /** Parse an entire html file, firing events to a single listener * for each image and href link encountered. All tokens are * defined to skip so the lexer will continue scarfing until EOF. */ class LinkExtractor extends Lexer; options { caseSensitive=false; k=2; filter=SCARF; charVocabulary='\3'..'\177'; } { def addLinkListener(self, listener): self.listener = listener def removeLinkListener(self, listener): self.listener = None def fireImageLinkEvent(self, target, line): self.listener.imageReference(target, line) def fireHREFLinkEvent(self, target, line): self.listener.hrefReference(target, line) /** strip quotes from "..." or '...' strings */ def stripQuotes(src): h = src.find('"') if h == -1: h = src.index("'") t = src.rfind('"') if t == -1: t = src.rindex("'"); if h == -1 or t == -1: return src return src[h+1:t] stripQuotes = staticmethod(stripQuotes) } AHREF : "' { $skip } ; IMG : "' { $skip } ; protected ATTR options { ignore=WS; } : w:WORD '=' ( s:STRING | v:WORD ) { if s: target = self.stripQuotes(s.getText()) else: target = v.getText() if string.lower(w.getText()) == "href": self.fireHREFLinkEvent(target, self.getLine()) elif string.lower(w.getText()) == "src": self.fireImageLinkEvent(target, self.getLine()) } ; /** Match until next whitespace; can be file, int, etc... */ protected WORD: ( options { generateAmbigWarnings=false; } : 'a'..'z' | '0'..'9' | '/' | '.' | '#' | '_' )+ ; protected STRING : '"' (~'"')* '"' | '\'' (~'\'')* '\'' ; protected WS : ( ' ' | '\t' | '\f' | ( "\r\n" // DOS | '\r' // Macintosh | '\n' // Unix (the right way) ) { $newline } ) { $skip } ; protected SCARF : WS // track line numbers while you scarf | . ; antlr-2.7.7/examples/python/linkChecker/test.html0000644000175000017500000000041510522211616022027 0ustar twernertwerner

Hi. Click here for more info.

missing.

Here is an image Here is an image Here is an image antlr-2.7.7/examples/python/transform/0000755000175000017500000000000010522211616017753 5ustar twernertwernerantlr-2.7.7/examples/python/transform/transform.g0000644000175000017500000000221110522211616022132 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language=Python; } class transform_p extends Parser; options { buildAST = true; // uses CommonAST by default ASTLabelType = "antlr.CommonAST"; } expr : mexpr (PLUS^ mexpr)* SEMI! ; mexpr : atom (STAR^ atom)* ; atom: INT ; class transform_l extends Lexer; WS : (' ' | '\t' | '\n' | '\r') { $skip; } ; LPAREN: '(' ; RPAREN: ')' ; STAR: '*' ; PLUS: '+' ; SEMI: ';' ; protected DIGIT : '0'..'9' ; INT : (DIGIT)+ ; class transform_w extends TreeParser; options { buildAST = true; ASTLabelType = "antlr.CommonAST"; } expr:! #(PLUS left:expr right:expr) { if #right.getType()==INT and int(#right.getText()) == 0: #expr = #left elif #left.getType()==INT and int(#left.getText()) ==0: #expr = #right; else: #expr = #(PLUS, left, right); } | #(STAR expr expr) | i:INT ; antlr-2.7.7/examples/python/transform/Makefile.in0000644000175000017500000000370510522211616022025 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/transform ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/transform.g \ $(eol) g_py_FILES = \ transform_l.py \ transform_p.py \ transform_w.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ transform.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) transform_l.py < $(_srcdir)/transform.in ;\ $(python) transform.py < $(_srcdir)/transform.in ;\ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup transform clean :: @@ECHO@ cleaning transform ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. 
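To make the data flow of the transform example explicit: transform_p parses an expression into an AST, and the transform_w tree grammar above rewrites that tree, dropping additions of 0 (see the action in its expr rule). The sketch below is a condensed version of the transform.py driver that follows; run it with transform.in on stdin.

import sys
import antlr
import transform_l, transform_p, transform_w

lexer = transform_l.Lexer(sys.stdin)
parser = transform_p.Parser(lexer)
parser.expr()
ast = parser.getAST()
print "before:", ast.toStringList()

walker = transform_w.Walker()
walker.expr(ast)                       # apply the "+ 0" folding
print "after: ", walker.getAST().toStringList()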
For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/transform/transform.in0000644000175000017500000000001410522211616022311 0ustar twernertwerner0+3+0*5+0; antlr-2.7.7/examples/python/transform/transform.py0000644000175000017500000000356210522211616022346 0ustar twernertwernerimport sys import antlr class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") def main(): import transform_l import transform_p import transform_w L = transform_l.Lexer() P = transform_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression try: P.expr() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." import sys sys.exit(1) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" W = transform_w.Walker(); ### Traverse the tree created by the parser W.expr(ast); ast = W.getAST(); print "List: " + ast.toStringList() if __name__ == "__main__": main() antlr-2.7.7/examples/python/multiLexer/0000755000175000017500000000000010522211616020072 5ustar twernertwernerantlr-2.7.7/examples/python/multiLexer/multilex.tok0000644000175000017500000000005110522211616022450 0ustar twernertwernerCommon JAVADOC_OPEN=4 JAVADOC_CLOSE=5 antlr-2.7.7/examples/python/multiLexer/Makefile.in0000644000175000017500000000412710522211616022143 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/multiLexer ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. 
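The multiLexer example drives two lexers over a single input stream. The essential wiring, condensed from multilex.py further below: both lexers are registered with one antlr.TokenStreamSelector, the parser reads from the selector, and the generated lexer actions themselves push and pop the selector when they see "/**" and "*/" (they reference it as multilex.selector, which is why the module attribute is set here).

import sys
import antlr
import multilex, multilex_l, javadoc_l, multilex_p

selector = antlr.TokenStreamSelector()
mainlexer = multilex_l.Lexer(sys.stdin)
doclexer = javadoc_l.Lexer(mainlexer.getInputState())   # share one input state
selector.addInputStream(mainlexer, "main")
selector.addInputStream(doclexer, "doclexer")
selector.select("main")
multilex.selector = selector    # the grammar actions look this up by module name

parser = multilex_p.Parser(selector)
parser.setFilename(mainlexer.getFilename())
parser.input()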
this : compile all :: compile g_FILES = \ $(_srcdir)/multilex_l.g \ $(_srcdir)/multilex_p.g \ $(_srcdir)/javadoc_l.g \ $(_srcdir)/javadoc_p.g \ $(eol) g_py_FILES = \ multilex_l.py \ multilex_p.py \ javadoc_l.py \ javadoc_p.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ && @CP@ $< $@ CommonTokenTypes.txt : $(_srcdir)/multilex.tok @-@RMF@ $@ && @CP@ $< $@ test :: test1 test1_deps = \ multilex.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) multilex.py < $(_srcdir)/multilex.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh $(g_py_FILES) : CommonTokenTypes.txt ### cleanup multilex clean :: @@ECHO@ cleaning multilex ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/multiLexer/multilex.in0000644000175000017500000000014210522211616022262 0ustar twernertwerner/** a javadoc comment * @param foo * @exception IOException * blah blah */ int abc; antlr-2.7.7/examples/python/multiLexer/multilex_p.g0000644000175000017500000000070510522211616022426 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language=Python; } class multilex_p extends Parser; options { importVocab=Java; } input : ( (javadoc)? INT ID SEMI )+ ; javadoc : JAVADOC_OPEN { import javadoc_p jdocparser = javadoc_p.Parser(self.getInputState()) jdocparser.content(); } JAVADOC_CLOSE ; antlr-2.7.7/examples/python/multiLexer/multilex_l.g0000644000175000017500000000113510522211616022420 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language=Python; } class multilex_l extends Lexer; options { k=2; importVocab = Common; exportVocab = Java; } tokens { INT="int"; } JAVADOC_OPEN : "/**" { import multilex ; multilex.selector.push("doclexer");} ; ID : ('a'..'z')+ ; SEMI: ';' ; WS : ( ' ' | '\t' | '\f' // handle newlines | ( "\r\n" // Evil DOS | '\r' // Macintosh | '\n' // Unix (the right way) ) { self.newline(); } ) { $setType(Token.SKIP); } ; antlr-2.7.7/examples/python/multiLexer/javadoc_p.g0000644000175000017500000000074410522211616022175 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. 
// // $Id$ options { language=Python; } class javadoc_p extends Parser; options { importVocab=JavaDoc; } content : ( p:PARAM // includes ID as part of PARAM {print "found: ",p.getText()} | e:EXCEPTION {print "found: ",e.getText()} )* ; antlr-2.7.7/examples/python/multiLexer/multilex.py0000644000175000017500000000440710522211616022314 0ustar twernertwernerimport sys import antlr class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") ### some global vars referenced by lexer and parser selector = None def getselector(): import multilex assert multilex.selector def check(s): import multilex assert multilex.selector assert multilex.selector == s def main(): import multilex_l import multilex_p import javadoc_l ## make a selector S = antlr.TokenStreamSelector() ## and two lexer L = multilex_l.Lexer() D = javadoc_l.Lexer(L.getInputState()) ## setup selector with lexer, and .. S.addInputStream(L,"main") S.addInputStream(D,"doclexer") S.select("main") ## let parser use selector .. P = multilex_p.Parser(S) P.setFilename(L.getFilename()) import multilex multilex.selector = S check(S) ### Parse the input expression try: P.input() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." import sys sys.exit(0) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" if __name__ == "__main__": main() antlr-2.7.7/examples/python/multiLexer/javadoc_l.g0000644000175000017500000000170010522211616022162 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language=Python; } class javadoc_l extends Lexer; options { k=2; importVocab = Common; exportVocab = JavaDoc; filter=true; } PARAM : "@param" ' ' ID ; EXCEPTION : "@exception" ' ' ID ; protected ID : ('a'..'z'|'A'..'Z')+ ; /** This rule simply prevents JAVADOC_CLOSE from being * called for every '*' in a comment. Calling JAVADOC_CLOSE * will fail for simple '*' and cause an exception, which * is slow. In other words, the grammar will work without * this rule, but is slower. */ STAR: '*' {$setType(Token.SKIP);} ; JAVADOC_CLOSE : "*/" { import multilex; multilex.selector.pop();} ; /** Ignore whitespace inside JavaDoc comments */ NEWLINE : ( "\r\n" // Evil DOS | '\r' // Macintosh | '\n' // Unix (the right way) ) { self.newline(); $setType(Token.SKIP); } ; antlr-2.7.7/examples/python/asn1/0000755000175000017500000000000010522211616016602 5ustar twernertwernerantlr-2.7.7/examples/python/asn1/asn1.py0000644000175000017500000000423710522211616020024 0ustar twernertwerner#! 
/usr/bin/python -t ## --*- python -*-- import sys import antlr version = sys.version.split()[0] if version < '2.2.1': False = 0 if version < '2.3': True = not False class CharScanner(antlr.CharScanner): def __init__(self,*args): super(CharScanner,self).__init__(*args) self.altcomment = True self.state_with_syntax = False ### check whether a string contains a lower case char def haslowerchar(self,s): return (s.upper() != s) def handle_comment(self): la1 = self.LA(1) if not la1: self.throw_no_viable_alt_for_char(la1) elif la1 in '-': self.match("--") elif la1 in '\n': self.match('\n') self.newline() elif la1 in '\r': self.match('\r') if self.LA(2) == '\n': self.match('\n') self.newline() elif la1 in u'\u000b' : self.match(u'\u000b') self.newline() elif la1 in u'\u000c': self.match('\u000c') self.newline() else: self.throw_no_viable_alt_for_char(la1) def throw_no_viable_alt_for_char(self,la1): raise antlr.NoViableAltForCharException( la1, self.getFilename(), self.getLine(), self.getColumn() ) def chr_ws_erase(self,string,*chars): return string if __name__ == '__main__' : ### create my lexer .. ### print "reading from test.in .." Lexer = lexer.Lexer("test.in") token = Lexer.nextToken() while not token.isEOF(): ### Is there a way to simplify this loop?? ### this looks complicated to me. However, we can't simply ### return none to check for EOF as we would like to know ### where EOF appeared (file, line, col etc). This would ### be lost. Or we could return NIL in case of EOF and, if ### we are really want to know more about EOF ask lexer to ### provide this information. But this would extend the ### lexer's interface. Another idea would be to return EOF ### by exception, but EOF is actually not an exception at ### all. ### handle token print token token = Lexer.nextToken() antlr-2.7.7/examples/python/asn1/Makefile.in0000644000175000017500000000363710522211616020660 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/asn1 ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile %.py : $(_srcdir)/%.py @-@RMF@ $@ && @CP@ $< $@ ASN1TokenTypes.txt : $(_srcdir)/asn1tokens.txt @-@RMF@ $@ && @CP@ $< $@ g_FILES = \ $(_srcdir)/asn1.g \ $(eol) g_py_FILES = \ asn1_l.py \ $(eol) compile : $(g_py_FILES) test :: test1 test1_deps = \ asn1.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) asn1_l.py < $(_srcdir)/asn1.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) ASN1TokenTypes.txt @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup asn1 clean :: @@ECHO@ cleaning asn1 ... 
@ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/asn1/asn1.in0000644000175000017500000007243510522211616020007 0ustar twernertwerner M { -- dub:111 iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter9(1) module1(0) } DEFINITIONS AUTOMATIC TAGS EXTENSIBILITY IMPLIED ::= BEGIN -- wh: more than one export section is accepted by the parser. EXPORTS; EXPORTS ALL; EXPORTS T{},T,v; -- wh: more than one import section will be syntactially allowed - shall be -- checked during semantic analysis. This will allow to have multiple variants -- within one file to be syntax checked. IMPORTS ; IMPORTS -- dub:111 Type1 FROM Module1 { iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter9(1) module1(0) } value2 FROM Module2 { iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter9(1) module2(1) } ; IMPORTS P{} FROM Module3 { iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter9(1) module2(1) } ; IMPORTS T{} FROM Module3 ; IMPORTS T FROM Module3 ; IMPORTS v FROM Module3 ; IMPORTS T{},T,v FROM Module3 ; IMPORTS T{} FROM Module m T FROM Module m v FROM Module m ; IMPORTS T{} FROM Module T FROM Module v FROM Module ; I ::= INTEGER R ::= REAL v X ::= 5 cr IA5String ::= "\r" S00 ::= SEQUENCE { } S01 ::= SEQUENCE { a INTEGER } S02 ::= SEQUENCE { a INTEGER, b INTEGER OPTIONAL, c INTEGER DEFAULT 0 , COMPONENTS OF X } S03 ::= SEQUENCE { ... } S04 ::= SEQUENCE { ... !INTEGER:4 } S05 ::= SEQUENCE { ... !-5 } S65 ::= SEQUENCE { ... !a } S07 ::= SEQUENCE { ... !SEQUENCE { a INTEGER } : 5 } S08 ::= SEQUENCE { a INTEGER, ... } S09 ::= SEQUENCE { a INTEGER, ... !INTEGER:4 } S10 ::= SEQUENCE { a INTEGER, ... !-5 } S11 ::= SEQUENCE { a INTEGER, ... !a } S12 ::= SEQUENCE { a INTEGER, ... !SEQUENCE { a INTEGER } : 5 } S13 ::= SEQUENCE { ..., a INTEGER, ... } S14 ::= SEQUENCE { ... !INTEGER:4, a INTEGER, ... !INTEGER:4 } S15 ::= SEQUENCE { ... !-5, a INTEGER, ... !-5 } S16 ::= SEQUENCE { ... !a, a INTEGER, ... !a } S17 ::= SEQUENCE { ... !SEQUENCE { a INTEGER } : 5, a INTEGER, ... !SEQUENCE { a INTEGER } : 5 } S18 ::= SEQUENCE { a INTEGER, ..., [[d D, e E]], ..., c C } C ::= CLASS { &T, &T OPTIONAL, &T DEFAULT INTEGER, &c INTEGER, &c INTEGER OPTIONAL, &c INTEGER DEFAULT 5, &c INTEGER UNIQUE, &c INTEGER UNIQUE OPTIONAL, &c INTEGER UNIQUE DEFAULT 5 } OTHER-FUNCTION ::= CLASS { -- fixed-type value field spec -- &code INTEGER -- (0 .. MAX) -- UNIQUE, -- fixed-type value set field spec -- &Alphabet BMPString DEFAULT {Latin1 INTERSECTION Level1} , -- type field spec -- &ArgumentType, -- variable-type value set field spec &SupportedArguments &ArgumentType OPTIONAL, -- variable type value field spec &result-if-error &ResultType DEFAULT NULL, -- object field spec &associated-function OTHER-FUNCTION OPTIONAL, -- object set field spec -- &Errors ERROR DEFAULT { rejected-memory | memory-fault } , -- type field spec &ResultType DEFAULT NULL } v X ::= {} v X ::= { 1 UNION 2 } v X ::= { (2|3) INTERSECTION (3|4) } v X ::= { ALL EXCEPT 1 } v X ::= { ALL EXCEPT 1 } v X ::= { 1|2|3^4|5 } v X ::= { {1|2|3^4|5} } v X ::= { {{1}|2|3^4|5} } v X ::= { (1|2|3) EXCEPT 3 } v X ::= { 1|2|3 EXCEPT 3 } v X ::= { 5 , ... } v X ::= { 5 , ... , 5 } v X ::= { ... 
} v X ::= { ..., 5 } Person ::= SEQUENCE { age INTEGER , name IA5String } -- pki93 Extension ::= SEQUENCE { extnId EXTENSION.&id ({ExtensionSet}), critical BOOLEAN DEFAULT FALSE, extnValue OCTET STRING } EXTENSION ::= CLASS { &id OBJECT IDENTIFIER UNIQUE, &ExtnType } WITH SYNTAX { SYNTAX &ExtnType IDENTIFIED BY &id } T ::= INTEGER ( {SupportedAttributes} {@type} ) AttributeTypeAndValue ::= SEQUENCE { type ATTRIBUTE.&id ({SupportedAttributes}), value ATTRIBUTE.&Type ({SupportedAttributes}{@type}) } Attribute ::= SEQUENCE { type ATTRIBUTE.&id ({SupportedAttributes}), values SET SIZE (1 .. MAX) OF ATTRIBUTE.&Type ({SupportedAttributes}{@type})} C::= CLASS { &T, &id INTEGER } WITH SYNTAX { TYPE &T MAPPED BY &id [&a] [A [&b] C] } c C ::= { TYPE IA5String MAPPED BY 5 } -- dub: 106 TypeReference ::= CHOICE { integer INTEGER, boolean BOOLEAN } value-reference TypeReference ::= integer : 12 -- dub: 107 Pair ::= SEQUENCE { x INTEGER, y INTEGER } Couple ::= SEQUENCE { x INTEGER, y INTEGER } pair Pair ::= { x 5, y 13 } couple Couple ::= pair Lighter-state ::= ENUMERATED { on(0), off(1), out-of-order(2) } Kettle-state ::= ENUMERATED { on(0), off(1), out-of-order(2) } lighter Lighter-state ::= on kettle Kettle-state ::= lighter PrimeNumbers INTEGER ::= { 2 | 3 | 5 | 7 | 11 | 13 } -- dub: 112 Type2 ::= SET { a Type1 DEFAULT value1, b BOOLEAN } -- dub: 125 T1 ::= [0] SET { name PrintableString, age INTEGER, gender BOOLEAN -- male = TRUE -- } U1 ::= [1] SET { gender Male, name VisibleString, age INTEGER } -- dub: 126 T2 ::= [0] SEQUENCE { name [0] PrintableString, age INTEGER } U2 ::= [2] SEQUENCE { name [1] PrintableString, age INTEGER } -- dub: 128 RoundResult ::= BOOLEAN ok RoundResult ::= TRUE ko RoundResult ::= FALSE Married ::= BOOLEAN kim MALE ::= TRUE -- dub: 129 Ack ::= NULL Clock ::= CHOICE { time UTCTime, out-of-order NULL } battery-down Clock ::= out-of-order:NULL -- dub: 130 LinkedList ::= SEQUENCE { data Data, next CHOICE { linked-list LinkedList, end NULL} } --LinkedList ::= SEQUENCE OF Data -- dub: 131 zero INTEGER ::= 0 french-population INTEGER ::= 60000000000 fridge-temperature INTEGER ::= -18 -- in Centigrade scale -- no constraints yet Interval ::= INTEGER (123456789..1234567890) -- dub: 132 ErrorCode ::= INTEGER { disk-full(1), no-disk(-1), disk-not-formatted(2) } stupid-error ErrorCode ::= disk-full ok ErrorCode ::= 0 stupid-error ErrorCode ::= 1 AbortReason ::= INTEGER { reason-not-specified(0), unrecognized-ppdu(1), unexpected-ppdu(2), unexpected-session-service-primitive(3), unrecognized-ppdu-parameter(4), unexpected-ppdu-parameter(5), invalid-ppdu-parameter-value(6) } -- dub: 133 Temperature ::= INTEGER { freezing(0), boiling(100) } alpha INTEGER ::= 1 Type1 ::= INTEGER { alpha(2) } Type2 ::= INTEGER { alpha(3), beta(alpha) } gamma Type2 ::= beta delta Type2 ::= alpha -- dub: 136 -- [..] -- dub: 137 RadioButton ::= ENUMERATED { button1,button2,button3 } RadioButton ::= ENUMERATED { button1, button2, button3, ...} RadioButton ::= ENUMERATED { button1, button2, button3, ..., button4, button5 } -- dub: 138 -- [..] -- dub: 141 ExtendedReal ::= CHOICE { decimal REAL, particular-real ENUMERATED { one-third,pi,e, ...} } pi REAL ::= { mantissa 314159, base 10, exponent -5 } e REAL ::= { mantissa 271828128459045235360287, base 10, exponent -23 } zero REAL ::= 0 -- dub: 142 -- [..] -- dub: 145 SubjectPublicKeyInfo ::= SEQUENCE { algorithm AlgorithmIdentifier , subjectPublicKey BIT STRING } pi-decimals BIT STRING ::= '001000100'B pi-decimals BIT STRING ::= '243F68885A'H -- dub: 146 -- [..] 
Rights ::= BIT STRING { user-read(0), user-write(1), group-read(2), group-write(3), other-read(4), other-write(5) } group1 Rights ::= { group-read,group-write} group2 Rights ::= '0011'B group3 Rights ::= '3'H group4 Rights ::= '001100'B weired-rights Rights ::= '000001'B -- dub: 148 -- [..] alpha INTEGER ::= 1 BinaryString ::= BIT STRING { alpha(3), beta(alpha) } -- dub: 151 icon OCTET STRING ::= '00110010100101'B icon OCTET STRING ::= '349aB'H internet-id OBJECT IDENTIFIER ::= { iso(1) identified-organization(4) dod(6) internet(1)} francetelecom-id OBJECT IDENTIFIER ::= { iso member-body f(250) type-org(1) ft(16)} ber-id OBJECT IDENTIFIER ::= { 2 1 1 } -- dub: 260 Two ::= INTEGER (2) Day ::= ENUMERATED { tuesday(2), wednesday(3), thursday(4) } Wednesday ::= Day (wednesday) FourZ ::= IA5String ("ZZZZ") Afters ::= CHOICE { cheese IA5String, dessert ENUMERATED { profiteroles(1), sabayon(2), fraisier(3) } } CompulsoryAfters ::= Afters (desert:sabayon) -- dub: 262 Weekend ::= Day(saturday|sunday) PushButtonDial ::= IA5String ("0"|"1"|"2"|"3"|"4"|"5"|"6"|"7"|"8"|"9"|"*"|"#") FrenchWeekend ::= Day(Weekend) -- dub: 263 LongWeekend ::= Day(Weekend|monday) T1 ::= INTEGER { trois(3), quatre(4) } T2 ::= INTEGER { one(1),two(2),three(3), four(4) } (T1) FrenchWeekend ::= Day(INCLUDES Weekend) -- dub: 264 Number ::= INTEGER From3to15 ::= Number (3 .. 15) From4to14 ::= Number (3<..<15) Positive ::= NUMBER (0<..MAX) Negative ::= NUMBER (MIN..<0) Zero ::= NUMBER (-1<..<1) T ::= REAL (0..<{mantissa 5,base 10, exponent 0}) U ::= T({mantissa 2, base 10, exponent 0} .. MAX) T ::= INTEGER ({ExtensionSet}) --dub: 266 Exactly31BitsString ::= BIT STRING (SIZE (31)) StringOf31BitsAtTheMost ::= BIT STRING (SIZE (0..31)) EvenNumber ::= INTEGER (2|4|6|8|10) EvenLengthString ::= IA5String (SIZE (INCLUDES EvenNumber)) NonEmptyString ::= OCTET STRING (SIZE (1..MAX)) ListOfStringsOf5Characters ::= SEQUENCE OF PrintableString (SIZE (5)) -- dub: 267 ListOfStrings ::= SEQUENCE OF PrintableString ListOf5Strings ::= ListOfStrings (SIZE (5)) ListOf5Strings ::= SEQUENCE (SIZE (5)) OF PrintableString ListOf5StringsOf5Characters ::= SEQUENCE (SIZE (5)) OF PrintableString (SIZE (5)) ListOf5StringsOf5Characters ::= SEQUENCE SIZE (5) OF PrintableString (SIZE (5)) -- dub: 268 Morse ::= PrintableString (FROM ("."|"-"|" ")) IDCardNumber ::= NumericString (FROM ("0".."9")) PushButtonDialSequence ::= IA5String (FROM ("0".."9"|"*"|"#")) --dub: 271 DateAndTime ::= VisibleString(PATTERN "\d#2/\d#2/\d#4-\d#2:\d#2") -- DD/MM/YYYY-HH:MM -- dub: 275 DateAndTime ::= VisibleString (PATTERN "((\d#2)/(\d#2)/(\d#4)-(\d#2:\d#2))") -- \1 is a date in which \2 is the month, \3 the day, -- \4 the year and \5 the time (in hours and minutes) -- dub: 280 ROIV-m-Linked-Reply-Action ::= ROIV-m-Linked-Reply (WITH COMPONENTS { invokedID PRESENT, linked-ID PRESENT, operation-value (m-Linked-Reply), argument (INCLUDES LinkedReplyArgument (WITH COMPONENTS { getResult ABSENT, getListError ABSENT, setResult ABSENT, setListError ABSENT, actionResult PRESENT, processingFailure PRESENT, deleteResult ABSENT, actionError PRESENT, deleteError ABSENT }))}) Choice ::= CHOICE { a A, b B, c C, d D } ChoicesCD ::= Choice ( WITH COMPONENTS {...,a ABSENT, b ABSENT} ) ChoiceCD ::= Choice (WITH COMPONENTS {..., a ABSENT, b ABSENT}) ChoiceA1 ::= Choice (WITH COMPONENTS {..., a PRESENT}) ChoiceA2 ::= Choice (WITH COMPONENTS {a PRESENT}) ChoiceBCD ::= Choice (WITH COMPONENTS {a ABSENT, b, c}) -- dub: 284 MoreCompact ::= OCTET STRING (CONTAINING MyType ENCODED BY {joint-iso-itu-t asn1 
packed-encoding(3) basic(0) unaligned(1)}) -- dub: 285 PhoneNumber ::= NumericString (FROM ("0".."9"))(SIZE (10)) Row ::= SEQUENCE OF INTEGER -- dub: 287 Lipogramme ::= IA5String (FROM (ALL EXCEPT ("e"|"E"))) SaudiName ::= BasicArabic (SIZE (1..100) ^ Level2) ISO-10646-String ::= BMPString (FROM (Level2 ^ (BasicLatin | HebrewExtended | Hiragana))) KatakanaAndBasicLatin ::= UniversalString (FROM (Katakana | BasicLatin)) CapitalAndSmall ::= IA5String (FROM ("A".."Z"|"a".."z")) CapitalOrSmall ::= IA5String (FROM ("A".."Z")|FROM ("a".."z")) ExoticString ::= IA5String (SIZE (1..4)|FROM ("abc")) -- dub: 288 InvokeId ::= CHOICE { present INTEGER, absent NULL } DAP-InvokeIdSet ::= InvokeId (ALL EXCEPT absent:NULL) Identifications ::= SEQUENCE { idNumber NumericString (FROM (ALL EXCEPT " ")) (SIZE (6)) OPTIONAL, telephone NumericString (FROM (ALL EXCEPT " ")) (SIZE (13)) OPTIONAL } Person ::= SEQUENCE { name PrintableString (SIZE (1..20)), ident Identifications (WITH COMPONENTS {idNumber} |WITH COMPONENTS {telephone}) } -- dub: 291 A ::= INTEGER (0..10, ...) A ::= INTEGER (0..10, ..., 12) -- dub: 292 S ::= IA5String (SIZE (1..10, ...)) E ::= INTEGER (1..10, ...!Exception:too-large-integer) Exception ::= ENUMERATED {too-large-integer, ...} ImplementedUnivStr{UniversalString:Level} ::= UniversalString (FROM ((Level UNION BasicLatin)) !characterSet-problem) characterSet-problem INTEGER ::= 4 T ::= INTEGER (0..10, ...!10) U ::= T (2..6, ...!6) -- dub: 293 ImplementedUnivStgLevel1{UniversalString:ImplementedSubset} ::= UniversalString (ImplementedUnivStr{{Level1}} INTERSECTION ImplementedSubset, ...!level1-problem) level1-problem INTEGER ::= 5 -- dub: 295 PDV-List ::= SEQUENCE { transfer-syntax-name Transfer-syntax-name OPTIONAL, presentation-context-identifier Presentation-context-identifier, presentation-data-values CHOICE { single-ASN1-type [0] ABSTRACT-SYNTAX.&Type (CONSTRAINED BY {-- Type corresponding -- -- to presentation-context-identifier --}), octet-aligned [1] IMPLICIT OCTET STRING, arbitrary [2] IMPLICIT BIT STRING } } Reject ::= SEQUENCE { invokeId InvokeId, problem CHOICE { general [0] GeneralProblem, invoke [1] InvokeProblem, returnResult [2] ReturnResultProblem, returnError [3] ReturnErrorProblem }} (CONSTRAINED BY {-- must conform to the above -- -- definition --} ! 
RejectProblem:general-mistypedPDU) Encrypted{TypeToBeEnciphered} ::= BIT STRING (CONSTRAINED BY {-- must be the result of the encipherment -- -- of some BER-encoded value of -- TypeToBeEnciphered} !Error:securityViolation) Error ::= ENUMERATED {securityViolation} -- dub: 299 EXTERNAL ::= [UNIVERSAL 8] IMPLICIT SEQUENCE { direct-reference OBJECT IDENTIFIER OPTIONAL, indirect-reference INTEGER OPTIONAL, data-value-descriptor ObjectDescriptor OPTIONAL, encoding CHOICE { single-ASN1-type [0] ANY, octet-aligned [1] IMPLICIT OCTET STRING, arbitrary [2] IMPLICIT BIT STRING }} CoordinateMatrix ::= SEQUENCE SIZE (6) OF Row ( SIZE (6)) (WITH COMPONENT (-100..100)) TextBlock ::= SEQUENCE OF VisibleString Address ::= TextBlock (SIZE (3..6))(WITH COMPONENT (SIZE (1..32))) Address ::= TextBlock (WITH COMPONENT (SIZE (1..32))) PushButtonDialSequence ::= IA5String (FROM ("0".."9"|"*"|"#")) SIGNED { ToBeSigned } ::= SEQUENCE { toBeSigned ToBeSigned, algorithm AlgorithmIdentifier, signature BIT STRING } DirectoryString { INTEGER:maxSize } ::= CHOICE { teletexString TeletexString (SIZE (1..maxSize)), printableString PrintableString (SIZE (1..maxSize)), universalString UniversalString (SIZE (1..maxSize)), bmpString BMPString (SIZE(1..maxSize)), utf8String UTF8String (SIZE(1..maxSize)) } LinkedList ::= SEQUENCE SIZE (1..MAX) OF T GeneralName ::= CHOICE { otherName [0] INSTANCE OF OTHER-NAME, rfc822Name [1] IA5String, dNSName [2] IA5String, x400Address [3] ORAddress, directoryName [4] Name, ediPartyName [5] EDIPartyName, uniformResourceIdentifier [6] IA5String, iPAddress [7] OCTET STRING, registeredID [8] OBJECT IDENTIFIER } OTHER-NAME ::= TYPE-IDENTIFIER EDIPartyName ::= SEQUENCE { nameAssigner DirectoryString {ub-name} } extendedKeyUsage EXTENSION ::= { SYNTAX SEQUENCE SIZE (1..MAX) OF KeyPurposeId IDENTIFIED BY id-ce-extKeyUsage } -- dub: 313 FUNCTION ::= CLASS { &ArgumentType , &ResultType DEFAULT NULL, &Errors ERROR OPTIONAL, &code INTEGER UNIQUE } addition-of-2-integers FUNCTION ::= { &ArgumentType SEQUENCE { a INTEGER, b INTEGER }, &ResultType INTEGER, -- empty error list by default &code 1 } -- dub: 314 OTHER-FUNCTION ::= CLASS { &code INTEGER (0..MAX) UNIQUE, &Alphabet BMPString DEFAULT {Latin1 INTERSECTION Level1}, &ArgumentType , &SupportedArguments &ArgumentType OPTIONAL, &ResultType DEFAULT NULL, &result-if-error &ResultType DEFAULT NULL, &associated-function OTHER-FUNCTION OPTIONAL, &Errors ERROR DEFAULT {rejected-argument | memory-fault} } rejected-argument ERROR ::= {-- object definition --} memory-fault ERROR ::= {-- object definition --} -- dub: 315 other-addition-of-2-integers OTHER-FUNCTION ::= { &ArgumentType Pair, &SupportedArguments {PosPair | NegPair}, &ResultType INTEGER, &result-if-error 0, &code 1 } Pair ::= SEQUENCE {a INTEGER, b INTEGER} PosPair ::= Pair (WITH COMPONENTS {a(0..MAX), b(0..MAX)}) NegPair ::= Pair (WITH COMPONENTS {a(MIN..0), b(MIN..0)}) -- dub: 323 OTHER-FUNCTION ::= CLASS { &code INTEGER (0..MAX) UNIQUE, &Alphabet BMPString DEFAULT {Latin1 INTERSECTION Level1}, &ArgumentType , &SupportedArguments &ArgumentType OPTIONAL, &ResultType DEFAULT NULL, &result-if-error &ResultType DEFAULT NULL, &associated-function OTHER-FUNCTION OPTIONAL, &Errors ERROR DEFAULT {rejected-argument|memory-fault} } WITH SYNTAX { ARGUMENT TYPE &ArgumentType, [SUPPORTED ARGUMENTS &SupportedArguments,] [RESULT TYPE &ResultType, [RETURNS &result-if-error IN CASE OF ERROR,]] -- syntax error if ']]' (solved) [ERRORS &Errors,] [MESSAGE ALPHABET &Alphabet,] [ASSOCIATED FUNCTION &associated-function,] 
CODE &code } memory-fault ERROR ::= {-- object definition --} -- dub: 315 other-addition-of-2-integers OTHER-FUNCTION ::= { &ArgumentType Pair, &SupportedArguments {PosPair | NegPair}, &ResultType INTEGER, &result-if-error 0, &code 1 } Pair ::= SEQUENCE {a INTEGER, b INTEGER} PosPair ::= Pair (WITH COMPONENTS {a(0..MAX), b(0..MAX)}) NegPair ::= Pair (WITH COMPONENTS {a(MIN..0), b(MIN..0)}) -- dub: 327 ATTRIBUTE ::= CLASS { &derivation ATTRIBUTE OPTIONAL, &Type OPTIONAL, &equality-match MATCHING-RULE OPTIONAL, &ordering-match MATCHING-RULE OPTIONAL, &substrings-match MATCHING-RULE OPTIONAL, &single-valued BOOLEAN DEFAULT FALSE, &collective BOOLEAN DEFAULT FALSE, &no-user-modification BOOLEAN DEFAULT FALSE, &usage Attribute-Usage DEFAULT userApplications, &id OBJECT IDENTIFIER UNIQUE} WITH SYNTAX { [SUBTYPE OF &derivation] [WITH SYNTAX &Type] [EQUALITY MATCHING RULE &equality-match] [ORDERING MATCHING RULE &ordering-match] [SUBSTRINGS MATCHING RULE &substrings-match] [SINGLE VALUE &single-valued] [COLLECTIVE &collective] [NO USER MODIFICATION &no-user-modification] [USAGE &usage] ID &id } AttributeUsage ::= ENUMERATED { userApplications(0), directoryOperation(1), distributedOperation(2), dSAOperation(3) } MATCHING-RULE ::= CLASS { &AssertionType OPTIONAL, &id OBJECT IDENTIFIER UNIQUE } WITH SYNTAX { [SYNTAX &AssertionType] ID &id } name ATTRIBUTE ::= { WITH SYNTAX DirectoryString EQUALITY MATCHING RULE caseIgnoreMatch ID {joint-iso-itu-t ds(5) attributeType(4) 2} } DirectoryString ::= CHOICE { teletexString TeletexString (SIZE (1..maxSize)), printableString PrintableString (SIZE (1..maxSize)), universalString UniversalString (SIZE (1..maxSize)), bmpString BMPString (SIZE (1..maxSize)), utf8String UTF8String (SIZE (1..maxSize)) } maxSize INTEGER ::= 25 caseIgnoreMatch MATCHING-RULE ::= { SYNTAX DirectoryString ID {id-mr 2} } id-mr OBJECT IDENTIFIER ::= { joint-iso-itu-t ds(5) matchingRule(13) } MatchingRules MATCHING-RULE ::= { caseIgnoreMatch | booleanMatch | integerMatch } --dub: 328 AttributeUsage ::= ENUMERATED { userApplications(0), directoryOperation(1), distributedOperation(2), dSAOperation(3) } MATCHING-RULE ::= CLASS { &AssertionType OPTIONAL, &id OBJECT IDENTIFIER UNIQUE } WITH SYNTAX { [SYNTAX &AssertionType] ID &id } --dub: 329 name ATTRIBUTE ::= { WITH SYNTAX DirectoryString EQUALITY MATCHING RULE caseIgnoreMatch ID {joint-iso-itu-t ds(5) attributeType(4) 2} } DirectoryString ::= CHOICE { teletexString TeletexString (SIZE (1..maxSize)), printableString PrintableString (SIZE (1..maxSize)), universalString UniversalString (SIZE (1..maxSize)), bmpString BMPString (SIZE (1..maxSize)), utf8String UTF8String (SIZE (1..maxSize)) } maxSize INTEGER ::= 25 caseIgnoreMatch MATCHING-RULE ::= { SYNTAX DirectoryString ID {id-mr 2} } id-mr OBJECT IDENTIFIER ::= { joint-iso-itu-t ds(5) matchingRule(13) } --dub: 330 LessMatchingRules MATCHING-RULE ::= { MatchingRules EXCEPT caseIgnoreMatch } ExtensibleMatchingRules MATCHING-RULE ::= { caseIgnoreMatch | booleanMatch | integerMatch, ... 
} ExtensibleMatchingRules MATCHING-RULE ::= {...} --dub: 331 Values INTEGER ::= { 1 | 2 | 3 } Values ::= INTEGER (1|2|3) --dub: 337 id-mr-caseIgnoreMatch OBJECT IDENTIFIER ::= caseIgnoreMatch.&id CLASS1 ::= CLASS { &obj CLASS2 } CLASS2 ::= CLASS { &val INTEGER } object1 CLASS1 ::= { &obj object2 } object2 CLASS2 ::= { &val 5 } value INTEGER ::= object1.&obj.&val Oids OBJECT IDENTIFIER ::= {MatchingRules.&id} Oids OBJECT IDENTIFIER ::= { {id-mr 2} | {id-mr 12} | {id-mr 13} } SupportedFunctions OTHER-FUNCTION ::= { addition-of-2-integers | substraction-of-2-integers | multiplication-of-2-integers } -- dub: 342 surname ATTRIBUTE ::= { -- family name SUBTYPE OF name WITH SYNTAX DirectoryString ID id-at-surname } givenName ATTRIBUTE ::= { -- first name SUBTYPE OF name WITH SYNTAX DirectoryString ID id-at-givenName } countryName ATTRIBUTE ::= { -- country SUBTYPE OF name WITH SYNTAX PrintableString (SIZE (2)) -- [ISO3166] SINGLE VALUE TRUE ID id-at-countryName} SupportedAttributes ATTRIBUTE ::= {surname | givenName | countryName} AttributeIdAndValue1 ::= SEQUENCE { ident ATTRIBUTE.&id, value ATTRIBUTE.&Type } -- dub: 343 AttributeIdAndValue2 ::= SEQUENCE { ident ATTRIBUTE.&id({SupportedAttributes}), value ATTRIBUTE.&Type({SupportedAttributes}) } -- dub: 344 value AttributeIdAndValue2 ::= { ident id-at-countryName, -- value DirectoryString:universalString:"$$Escher$$" -- -- wh: bug } -- dub: 345 AttributeIdAndValue3 ::= SEQUENCE { ident ATTRIBUTE.&id({SupportedAttributes}), value ATTRIBUTE.&Type({SupportedAttributes}{@ident}) } -- dub: 346 AttributeIdAndValue3 ::= SEQUENCE { ident ATTRIBUTE.&id({SupportedAttributes}), value ATTRIBUTE.&Type({SupportedAttributes}{@ident}) } -- dub: 353 AttributeIdAndValue3 ::= SEQUENCE { ident ATTRIBUTE.&id({SupportedAttributes}), value ATTRIBUTE.&Type({SupportedAttributes}{@.ident})} AttributeIdsAndValues ::= SET OF SEQUENCE { ident ATTRIBUTE.&id({SupportedAttributes}), value ATTRIBUTE.&Type({SupportedAttributes}{@.ident})} AttributeValueAssertion ::= SEQUENCE { type ATTRIBUTE.&Id({SupportedAttributes}), assertion ATTRIBUTE.&equality-match.&AssertionType ({SupportedAttributes}{@type}) } -- dub: 354 FilterItem ::= CHOICE { equality [0] AttributeValueAssertion, substrings [1] SEQUENCE { type Attribute.&id({SupportedAttributes}), strings SEQUENCE OF CHOICE { initial [0] ATTRIBUTE.&Type ({SupportedAttributes}{@substrings.type}), any [1] ATTRIBUTE.&Type ({SupportedAttributes}{@substrings.type}), final [2] ATTRIBUTE.&Type ({SupportedAttributes}{@substrings.type}) }}, greaterOrEqual [2] AttributeValueAssertion, lessOrEqual [3] AttributeValueAssertion, present [4] AttributeType, approximateMatch [5] AttributeValueAssertion, extensibleMatch [6] MatchingRuleAssertion } Attribute-desc ::= SEQUENCE { usage ATTRIBUTE.&usage({SupportedAttributes}), list SEQUENCE OF SEQUENCE { ident ATTRIBUTE.&id({SupportedAttributes}{@usage}), value ATTRIBUTE.&Type ({SupportedAttributes}{@usage,@.ident}) }} -- dub: 355 att-desc Attribute-desc ::= { usage userApplications, list { { ident id-at-objectClass, value oid }, { ident id-at-aliasedEntryName, value distinguishedName }}} -- dub: 356 Authentication-value ::= CHOICE { charstring [0] IMPLICIT GraphicString, bitstring [1] BIT STRING, external [2] EXTERNAL, other [3] IMPLICIT SEQUENCE { other-mechanism-name MECHANISM-NAME.&id({ObjectSet}), other-mechanism-value MECHANISM-NAME.&Type ({ObjectSet}{@.other-mechanism-name}) }} TYPE-IDENTIFIER ::= CLASS { &id OBJECT IDENTIFIER UNIQUE, &Type } WITH SYNTAX {&Type IDENTIFIED BY &id} MECHANISM-NAME ::= 
TYPE-IDENTIFIER Authentication-value ::= CHOICE { charstring [0] IMPLICIT GraphicString, bitstring [1] BIT STRING, external [2] EXTERNAL, other [3] IMPLICIT SEQUENCE { other-mechanism-name MECHANISM-NAME.&id({ObjectSet}), other-mechanism-value MECHANISM-NAME.&Type ({ObjectSet}{@.other-mechanism-name}) }} T::= SEQUENCE { type-id TYPE-IDENTIFIER.&id, value [0] EXPLICIT TYPE-IDENTIFIER.&Type } -- dub: 358 ExtendedBodyPart ::= SEQUENCE { parameters [0] INSTANCE OF TYPE-IDENTIFIER OPTIONAL, data INSTANCE OF TYPE-IDENTIFIER } (CONSTRAINED BY { -- must correspond to the ¶meters -- -- and &data fields of a member of -- IPMBodyPartTable} ) TYPE-IDENTIFIER ::= CLASS { &id OBJECT IDENTIFIER UNIQUE, &Type } WITH SYNTAX { &Type IDENTIFIED BY &id } -- dub: 360 ABSTRACT-SYNTAX ::= CLASS { &id OBJECT IDENTIFIER, &Type , &property BIT STRING {handles-invalid-encodings(0)} DEFAULT {} } WITH SYNTAX { &Type IDENTIFIED BY &id [HAS PROPERTY &property] } -- dub: 361 PDV-list ::= SEQUENCE { transfer-syntax-name Transfer-syntax-name OPTIONAL, presentation-context-identifier Presentation-context-identifier, presentation-data-values CHOICE { single-ASN1-type [0] ABSTRACT-SYNTAX.&Type (CONSTRAINED BY {-- Type which corresponds to -- -- the presentation context identifier --}), octet-aligned [1] IMPLICIT OCTET STRING, arbitrary [2] IMPLICIT BIT STRING }} -- dub: 379 DirectoryString{INTEGER:maxSize} ::= CHOICE { teletexString TeletexString (SIZE (1..maxSize)), printableString PrintableString (SIZE (1..maxSize)), universalString UniversalString (SIZE (1..maxSize)), bmpString BMPString (SIZE (1..maxSize)), utf8String UTF8String (SIZE (1..maxSize)) } -- dub: 380 SubstringAssertion ::= SEQUENCE OF CHOICE { initial [0] DirectoryString{ub-match}, any [1] DirectoryString{ub-match}, final [2] DirectoryString{ub-match} } ub-match INTEGER ::= 128 SubstringAssertion{INTEGER:ub-match} ::= SEQUENCE OF CHOICE { initial [0] DirectoryString{ub-match}, any [1] DirectoryString{ub-match}, final [2] DirectoryString{ub-match} } -- dub: 381 T ::= INTEGER List{T} ::= SEQUENCE OF T Choice{T} ::= CHOICE { a [0] T, b INTEGER } Structure{T} ::= SEQUENCE { a INTEGER, b [0] T OPTIONAL, c INTEGER } GeneralForm{T, T:val} ::= SEQUENCE { info T DEFAULT val, comments IA5String } -- dub: 382 Form ::= GeneralForm{BOOLEAN, TRUE} Form ::= SEQUENCE { info BOOLEAN DEFAULT TRUE, comments IA5String } pariTierce{INTEGER:first, INTEGER:second, INTEGER:third} SEQUENCE OF INTEGER ::= { first, second, third } -- dub: 382 MESSAGE-PARAMETERS ::= CLASS { &max-priority-level INTEGER, &max-message-buffer-size INTEGER, &max-reference-buffer-size INTEGER } WITH SYNTAX { MAXIMUM PRIORITY &max-priority-level MAXIMUM MESSAGE BUFFER &max-message-buffer-size MAXIMUM REFERENCE BUFFER &max-reference-buffer-size } Message-PDU{MESSAGE-PARAMETERS:param} ::= SEQUENCE { priority INTEGER (0..param.&max-priority-level !Exception:priority), message UTF8String (SIZE (0..param.&max-message-buffer-size) !Exception:message), comments UTF8String (SIZE (0..param.&max-reference-buffer-size) !Exception:comments) } Exception ::= ENUMERATED {priority(0), message(1), comments(2), ...} -- dub: 383 Forward{OPERATION:OperationSet} OPERATION ::= { OperationSet | OperationSet.&Linked.&Linked | OperationSet.&Linked.&Linked.&Linked.&Linked } Reverse{OPERATION:OperationSet} OPERATION ::= { Forward{{OperationSet.&Linked}} } -- dub: 383 ForwardAndReverse OPERATION ::= {Forward{{MyOperationSet}} UNION Reverse{{MyOperationSet}}} --dub: 387 Flag{Color} ::= SEQUENCE { country VisibleString, colors SEQUENCE OF 
Color DEFAULT {blue} } -- dub: 389 CharacterString{INTEGER:max-length} ::= CHOICE { teletexString TeletexString (SIZE (1..max-length) !exceeds-max-length), printableString PrintableString (SIZE (1..max-length) !exceeds-max-length) } exceeds-max-length INTEGER ::= 999 -- dub: 390 my-abstract-syntax {INTEGER:maxSize} ABSTRACT-SYNTAX ::= { my-PDU{size-max} IDENTIFIED BY {iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter17(4) my-PDU(0)} } END -- dub: 499,500 MyHTTP DEFINITIONS AUTOMATIC TAGS ::= BEGIN GetRequest ::= SEQUENCE { header-only BOOLEAN, lock BOOLEAN, accept-types AcceptTypes, url Url, ... } AcceptTypes ::= SET { standards BIT STRING {html(0), plain-text(1), gif(2), jpeg(3)} (SIZE (4)) OPTIONAL, others SEQUENCE OF VisibleString (SIZE (4)) OPTIONAL } Url ::= VisibleString (FROM ("a".."z"|"A".."Z"|"0".."9"| "./-_~%#")) value GetRequest ::= { header-only TRUE, lock FALSE, accept-types { standards { html, plain-text } }, url "www.asn1.com" } END -- dub: 360 ProtocolName-Abstract-Syntax-Module {iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter15(3) protocol-name(0)} DEFINITIONS ::= BEGIN IMPORTS ProtocolName-PDU FROM ProtocolName-Module {iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter15(3) protocol-name(0) module1(2)}; protocolName-Abstract-Syntax ABSTRACT-SYNTAX ::= {ProtocolName-PDU IDENTIFIED BY protocolName-Abstract-Syntax-id} protocolName-Abstract-Syntax-id OBJECT IDENTIFIER ::= {iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter15(3) protocol-name(0) abstract-syntax(0)} protocolName-Abstract-Syntax-descriptor ObjectDescriptor ::= "Abstract syntax of ProtocolName" protocolName-Transfer-Syntax-id OBJECT IDENTIFIER ::= {iso member-body(2) f(250) type-org(1) ft(16) asn1-book(9) chapter15(3) protocol-name(0) transfer-syntax(1)} protocolName-Transfer-Syntax-descriptor ObjectDescriptor ::= "Transfer syntax of ProtocolName" END antlr-2.7.7/examples/python/asn1/asn1tokens.txt0000644000175000017500000000375410522211616021442 0ustar twernertwerner// $ANTLR 2.7.4: parser.g -> parserTokenTypes.txt$ parser // output token vocab name VALUE_TOKEN_LIST=4 TYPE_DECL=5 CLASS_DECL=6 VALUE_DECL=7 MODULE_DECL=8 TOKEN_BSTRING=9 TOKEN_CSTRING=10 TOKEN_HSTRING=11 TOKEN_word=12 TOKEN_NUMBER=13 TOKEN_WORD=14 TOKEN_field=15 TOKEN_Field=16 TOKEN_FIELD=17 "-"=18 "+"=19 TOKEN_Word=20 LITERAL_BOOLEAN="BOOLEAN"=21 LITERAL_NULL="NULL"=22 LITERAL_INTEGER="INTEGER"=23 LITERAL_REAL="REAL"=24 TOKEN_BIT_STRING=25 TOKEN_OCTET_STRING=26 TOKEN_OBJECT_IDENTIFIER=27 "RELATIVE-OID"=28 "."=29 LITERAL_DEFINITIONS="DEFINITIONS"=30 "::="=31 LITERAL_BEGIN="BEGIN"=32 LITERAL_END="END"=33 "{"=34 "}"=35 "("=36 ")"=37 LITERAL_EXPLICIT="EXPLICIT"=38 LITERAL_TAGS="TAGS"=39 LITERAL_IMPLICIT="IMPLICIT"=40 LITERAL_AUTOMATIC="AUTOMATIC"=41 LITERAL_EXTENSIBILITY="EXTENSIBILITY"=42 LITERAL_IMPLIED="IMPLIED"=43 LITERAL_EXPORTS="EXPORTS"=44 ";"=45 ","=46 LITERAL_ALL="ALL"=47 LITERAL_IMPORTS="IMPORTS"=48 LITERAL_FROM="FROM"=49 ":"=50 LITERAL_CLASS="CLASS"=51 LITERAL_WITH="WITH"=52 LITERAL_SYNTAX="SYNTAX"=53 "["=54 "]"=55 LITERAL_APPLICATION="APPLICATION"=56 LITERAL_BY="BY"=57 LITERAL_COMPONENTS="COMPONENTS"=58 LITERAL_CONTAINING="CONTAINING"=59 LITERAL_DEFAULT="DEFAULT"=60 LITERAL_EXCEPT="EXCEPT"=61 LITERAL_INCLUDES="INCLUDES"=62 LITERAL_OF="OF"=63 LITERAL_OPTIONAL="OPTIONAL"=64 LITERAL_PATTERN="PATTERN"=65 LITERAL_PRIVATE="PRIVATE"=66 LITERAL_SIZE="SIZE"=67 LITERAL_UNIQUE="UNIQUE"=68 LITERAL_UNIVERSAL="UNIVERSAL"=69 "<"=70 LITERAL_SEQUENCE="SEQUENCE"=71 LITERAL_CHOICE="CHOICE"=72 
LITERAL_SET="SET"=73 "[["=74 "]]"=75 "..."=76 "!"=77 LITERAL_ENUMERATED="ENUMERATED"=78 LITERAL_ANY="ANY"=79 TOKEN_DEFINED_BY=80 LITERAL_INSTANCE="INSTANCE"=81 LITERAL_TRUE="TRUE"=82 LITERAL_FALSE="FALSE"=83 "PLUS-INFINITY"=84 "MINUS-INFINITY"=85 "|"=86 LITERAL_UNION="UNION"=87 "^"=88 LITERAL_INTERSECTION="INTERSECTION"=89 LITERAL_MIN="MIN"=90 ".."=91 LITERAL_MAX="MAX"=92 TOKEN_ENCODED_BY=93 LITERAL_COMPONENT="COMPONENT"=94 LITERAL_PRESENT="PRESENT"=95 LITERAL_ABSENT="ABSENT"=96 "@"=97 TOKEN_CONSTRAINED_BY=98 antlr-2.7.7/examples/python/asn1/asn1.g0000644000175000017500000001634310522211616017623 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ header { // import language specific stuff // need to import my local module defining super classes etc. import asn1 } options { language="Python"; } class asn1_l extends Lexer("asn1.CharScanner"); options { k = 3; charVocabulary = '\3'..'\377'; caseSensitive=true; testLiterals = true; codeGenMakeSwitchThreshold = 2; codeGenBitsetTestThreshold = 2; importVocab=ASN1; } tokens { DOTDOT; ELLIPSIS; } ASSIGN_OP : "::=" ; BAR : '|' ; COLON : ':' ; COMMA : ',' ; DOT : '.' ; DOTDOT : ".." ; ELLIPSIS : "..." ; EXCLAMATION : '!' ; INTERSECTION : '^' ; LESS : '<' ; L_BRACE : '{' ; L_BRACKET : '[' ; LL_BRACKET : { self.state_with_syntax==False }? "[[" ; L_PAREN : '(' ; MINUS : '-' ; PLUS : '+' ; R_BRACE : '}' ; R_BRACKET : ']' ; RR_BRACKET : { self.state_with_syntax==False }? "]]" ; R_PAREN : ')' ; SEMI : ';' ; AT : '@' ; /* These are whitespace (without newline) characters according to X.680:2002 */ protected WSchr : '\t' // horizontal tab (HT) '\t' 0x09 9 | ' ' // space (SP) ' ' 0x20 32 ; /* Same as WSign - just ignore consumed character */ protected WSign : WSchr { $setText("") } ; /* the end of line */ protected EOLchr : ( options { generateAmbigWarnings = false; } : '\r''\n' | '\r' | '\n' // | '\v' // vertical tab (VT) 0x0b 11 // | '\f' // form feed (FF) '\f' 0x0c 12 ) { $newline } ; /* like EOL but we ignore the consumed symbol */ protected EOLign : EOLchr { $setText("") } ; /* like EOL but we normalize consumed symbol */ protected EOLnrm : EOLchr { $setText("\n") } ; /* upper (ASCII) case characters */ protected UPCHR : 'A' .. 'Z' ; /* lower (ASCII) case characters */ protected LOCHR : 'a' .. 'z' ; /* what's a (arabic) digit */ protected DIGIT : '0' .. '9' ; /* whats a (roman) letter - yes, the name sucks a bit */ protected CHR : UPCHR | LOCHR ; /* what's allowed in an identifier */ protected IDCHR : CHR | '-' | DIGIT ; /* a binary digit */ protected BINCHR : ('0'|'1') ; /* a hex digit */ protected HEXCHR : ('0'..'9') | ('A'..'F') | ('a'..'f') ; /* a binary string */ protected BINSTR : "'" (BINCHR|WSign|EOLign)+ "'B" ; /* a hex string */ protected HEXSTR : "'" (HEXCHR|WSign|EOLign)+ "'H" ; /* escape character in character strings */ protected CHResc : '"' '"' { $setText("\"") } ; /* define which input symbols we can skip (so called whitespace) */ WS : ( WSchr | EOLchr )+ { $skip } ; /* A number is a sequence of digits - note that deliberatly we allow ** here for tokens like '001' etc. 
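** So, for instance, '0', '42' and '001' all come back as TOKEN_NUMBER from
** this rule; any range or length restrictions on numbers are assumed to be
** enforced later, by the parser, rather than here in the lexer.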
*/ TOKEN_NUMBER : (DIGIT)+ ; /* what's an idenifier */ ID { lowchrseen=False} : ("BIT" WS "STRING") => "BIT" WS "STRING" { $setType(TOKEN_BIT_STRING) } | ("OCTET" WS "STRING") => "OCTET" WS "STRING" { $setType(TOKEN_OCTET_STRING) } | ("OBJECT" WS "IDENTIFIER") => "OBJECT" WS "IDENTIFIER" { $setType(TOKEN_OBJECT_IDENTIFIER) } | ("ENCODED" WS "BY") => "ENCODED" WS "BY" { $setType(TOKEN_ENCODED_BY) } | ("CONSTRAINED" WS "BY") => "CONSTRAINED" WS "BY" { $setType(TOKEN_CONSTRAINED_BY) } | ("DEFINED" WS "BY") => "DEFINED" WS "BY" { $setType(TOKEN_DEFINED_BY) } | UPCHR ( LOCHR{lowchrseen=True}|UPCHR|DIGIT|'-')* { $setType(TOKEN_Word) if lowchrseen: pass else: $setType(TOKEN_WORD) } | LOCHR ( IDCHR )* { $setType(TOKEN_word) } ; /* what's a field */ FIELD { lowchrseen=False } : '&' UPCHR ( LOCHR{lowchrseen=True}|UPCHR|DIGIT|'-')* { $setType(TOKEN_Field) if lowchrseen: pass else: $setType(TOKEN_FIELD) } | '&' LOCHR ( IDCHR )* { $setType(TOKEN_field) } ; /* an octet string is either a bit string or a hex string */ OCTSTR : (BINSTR)=>BINSTR { $setType(TOKEN_BSTRING) } | HEXSTR { $setType(TOKEN_HSTRING) } ; /* A character string: this rule is not 1oo% correct as it will not ** ignore ws before and after eol. This needs best to be handled ** via a language specific function. Note that rule EOLnrm will ** replace any eol character by \n to simplify text processing. ** Contrary, ws is not normalized as ws can't be ignored in general. */ TOKEN_CSTRING : '"' (CHResc | EOLnrm | ~('"'|'\r'|'\n'))* '"' { s = self.chr_ws_erase($getText,"\n","\t ") $setText(s) } ; /* ASN.1 has kind of tricky comment rule: A comment starts with "--" ** and ends either with a "--" or with a eol character. Nesting of ** comments is therefore not possible, ie. ** -- not visible -- visible -- not visible ** The real ugly thing about this is that you can't just uncomment ** a line (regardless of it's content) by prefixing the liene with ** "--". For example assume you have this line: ** one INTEGER ::= 1 -- sample integer ** Then have this: ** -- one INTEGER ::= 1 -- sample integer ** This will hide ASN.1 and just makes the comment visible! */ COMMENT : "--" ( ~('-'|'\n'|'\r') | {self.LA(2) != '-'}? '-' )* { if self.LA(1) == '-': self.match("--"); $skip } ; ALTCOMMENT : { altcomment == true }? ( ALTCOMMENT1 | ALTCOMMENT2 | ALTCOMMENT3 ) { $skip } ; /* Due to problematic ASN.1 commentaries we have an alternative - ** "//" starts a comment that eat's up everything till end of line ** (as in C++ and Java). */ protected ALTCOMMENT1 : { altcomment == true }? "//" (~('\n'|'\r'))* { pass } ; /* We also also for typical C comments albeit not nested ones */ protected ALTCOMMENT2 : "/*" ( options { greedy=false; } : '\r' ( options { warnWhenFollowAmbig=false; } : '\n')? { $newline } | '\n' { $newline } | . )* "*/" { pass } ; /* And as homage to the master of style, Niklaus Wirth, we also also ** comments ala PASCAL */ protected ALTCOMMENT3 : "{*" ( options { greedy=false; } : '\r' ( options { warnWhenFollowAmbig=false; } : '\n')? { $nl } | '\n' { $nl; } | . )* "*}" { pass } ; antlr-2.7.7/examples/python/pascal/0000755000175000017500000000000010522211616017203 5ustar twernertwernerantlr-2.7.7/examples/python/pascal/Makefile.in0000644000175000017500000000367110522211616021257 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. 
## do not change this value subdir=examples/python/pascal ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/pascal.g \ $(_srcdir)/pascalw.g \ $(eol) g_py_FILES = \ pascal_l.py \ pascal_p.py \ pascal_w.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ pascal.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) pascal_l.py < $(_srcdir)/pascal.in ;\ $(python) pascal.py < $(_srcdir)/pascal.in ;\ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup pascal clean :: @@ECHO@ cleaning pascal ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/pascal/pascalw.g0000644000175000017500000001611410522211616021010 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ // // Pascal Tree Super Grammar (symtab.g derives from this) // // Adapted from, // Pascal User Manual And Report (Second Edition-1978) // Kathleen Jensen - Niklaus Wirth // // By // // Hakki Dogusan dogusanh@tr-net.net.tr // // Then significantly enhanced by Piet Schoutteten // with some guidance by Terence Parr. Piet added tree // construction, and some tree walkers. // options { language=Python; } class pascal_w extends TreeParser; options { importVocab = Pascal; ASTLabelType = "pascal.PascalAST"; } program : programHeading block ; programHeading : #(PROGRAM IDENT identifierList) | #(UNIT IDENT) ; identifier : IDENT ; block : ( labelDeclarationPart | constantDefinitionPart | typeDefinitionPart | variableDeclarationPart | procedureAndFunctionDeclarationPart | usesUnitsPart | IMPLEMENTATION )* compoundStatement ; usesUnitsPart : #(USES identifierList) ; labelDeclarationPart : #(LABEL ( label )+) ; label : NUM_INT ; constantDefinitionPart : #(CONST ( constantDefinition )+ ) ; constantDefinition : #(EQUAL IDENT constant) ; constant : NUM_INT | NUM_REAL | #( PLUS ( NUM_INT | NUM_REAL | IDENT ) ) | #( MINUS ( NUM_INT | NUM_REAL | IDENT ) ) | IDENT | STRING_LITERAL | #(CHR (NUM_INT|NUM_REAL)) ; string : STRING_LITERAL ; typeDefinitionPart : #(TYPE ( typeDefinition )+) ; typeDefinition : #(TYPEDECL IDENT ( type | #(FUNCTION (formalParameterList)? resultType) | #(PROCEDURE (formalParameterList)?) 
) ) ; type : #(SCALARTYPE identifierList) | #(DOTDOT constant constant) | typeIdentifier | structuredType | #(POINTER typeIdentifier) ; typeIdentifier : IDENT | CHAR | BOOLEAN | INTEGER | REAL | #( STRING ( IDENT | NUM_INT | NUM_REAL | ) ) ; structuredType : #(PACKED unpackedStructuredType) | unpackedStructuredType ; unpackedStructuredType : arrayType | recordType | setType | fileType ; /** Note here that the syntactic diff between brackets disappears. * If the brackets mean different things semantically, we need * two different alternatives here. */ arrayType : #(ARRAY typeList type) ; typeList : #( TYPELIST ( type )+ ) ; recordType : #(RECORD fieldList) ; fieldList : #( FIELDLIST ( fixedPart ( variantPart )? | variantPart ) ) ; fixedPart : ( recordSection )+ ; recordSection : #(FIELD identifierList type) ; variantPart : #( CASE tag ( variant )+ ) ; tag : #(VARIANT_TAG identifier typeIdentifier) | #(VARIANT_TAG_NO_ID typeIdentifier) ; variant : #(VARIANT_CASE constList fieldList) ; setType : #(SET type) ; fileType : #(FILE (type)?) ; /** Yields a list of VARDECL-rooted subtrees with VAR at the overall root */ variableDeclarationPart : #( VAR ( variableDeclaration )+ ) ; variableDeclaration : #(VARDECL identifierList type) ; procedureAndFunctionDeclarationPart : procedureOrFunctionDeclaration ; procedureOrFunctionDeclaration : procedureDeclaration | functionDeclaration ; procedureDeclaration : #(PROCEDURE IDENT (formalParameterList)? block ) ; formalParameterList : #(ARGDECLS ( formalParameterSection )+) ; formalParameterSection : parameterGroup | #(VAR parameterGroup) | #(FUNCTION parameterGroup) | #(PROCEDURE parameterGroup) ; parameterGroup : #(ARGDECL identifierList typeIdentifier) ; identifierList : #(IDLIST (IDENT)+) ; constList : #(CONSTLIST ( constant )+) ; functionDeclaration : #(FUNCTION IDENT (formalParameterList)? resultType block) ; resultType : typeIdentifier ; statement : #(COLON label unlabelledStatement) | unlabelledStatement ; unlabelledStatement : simpleStatement | structuredStatement ; simpleStatement : assignmentStatement | procedureStatement | gotoStatement ; assignmentStatement : #(ASSIGN variable expression) ; /** A variable is an id with a suffix and can look like: * id * id[expr,...] * id.id * id.id[expr,...] * id^ * id^.id * id^.id[expr,...] * ... * * LL has a really hard time with this construct as it's naturally * left-recursive. We have to turn into a simple loop rather than * recursive loop, hence, the suffixes. I keep in the same rule * for easy tree construction. */ variable : #(LBRACK variable (expression)+) | #(LBRACK2 variable (expression)+) | #(DOT variable IDENT) | #(POINTER variable) | #(AT IDENT) | IDENT ; expression : #(EQUAL expression expression) | #(NOT_EQUAL expression expression) | #(LT expression expression) | #(LE expression expression) | #(GE expression expression) | #(GT expression expression) | #(IN expression expression) | #(PLUS expression (expression)?) | #(MINUS expression (expression)?) | #(OR expression expression) | #(STAR expression expression) | #(SLASH expression expression) | #(DIV expression expression) | #(MOD expression expression) | #(AND expression expression) | #(NOT expression) | variable | functionDesignator | set | NUM_INT | NUM_REAL | #(CHR (NUM_INT|NUM_REAL)) | string | NIL ; functionDesignator : #(FUNC_CALL IDENT (parameterList)?) 
; parameterList : #( ARGLIST (actualParameter)+ ) ; set : #(SET (element)*) ; element : #(DOTDOT expression expression) | expression ; procedureStatement : #(PROC_CALL IDENT ( parameterList )?) ; actualParameter : expression ; gotoStatement : #(GOTO label) ; structuredStatement : compoundStatement | conditionalStatement | repetetiveStatement | withStatement ; compoundStatement : statements ; statements : #(BLOCK (statement)*) ; conditionalStatement : ifStatement | caseStatement ; ifStatement : #(IF expression statement (statement)?) ; caseStatement //pspsps ??? : #(CASE expression ( caseListElement )+ ( statements )? ) ; caseListElement : #(COLON constList statement) ; repetetiveStatement : whileStatement | repeatStatement | forStatement ; whileStatement : #(WHILE expression statement) ; repeatStatement : #(REPEAT statements expression) ; forStatement : #(FOR IDENT forList statement) ; forList : #(TO initialValue finalValue) | #(DOWNTO initialValue finalValue) ; initialValue : expression ; finalValue : expression ; withStatement : #(WITH recordVariableList statement) ; recordVariableList : ( variable )+ ; antlr-2.7.7/examples/python/pascal/pascal.py0000644000175000017500000000362610522211616021027 0ustar twernertwernerimport sys import antlr class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") class PascalAST(antlr.CommonAST): def __init__(self,*args): antlr.CommonAST.__init__(self,*args) def main(): import pascal_l import pascal_p import pascal_w L = pascal_l.Lexer() P = pascal_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression try: P.program() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." import sys sys.exit(1) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" W = pascal_w.Walker() W.program(ast) print "Ast tree walked without problems." if __name__ == "__main__": main() antlr-2.7.7/examples/python/pascal/pascal.g0000644000175000017500000003672610522211616020634 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ header { import pascal } // options { language=Python; } // I just want to import something in generated parser? How would I do // that? 
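// (A sketch of what presumably happens: like the "header" action above, the
// bare action block below is copied verbatim into the generated pascal_p.py,
// so "import pascal" makes pascal.PascalAST available to the parser module --
// see the ASTLabelType option further down.)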
{ import pascal } class pascal_p extends Parser; options { k = 2; // two token lookahead exportVocab=Pascal; // Call its vocabulary "Pascal" codeGenMakeSwitchThreshold = 2; // Some optimizations codeGenBitsetTestThreshold = 3; defaultErrorHandler = false; // Don't generate parser error handlers buildAST = true; ASTLabelType = "pascal.PascalAST"; } /* Define imaginary tokens used to organize tree * * One of the principles here is that any time you have a list of * stuff, you usually want to treat it like one thing (a list) a some * point in the grammar. You want trees to have a fixed number of children * as much as possible. For example, the definition of a procedure should * be something like #(PROCEDURE ID #(ARGDECLS ARG1 ARG2...)) not * #(PROCEDURE ID ARG1 ARG2 ... ) since this is harder to parse and * harder to manipulate. Same is true for statement lists (BLOCK) etc... */ tokens { BLOCK; // list of statements IDLIST; // list of identifiers; e.g., #(PROGRAM #(IDLIST ID ID...)) ELIST; // expression list for proc args etc... FUNC_CALL; PROC_CALL; SCALARTYPE; // IDLIST that is really a scalar type like (Mon,Tue,Wed) TYPELIST; // list of types such as for array declarations VARIANT_TAG;// for CASEs in a RECORD VARIANT_TAG_NO_ID;// for CASEs in a RECORD (no id, just a type) VARIANT_CASE;// a case of the variant CONSTLIST; // List of constants FIELDLIST; // list of fields in a record ARGDECLS; // overall group of declarations of args for proc/func. VARDECL; // declaration of a variable ARGDECL; // declaration of a parameter ARGLIST; // list of actual arguments (expressions) TYPEDECL; // declaration of a type FIELD; // the root a RECORD field } // Define some methods and variables to use in the generated parser. { pass } program : programHeading (INTERFACE!)? block DOT! ; programHeading : PROGRAM^ identifier LPAREN! identifierList RPAREN! SEMI! | UNIT^ identifier SEMI! ; identifier : IDENT ; block : ( labelDeclarationPart | constantDefinitionPart | typeDefinitionPart | variableDeclarationPart | procedureAndFunctionDeclarationPart | usesUnitsPart | IMPLEMENTATION )* compoundStatement ; usesUnitsPart : USES^ identifierList SEMI! ; labelDeclarationPart : LABEL^ label ( COMMA! label )* SEMI! ; label : unsignedInteger ; constantDefinitionPart : CONST^ constantDefinition ( SEMI! constantDefinition )* SEMI! ; constantDefinition : identifier EQUAL^ constant ; constantChr : CHR^ LPAREN! unsignedInteger RPAREN! ; constant : unsignedNumber |! s:sign n:unsignedNumber { #constant=#(s,n); } | identifier |! s2:sign id:identifier { #constant=#(s2,id); } | string | constantChr ; unsignedNumber : unsignedInteger | unsignedReal ; unsignedInteger : NUM_INT ; unsignedReal : NUM_REAL ; sign : PLUS | MINUS ; string : STRING_LITERAL ; typeDefinitionPart : TYPE^ typeDefinition ( SEMI! typeDefinition )* SEMI! ; //PSPSPS typeDefinition : identifier e:EQUAL^ {#e.setType(TYPEDECL);} ( type | functionType // | FUNCTION^ (formalParameterList)? COLON! resultType | procedureType // | PROCEDURE^ (formalParameterList)? ) ; functionType : FUNCTION^ (formalParameterList)? COLON! resultType ; procedureType : PROCEDURE^ (formalParameterList)? ; type : simpleType | structuredType | pointerType ; simpleType : scalarType | subrangeType | typeIdentifier | stringtype ; scalarType : LPAREN^ identifierList RPAREN! {#scalarType.setType(SCALARTYPE);} ; subrangeType : constant DOTDOT^ constant ; typeIdentifier : identifier | CHAR | BOOLEAN | INTEGER | REAL | STRING // as in return type: FUNCTION ... 
(...): string; ; structuredType : PACKED^ unpackedStructuredType | unpackedStructuredType ; unpackedStructuredType : arrayType | recordType | setType | fileType ; stringtype : STRING^ LBRACK! (identifier|unsignedNumber) RBRACK! ; arrayType : ARRAY^ LBRACK! typeList RBRACK! OF! componentType | ARRAY^ LBRACK2! typeList RBRACK2! OF! componentType ; typeList : indexType ( COMMA! indexType )* {#typeList = #(#[TYPELIST],#typeList);} ; indexType : simpleType ; componentType : type ; recordType : RECORD^ fieldList END! ; fieldList : ( fixedPart ( SEMI! variantPart | SEMI! )? | variantPart ) {#fieldList=#([FIELDLIST],#fieldList);} ; fixedPart : recordSection ( SEMI! recordSection )* ; recordSection : identifierList COLON! type {#recordSection = #([FIELD],#recordSection);} ; variantPart : CASE^ tag OF! variant ( SEMI! variant | SEMI! )* ; tag! : id:identifier COLON t:typeIdentifier {#tag=#([VARIANT_TAG],id,t);} | t2:typeIdentifier {#tag=#([VARIANT_TAG_NO_ID],t2);} ; variant : constList c:COLON^ {#c.setType(VARIANT_CASE);} LPAREN! fieldList RPAREN! ; setType : SET^ OF! baseType ; baseType : simpleType ; fileType : FILE^ OF! type | FILE ; pointerType : POINTER^ typeIdentifier ; // Yields a list of VARDECL-rooted subtrees with VAR at the overall root */ variableDeclarationPart : VAR^ variableDeclaration ( SEMI! variableDeclaration )* SEMI! ; variableDeclaration : identifierList c:COLON^ {#c.setType(VARDECL);} type ; procedureAndFunctionDeclarationPart : procedureOrFunctionDeclaration SEMI! ; procedureOrFunctionDeclaration : procedureDeclaration | functionDeclaration ; procedureDeclaration : PROCEDURE^ identifier (formalParameterList)? SEMI! block ; formalParameterList : LPAREN^ formalParameterSection ( SEMI! formalParameterSection )* RPAREN! {#formalParameterList.setType(ARGDECLS);} ; formalParameterSection : parameterGroup | VAR^ parameterGroup | FUNCTION^ parameterGroup | PROCEDURE^ parameterGroup ; parameterGroup! : ids:identifierList COLON! t:typeIdentifier {#parameterGroup = #([ARGDECL],ids,t);} ; identifierList : identifier ( COMMA! identifier )* {#identifierList = #(#[IDLIST],#identifierList);} ; constList : constant ( COMMA! constant )* {#constList = #([CONSTLIST],#constList);} ; functionDeclaration : FUNCTION^ identifier (formalParameterList)? COLON! resultType SEMI! block ; resultType : typeIdentifier ; statement : label COLON^ unlabelledStatement | unlabelledStatement ; unlabelledStatement : simpleStatement | structuredStatement ; simpleStatement : assignmentStatement | procedureStatement | gotoStatement | emptyStatement ; assignmentStatement : variable ASSIGN^ expression ; /* A variable is an id with a suffix and can look like: * id * id[expr,...] * id.id * id.id[expr,...] * id^ * id^.id * id^.id[expr,...] * ... * * LL has a really hard time with this construct as it's naturally * left-recursive. We have to turn into a simple loop rather than * recursive loop, hence, the suffixes. I keep in the same rule * for easy tree construction. */ variable : ( AT^ identifier // AT is root of identifier; then other op becomes root | identifier ) ( LBRACK^ expression ( COMMA! expression)* RBRACK! | LBRACK2^ expression ( COMMA! expression)* RBRACK2! | DOT^ identifier | POINTER^ )* ; expression : simpleExpression ( (EQUAL^ | NOT_EQUAL^ | LT^ | LE^ | GE^ | GT^ | IN^) simpleExpression )* ; simpleExpression : term ( (PLUS^ | MINUS^ | OR^) term )* ; term : signedFactor ( (STAR^ | SLASH^ | DIV^ | MOD^ | AND^) signedFactor )* ; signedFactor : (PLUS^|MINUS^)? factor ; factor : variable | LPAREN! 
expression RPAREN! | functionDesignator | unsignedConstant | set | NOT^ factor ; unsignedConstant : unsignedNumber | constantChr //pspsps added | string | NIL ; functionDesignator! : id:identifier LPAREN! args:parameterList RPAREN! {#functionDesignator = #([FUNC_CALL],id,args);} ; parameterList : actualParameter ( COMMA! actualParameter )* {#parameterList = #([ARGLIST],#parameterList);} ; set : LBRACK^ elementList RBRACK! {#set.setType(SET);} | LBRACK2^ elementList RBRACK2! {#set.setType(SET);} ; elementList : element ( COMMA! element )* | ; element : expression ( DOTDOT^ expression )? ; procedureStatement! : id:identifier ( LPAREN! args:parameterList RPAREN! )? {#procedureStatement = #([PROC_CALL],id,args);} ; actualParameter : expression ; gotoStatement : GOTO^ label ; emptyStatement : ; empty : /* empty */ ; structuredStatement : compoundStatement | conditionalStatement | repetetiveStatement | withStatement ; compoundStatement : BEGIN! statements END! ; statements : statement ( SEMI! statement )* {#statements = #([BLOCK],#statements);} ; conditionalStatement : ifStatement | caseStatement ; ifStatement : IF^ expression THEN! statement ( // CONFLICT: the old "dangling-else" problem... // ANTLR generates proper code matching // as soon as possible. Hush warning. options { generateAmbigWarnings=false; } : ELSE! statement )? ; caseStatement //pspsps ??? : CASE^ expression OF! caseListElement ( SEMI! caseListElement )* ( SEMI! ELSE! statements )? END! ; caseListElement : constList COLON^ statement ; repetetiveStatement : whileStatement | repeatStatement | forStatement ; whileStatement : WHILE^ expression DO! statement ; repeatStatement : REPEAT^ statements UNTIL! expression ; forStatement : FOR^ identifier ASSIGN! forList DO! statement ; forList : initialValue (TO^ | DOWNTO^) finalValue ; initialValue : expression ; finalValue : expression ; withStatement : WITH^ recordVariableList DO! statement ; recordVariableList : variable ( COMMA! variable )* ; class pascal_l extends Lexer; options { charVocabulary = '\0'..'\377'; exportVocab = Pascal; // call the vocabulary "Pascal" testLiterals = false; // don't automatically test for literals k = 4; // four characters of lookahead caseSensitive = false; caseSensitiveLiterals = false; } tokens { AND = "and" ; ARRAY = "array" ; BEGIN = "begin" ; BOOLEAN = "boolean" ; CASE = "case" ; CHAR = "char" ; CHR = "chr" ; CONST = "const" ; DIV = "div" ; DO = "do" ; DOWNTO = "downto" ; ELSE = "else" ; END = "end" ; FILE = "file" ; FOR = "for" ; FUNCTION = "function" ; GOTO = "goto" ; IF = "if" ; IN = "in" ; INTEGER = "integer" ; LABEL = "label" ; MOD = "mod" ; NIL = "nil" ; NOT = "not" ; OF = "of" ; OR = "or" ; PACKED = "packed" ; PROCEDURE = "procedure" ; PROGRAM = "program" ; REAL = "real" ; RECORD = "record" ; REPEAT = "repeat" ; SET = "set" ; THEN = "then" ; TO = "to" ; TYPE = "type" ; UNTIL = "until" ; VAR = "var" ; WHILE = "while" ; WITH = "with" ; METHOD ; ADDSUBOR ; ASSIGNEQUAL ; SIGN ; FUNC ; NODE_NOT_EMIT ; MYASTVAR ; LF ; UNIT = "unit" ; INTERFACE = "interface" ; USES = "uses" ; STRING = "string" ; IMPLEMENTATION = "implementation" ; //pspsps ??? 
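// (DOTDOT below is an imaginary token: no lexer rule matches it directly;
// it is emitted by the DOT rule further down, which does $setType(DOTDOT)
// when a second '.' follows the first.)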
// wh DOTDOT; } //---------------------------------------------------------------------------- // OPERATORS //---------------------------------------------------------------------------- PLUS : '+' ; MINUS : '-' ; STAR : '*' ; SLASH : '/' ; ASSIGN : ":=" ; COMMA : ',' ; SEMI : ';' ; COLON : ':' ; EQUAL : '=' ; NOT_EQUAL : "<>" ; LT : '<' ; LE : "<=" ; GE : ">=" ; GT : '>' ; LPAREN : '(' ; RPAREN : ')' ; LBRACK : '[' ; // line_tab[line] LBRACK2 : "(." ; // line_tab(.line.) RBRACK : ']' ; RBRACK2 : ".)" ; POINTER : '^' ; AT : '@' ; DOT : '.' ('.' { $setType(DOTDOT)} )? ; LCURLY : "{" ; RCURLY : "}" ; // Whitespace -- ignored WS : ( ' ' | '\t' | '\f' // handle newlines | ( "\r\n" // Evil DOS | '\r' // Macintosh | '\n' // Unix (the right way) ) { $newline; } ) { _ttype = SKIP; } ; COMMENT_1 : "(*" ( options { generateAmbigWarnings=false; } : { self.LA(2) != ')' }? '*' | '\r' '\n' {$newline;} | '\r' {$newline;} | '\n' {$newline;} | ~('*' | '\n' | '\r') )* "*)" {$setType(SKIP);} ; COMMENT_2 : '{' ( options {generateAmbigWarnings=false;} : '\r' '\n' {$newline;} | '\r' {$newline;} | '\n' {$newline;} | ~('}' | '\n' | '\r') )* '}' {$setType(SKIP);} ; // an identifier. Note that testLiterals is set to true! This means // that after we match the rule, we look in the literals table to see // if it's a literal or really an identifer IDENT options {testLiterals=true;} : ('a'..'z') ('a'..'z'|'0'..'9'|'_')* //pspsps ; // string literals STRING_LITERAL : '\'' ("\'\'" | ~('\''))* '\'' //pspsps * in stead of + because of e.g. '' ; NUM_INT : ('0'..'9')+ // everything starts with a digit sequence ( ( { (self.LA(2) != '.') and (self. LA(2)!=')')}? // force k=2; avoid ".." //PSPSPS example ARRAY (.1..99.) OF char; // after .. thinks it's a NUM_REAL '.' {$setType(NUM_REAL);} // dot means we are float ('0'..'9')+ (EXPONENT)? )? | EXPONENT {$setType(NUM_REAL);} // 'E' means we are float ) ; // a couple protected methods to assist in matching floating point numbers protected EXPONENT : ('e') ('+'|'-')? ('0'..'9')+ ; antlr-2.7.7/examples/python/pascal/pascal.in0000644000175000017500000000073710522211616021005 0ustar twernertwerner{ program 0.1 assuming annual inflation rates of 7, 8, and 10 per cent, find the factor by which the frank, dollar, pound sterlinh, mark, or guilder will have been devalued in 1, 2, ... n years.} program inflation(output); const n = 10; var i : integer; w1, w2, w3 : real; begin i := 0; w1 := 1.0; w2 := 1.0; w3 := 1.0; repeat i := i + 1; w1 := w1 * 1.07; w2 := w2 * 1.08; w3 := w3 * 1.10; writeln(i, w1, w2, w3); until i=n end. antlr-2.7.7/examples/python/parseBinary/0000755000175000017500000000000010522211616020217 5ustar twernertwernerantlr-2.7.7/examples/python/parseBinary/binary.g0000644000175000017500000000111610522211616021652 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language=Python; } class binary_p extends Parser; file: ( sh:SHORT { print sh.getText() } | st:STRING { print "\"" + st.getText() + "\"" } )+ ; class binary_l extends Lexer; options { charVocabulary = '\u0000'..'\u00FF'; } SHORT : '\0' high:. lo:. { v = (ord(high)<<8) + ord(lo) $setText(str(v)) } ; STRING : '\1'! // begin string (discard) ( ~'\2' )* '\2'! 
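// (reading SHORT and STRING together, the input framing this lexer expects is:
//    0x00 <hi> <lo>   -> a 16-bit big-endian number, re-emitted as decimal text
//    0x01 ... 0x02    -> a raw string, with both delimiter bytes discarded)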
// end string (discard) ; antlr-2.7.7/examples/python/parseBinary/binary.py0000644000175000017500000000320210522211616022052 0ustar twernertwernerimport sys import antlr class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") def main(): import binary_l import binary_p L = binary_l.Lexer() P = binary_p.Parser(L) ### Parse the input expression try: P.file() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." import sys sys.exit(0) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); if __name__ == "__main__": main() antlr-2.7.7/examples/python/parseBinary/Makefile.in0000644000175000017500000000354210522211616022270 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/parseBinary ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/binary.g \ $(eol) g_py_FILES = \ binary_p.py binary_l.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ binary.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) binary_l.py < $(_srcdir)/binary.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup binary clean :: @@ECHO@ cleaning binary ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. 
For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/parseBinary/binary.in0000644000175000017500000000001310522211616022025 0ustar twernertwernera testantlr-2.7.7/examples/python/treewalk/0000755000175000017500000000000010522211616017556 5ustar twernertwernerantlr-2.7.7/examples/python/treewalk/Makefile.in0000644000175000017500000000367110522211616021632 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/treewalk ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/treewalk.g \ $(eol) g_py_FILES = \ treewalk_l.py \ treewalk_p.py \ treewalk_w.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ treewalk.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) treewalk_l.py < $(_srcdir)/treewalk.in ;\ $(python) treewalk.py < $(_srcdir)/treewalk.in ;\ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup treewalk clean :: @@ECHO@ cleaning treewalk ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/treewalk/treewalk.py0000644000175000017500000000356610522211616021760 0ustar twernertwernerimport sys import antlr class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") def main(): import treewalk_l import treewalk_p import treewalk_w L = treewalk_l.Lexer() P = treewalk_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression try: P.block() except antlr.ANTLRException, ex: print "*** error(s) while parsing." 
print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." import sys sys.exit(1) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" W = treewalk_w.Walker(); ### Traverse the tree created by the parser W.block(ast); ast = W.getAST(); if ast: print "List: " + ast.toStringList() if __name__ == "__main__": main() antlr-2.7.7/examples/python/treewalk/treewalk.in0000644000175000017500000000011010522211616021714 0ustar twernertwerner{ i = 3+4*5; if ( 6 ) j = 7; else j = 8; while ( 9 ) { k = 1; } } antlr-2.7.7/examples/python/treewalk/treewalk.g0000644000175000017500000000413510522211616021547 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language=Python; } class treewalk_p extends Parser; options { codeGenMakeSwitchThreshold = 3; codeGenBitsetTestThreshold = 4; buildAST=true; } block : LCURLY^ ( statement )* RCURLY! ; statement : expr SEMI! | "if"^ LPAREN! expr RPAREN! statement ( "else"! statement )? | "while"^ LPAREN! expr RPAREN! statement |! b:block { statement_AST = b_AST; } // do some manual tree returning ; expr: assignExpr ; assignExpr : aexpr (ASSIGN^ assignExpr)? ; aexpr : mexpr (PLUS^ mexpr)* ; mexpr : atom (STAR^ atom)* ; atom: ID | INT ; class treewalk_w extends TreeParser; block : #( LCURLY ( stat )+ ) ; stat: #("if" expr stat (stat)?) | #("while" expr stat) | expr | block ; expr: #(ASSIGN expr expr) {print "found assign" } | #(PLUS expr expr) {print "found +" } | #(STAR expr expr) {print "found *" } | a:ID {print "found ID " ,a.getText() } | b:INT {print "found INT ",b.getText() } ; class treewalk_l extends Lexer; WS : (' ' | '\t' | '\n' | '\r') { _ttype = SKIP; } ; LPAREN: '(' ; RPAREN: ')' ; LCURLY: '{' ; RCURLY: '}' ; STAR: '*' ; PLUS: '+' ; ASSIGN : '=' ; SEMI: ';' ; COMMA : ',' ; protected ESC : '\\' ( 'n' | 'r' | 't' | 'b' | 'f' | '"' | '\'' | '\\' | ('0'..'3') ( DIGIT (DIGIT)? )? | ('4'..'7') (DIGIT)? ) ; protected DIGIT : '0'..'9' ; INT : (DIGIT)+ ; ID : ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'_'|'0'..'9')* { try: i = literals[self.getText()] _ttype = i; except: pass } ; antlr-2.7.7/examples/python/cpp/0000755000175000017500000000000010522211616016522 5ustar twernertwernerantlr-2.7.7/examples/python/cpp/cpp.g0000644000175000017500000002130310522211616017453 0ustar twernertwerner/* * A C PreProcessor * * Handles #define/#undef, #ifdef/#elsif/#else/#endif, and #include using only * an ANTLR lexer (actually a stack of them). This could be easily integrated * with an existing lexer to do preprocessing and tokenizing all at once. * * Author: Eric Mahurin - eric_mahurin at yahoo dot com * License: just give me credit * * BUG: missing some of the simpler directives * BUG: doesn't follow the cpp spec perfectly - haven't made any effort at * this not well tested * * Be aware that this is my first real attempt at both ANTLR and Java, so * things may not be done the best way. I welcome suggestions and fixes. * * 041124 - cpp.g translated and adapted as Python example by MK. 
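 *
 * Rough usage, going by the accompanying Makefile (which in effect runs the
 * generated cppLexer.py on cpp.in): the lexer reads C source on stdin,
 * expands #define/#ifdef/#include as it goes, and the __main__ section below
 * writes the text of the surviving tokens to stdout.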
*/ header { import sys import StringIO } header "__main__" { import traceback class cpp: def __init__(self, *args): try: // will need a stack of lexers for #include and macro calls self.mainLexer = Lexer(sys.stdin) Lexer.selector.select(self.mainLexer) for token in Lexer.selector: sys.stdout.write(token.getText()) except Exception, e: sys.stderr.write("exception: " + str(e) + '\n') traceback.print_exc() Lexer.selector = antlr.TokenStreamSelector() cpp(sys.argv[1:]) } options { language="Python"; } class cppLexer extends Lexer; options { testLiterals = false; k = 4; } tokens { ENDIF ; } { selector = antlr.TokenStreamSelector() // must be assigned externally ifState = 1 // -1: no-else false, 0: false, 1: true ifStates = [] // holds nested if conditions defines = {} // holds the defines defineArgs = {} // holds the args for a macro call def uponEOF(self): if Lexer.selector.getCurrentStream() != Lexer: try: Lexer.selector.pop() // return to old lexer/stream Lexer.selector.retry() //except antlr.TokenStreamRetryException, tsre: // raise tsre except IndexError: // return a real EOF if nothing in stack pass } DIRECTIVE { args = [] condition = True } : '#' ( "include" (WS)? includeFile:STRING { if Lexer.ifState == 1: name = includeFile.getText() name = name[1:-1] try: sublexer = Lexer(file(name)) // want defines to be persistent sublexer.defines = Lexer.defines sublexer.setFilename(name) Lexer.selector.push(sublexer) Lexer.selector.retry() except IOError, e: sys.stderr.write("cannot find file " + name + '\n') } | "define" WS defineMacro:RAW_IDENTIFIER { // first element will hold the macro text } ( ( '(' // get arguments if you find them (no spaces before left paren) (WS)? defineArg0:RAW_IDENTIFIER (WS)? { args += defineArg0.getText() } ( COMMA (WS)? defineArg1:RAW_IDENTIFIER (WS)? { args += defineArg1.getText() } )* ')' | ' '|'\t'|'\f' ) ( options { greedy=true; } : ' '|'\t'|'\f' )* // store the text verbatim - tokenize when called defineText:MACRO_TEXT { args[0] = defineText.getText() } )? 
('\n'|"\r\n"|'\r') { $newline } { if Lexer.ifState == 1: Lexer.defines[defineMacro.getText()] = args $skip } | "undef" WS undefMacro:RAW_IDENTIFIER { if Lexer.ifState == 1: del Lexer.defines[undefMacro.getText()] $skip } | ( "ifdef" | "ifndef" { condition=False } ) WS ifMacro:RAW_IDENTIFIER { Lexer.ifStates.append(ifState) if Lexer.ifState == 1: if Lexer.defines.has_key(ifMacro.getText()) == condition: Lexer.ifState = 1 else: Lexer.ifState = 0 else: Lexer.ifState = -1 if Lexer.ifState == 1: $skip else: // gobble up tokens until ENDIF (could be caused by else) while True: try: if Lexer.selector.nextToken().getType() == ENDIF: break except antlr.TokenStreamRetryException, r: // just continue if someone tried retry pass // retry in case we switched lexers Lexer.selector.retry() } | ( "else" // treat like elsif (true) | "elsif" WS elsifMacro:RAW_IDENTIFIER { condition = Lexer.defines.has_key(elsifMacro.getText()) } ) { if Lexer.ifState == 1: // previous if/elsif was taken - discard rest Lexer.ifState = -1; while True: try: if Lexer.selector.nextToken().getType() == ENDIF: break except antlr.TokenStreamRetryException, r: // just continue if someone tried retry pass // retry in case we switched lexers Lexer.selector.retry() elif Lexer.ifState == 0 and condition: // "elsif" (true) or "else" $setType(ENDIF) Lexer.ifState = 1 } | "endif" { if Lexer.ifState == 1: condition = True else: condition = False try: // return to previous if state del Lexer.ifStates[-1] if condition: $skip else: // tell if/else/elsif to stop discarding tokens $setType(ENDIF) except IndexError, e: // endif with no if pass } ); IDENTIFIER options { testLiterals=true; } { define = [] args = [] } : identifier:RAW_IDENTIFIER { // see if this is a macro argument define = Lexer.defineArgs.has_key(identifier.getText()) if define: define = Lexer.defineArgs[identifier.getText()] elif _begin == 0 and not define: // see if this is a macro call define = Lexer.defines.has_key(identifier.getText()) if define: define = Lexer.defines[identifier.getText()] } ( { define and len(define) }? ( WS | COMMENT )? // take in arguments if macro call requires them '(' callArg0:EXPR { args += callArg0.getText() } ( COMMA callArg1:EXPR { args += callArg1.getText() } )* { len(args) == len(define)-1 }? // better have right amount ')' | { not (define and len(define)) }? ) { if define: defineText = define[0] if _begin: // just substitute text if called from EXPR - no token created $setText(defineText) else: // create a new lexer to handle the macro text sublexer = Lexer(StringIO.StringIO(defineText)) for i in range(len(args)): // treat macro arguments similar to local defines arg = [] arg.append(args[i]) sublexer.defineArgs[define[1+i]] = arg Lexer.selector.push(sublexer) // retry in new lexer Lexer.selector.retry() }; STRING : '"' ( '\\' . | ~('\\'|'"') )* '"' // double quoted string | '\'' ( '\\' . | ~('\\'|'\'') )* '\'' // single quoted string ; protected MACRO_TEXT : ( '\\'! 
NL { $newline } // escaped newline | ~('\n'|'\r') )* ; protected NL options { generateAmbigWarnings=false; // single '\r' is ambig with '\r' '\n' } : '\r' | '\n' | '\r' '\n' ; WS : ( ' ' | '\t' | '\f' | NL { $newline } ) { /* $skip */ } ; COMMENT : ( "//" (~('\n'|'\r'))* NL { $newline } // single line comment | "/*" ( options{greedy=false;} : NL { $newline } | ~('\n'|'\r') )* "*/" // multi-line comment ) { /* $skip */ } ; protected RAW_IDENTIFIER : ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'_'|'0'..'9')* ; NUMBER : ('0'..'9') ('0'..'9'|'a'..'z'|'A'..'Z'|'_')* // allow alpha suffixes on numbers (i.e. L:long) ; // group symbols into categories to parse EXPR LEFT : '(' | '[' | '{' ; RIGHT : ')' | ']' | '}' ; COMMA : ',' ; OPERATOR : '!' | '#' | '$' | '%' | '&' | '*' | '+' | '-' | '.' | '/' | ':' | ';' | '<' | '=' | '>' | '?' | '@' | '\\' | '^' | '`' | '|' | '~' ; protected EXPR // allow just about anything without being ambiguous : (WS)? (NUMBER|IDENTIFIER)? ( ( LEFT EXPR ( COMMA EXPR )* RIGHT | STRING | OPERATOR // quotes, COMMA, LEFT, and RIGHT not in here ) EXPR )? ; antlr-2.7.7/examples/python/cpp/Makefile.in0000644000175000017500000000351710522211616020575 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger/Marq Kole, 2004. ## do not change this value subdir=examples/python/cpp ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/cpp.g \ $(eol) g_py_FILES = \ cppLexer.py \ $(eol) compile : $(g_py_FILES) test :: test1 %.h : $(_srcdir)/%.h @ @CP@ $< $@ test1_deps = \ incl.h \ subincl.h \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) cppLexer.py < $(_srcdir)/cpp.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup tinyc clean :: @@ECHO@ cleaning tinyc ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. 
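The cpp example above hinges on antlr.TokenStreamSelector: the main lexer is selected first, an "#include" action pushes a sub-lexer for the included file and retries, and uponEOF() pops back to the including stream. The following is a minimal driver sketch of that pattern, not part of the distribution; it restates the grammar's own "__main__" header, and the module name cppLexer and its Lexer class are assumed from the generated file.

import sys
import antlr
import cppLexer                      # module assumed to be generated from cpp.g

selector = antlr.TokenStreamSelector()
cppLexer.Lexer.selector = selector   # the grammar's actions expect this class attribute

selector.select(cppLexer.Lexer(sys.stdin))   # start with the outermost input

# Inside the DIRECTIVE rule an include pushes a sub-lexer and retries:
#     Lexer.selector.push(Lexer(file(name))) ; Lexer.selector.retry()
# and uponEOF() pops the stack to resume the including file.

for token in selector:               # iterate until the outermost stream hits EOF
    sys.stdout.write(token.getText())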
For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/cpp/cpp.in0000644000175000017500000000004510522211616017633 0ustar twernertwernerint a,b; #include "incl.h" int c; antlr-2.7.7/examples/python/cpp/subincl.h0000644000175000017500000000001210522211616020323 0ustar twernertwernerint z; antlr-2.7.7/examples/python/cpp/incl.h0000644000175000017500000000005010522211616017613 0ustar twernertwerner int x; #include "subincl.h" int y; antlr-2.7.7/examples/python/IDL/0000755000175000017500000000000010522211616016350 5ustar twernertwernerantlr-2.7.7/examples/python/IDL/idl.in0000644000175000017500000000155110522211616017452 0ustar twernertwernermodule A { module B { module C { interface Bar { }; }; }; }; module MainModule { typedef sequence< sequence > myNestedSeq ; typedef long myArray[3][4]; struct MyStruct { long num1, num2; string name; }; const long max = 127; exception MyEx { string msg; }; struct Node { string data; Node next; }; interface Bar { }; exception MyException { string message; }; interface class { }; typedef sequence myLongSeq; interface Foo { attribute string name; long bar(in short a, inout string s, out Foo f); }; enum Color { red, blue, green }; typedef Foo MyFoo; union MyUnion switch(long) { case 1: string name; case 2: long num; case 3: MyStruct str; case 4: MyFoo mf; default: boolean b; }; };antlr-2.7.7/examples/python/IDL/Makefile.in0000644000175000017500000000356210522211616020423 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/IDL ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/idl.g \ $(eol) g_py_FILES = \ idl_l.py \ idl_p.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ test :: test1 test1_deps = \ idl.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) idl_l.py < $(_srcdir)/idl.in ;\ $(python) idl.py < $(_srcdir)/idl.in ;\ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup idl clean :: @@ECHO@ cleaning idl ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. 
For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/IDL/idl.py0000644000175000017500000000330410522211616017472 0ustar twernertwernerimport sys import antlr class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k); self.visit1(s); self.printf(")") def visit(self,node): self.visit1(node); self.printf("\n") def main(): import idl_l import idl_p L = idl_l.Lexer() P = idl_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression try: P.specification() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." import sys sys.exit(0) ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" if __name__ == "__main__": import sys main() antlr-2.7.7/examples/python/IDL/idl.g0000644000175000017500000002331310522211616017272 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ options { language=Python; } class idl_p extends Parser; options { exportVocab=IDL; buildAST=true; } specification : (definition)+ ; definition : ( type_dcl SEMI! | const_dcl SEMI! | except_dcl SEMI! | interf SEMI! | module SEMI! ) ; module : "module" identifier LCURLY d:definition_list RCURLY ; definition_list : (definition)+ ; interf : "interface" identifier inheritance_spec (interface_body)? ; interface_body : LCURLY! (export)* RCURLY! ; export : ( type_dcl SEMI | const_dcl SEMI | except_dcl SEMI | attr_dcl SEMI | op_dcl SEMI ) ; inheritance_spec : COLON scoped_name_list | ; scoped_name_list : scoped_name (COMMA scoped_name)* ; scoped_name : opt_scope_op identifier (SCOPEOP identifier)* ; opt_scope_op : SCOPEOP | ; const_dcl : "const" const_type identifier ASSIGN const_exp ; const_type : integer_type | char_type | boolean_type | floating_pt_type | string_type | scoped_name ; /* EXPRESSIONS */ const_exp : or_expr ; or_expr : xor_expr ( or_op xor_expr )* ; or_op : OR ; xor_expr : and_expr ( xor_op and_expr )* ; xor_op : XOR ; and_expr : shift_expr ( and_op shift_expr )* ; and_op : AND ; shift_expr : add_expr ( shift_op add_expr )* ; shift_op : LSHIFT | RSHIFT ; add_expr : mult_expr ( add_op mult_expr )* ; add_op : PLUS | MINUS ; mult_expr : unary_expr ( mult_op unary_expr )* ; mult_op : STAR | DIV | MOD ; unary_expr : unary_operator primary_expr | primary_expr ; unary_operator : MINUS | PLUS | TILDE ; // Node of type TPrimaryExp serves to avoid inf. 
recursion on tree parse primary_expr : scoped_name | literal | LPAREN const_exp RPAREN ; literal : integer_literal | string_literal | character_literal | floating_pt_literal | boolean_literal ; boolean_literal : "TRUE" | "FALSE" ; positive_int_const : const_exp ; type_dcl : "typedef" type_declarator | struct_type | union_type | enum_type | | "native" simple_declarator ; type_declarator : type_spec declarators ; type_spec : simple_type_spec | constr_type_spec ; simple_type_spec : base_type_spec | template_type_spec | scoped_name ; base_type_spec : integer_type | char_type | boolean_type | floating_pt_type | "octet" | "any" ; integer_type : ("unsigned")? ("short" | "long") ; char_type : "char" ; floating_pt_type : "float" | "double" ; boolean_type : "boolean" ; template_type_spec : sequence_type | string_type ; constr_type_spec : struct_type | union_type | enum_type ; declarators : declarator (COMMA declarator)* ; declarator : identifier opt_fixed_array_size ; opt_fixed_array_size : (fixed_array_size)* ; simple_declarator : identifier ; struct_type : "struct" identifier LCURLY member_list RCURLY ; member_list : (member)+ ; member : type_spec declarators SEMI ; union_type : "union" identifier "switch" LPAREN switch_type_spec RPAREN LCURLY switch_body RCURLY ; switch_type_spec : integer_type | char_type | boolean_type | enum_type | scoped_name ; switch_body : case_stmt_list ; case_stmt_list : (case_stmt)+ ; case_stmt : case_label_list element_spec SEMI ; case_label_list : (case_label)+ ; case_label : "case" const_exp COLON | "default" COLON ; element_spec : type_spec declarator ; enum_type : "enum" identifier LCURLY enumerator_list RCURLY ; enumerator_list : enumerator (COMMA enumerator)* ; enumerator : identifier ; sequence_type : "sequence" LT simple_type_spec opt_pos_int GT ; opt_pos_int : (COMMA positive_int_const)? ; string_type : "string" opt_pos_int_br ; opt_pos_int_br : (LT positive_int_const GT)? ; fixed_array_size : LBRACK positive_int_const RBRACK ; attr_dcl : ("readonly")? "attribute" param_type_spec simple_declarator_list ; simple_declarator_list : simple_declarator (COMMA simple_declarator)* ; except_dcl : "exception" identifier LCURLY opt_member_list RCURLY ; opt_member_list : (member)* ; op_dcl : op_attribute op_type_spec identifier parameter_dcls opt_raises_expr c:opt_context_expr ; opt_raises_expr : (raises_expr)? ; opt_context_expr : (context_expr)? ; op_attribute : "oneway" | ; op_type_spec : param_type_spec | "void" ; parameter_dcls : LPAREN (param_dcl_list)? RPAREN! ; param_dcl_list : param_dcl (COMMA param_dcl)* ; param_dcl : param_attribute param_type_spec simple_declarator ; param_attribute : "in" | "out" | "inout" ; raises_expr : "raises" LPAREN scoped_name_list RPAREN ; context_expr : "context" LPAREN string_literal_list RPAREN ; string_literal_list : string_literal (COMMA! string_literal)* ; param_type_spec : base_type_spec | string_type | scoped_name ; integer_literal : INT | OCTAL | HEX ; string_literal : (STRING_LITERAL)+ ; character_literal : CHAR_LITERAL ; floating_pt_literal : f:FLOAT ; identifier : IDENT ; /* IDL LEXICAL RULES */ class idl_l extends Lexer; options { exportVocab=IDL; k=4; } SEMI options { paraphrase = ";"; } : ';' ; QUESTION options { paraphrase = "?"; } : '?' 
; LPAREN options { paraphrase = "("; } : '(' ; RPAREN options { paraphrase = ")"; } : ')' ; LBRACK options { paraphrase = "["; } : '[' ; RBRACK options { paraphrase = "]"; } : ']' ; LCURLY options { paraphrase = "{"; } : '{' ; RCURLY options { paraphrase = "}"; } : '}' ; OR options { paraphrase = "|"; } : '|' ; XOR options { paraphrase = "^"; } : '^' ; AND options { paraphrase = "&"; } : '&' ; COLON options { paraphrase = ":"; } : ':' ; COMMA options { paraphrase = ","; } : ',' ; DOT options { paraphrase = "."; } : '.' ; ASSIGN options { paraphrase = "="; } : '=' ; NOT options { paraphrase = "!"; } : '!' ; LT options { paraphrase = "<"; } : '<' ; LSHIFT options { paraphrase = "<<"; } : "<<" ; GT options { paraphrase = ">"; } : '>' ; RSHIFT options { paraphrase = ">>"; } : ">>" ; DIV options { paraphrase = "/"; } : '/' ; PLUS options { paraphrase = "+"; } : '+' ; MINUS options { paraphrase = "-"; } : '-' ; TILDE options { paraphrase = "~"; } : '~' ; STAR options { paraphrase = "*"; } : '*' ; MOD options { paraphrase = "%"; } : '%' ; SCOPEOP options { paraphrase = "::"; } : "::" ; WS options { paraphrase = "white space"; } : (' ' | '\t' | '\n' { $newline; } | '\r') { $setType(SKIP); } ; PREPROC_DIRECTIVE options { paraphrase = "a preprocessor directive"; } : '#' (~'\n')* '\n' { $setType(SKIP); } ; SL_COMMENT options { paraphrase = "a comment"; } : "//" (~'\n')* '\n' { $setType(SKIP) $newline() } ; ML_COMMENT options { paraphrase = "a comment"; } : "/*" ( STRING_LITERAL | CHAR_LITERAL | '\n' { $newline; } | '*' ~'/' | ~'*' )* "*/" { $setType(SKIP); } ; CHAR_LITERAL options { paraphrase = "a character literal"; } : '\'' ( ESC | ~'\'' ) '\'' ; STRING_LITERAL options { paraphrase = "a string literal"; } : '"' (ESC|~'"')* '"' ; protected ESC options { paraphrase = "an escape sequence"; } : '\\' ( 'n' | 't' | 'v' | 'b' | 'r' | 'f' | 'a' | '\\' | '?' | '\'' | '"' | ('0' | '1' | '2' | '3') ( /* Since a digit can occur in a string literal, * which can follow an ESC reference, ANTLR * does not know if you want to match the digit * here (greedy) or in string literal. * The same applies for the next two decisions * with the warnWhenFollowAmbig option. */ options { warnWhenFollowAmbig = false; } : OCTDIGIT ( options { warnWhenFollowAmbig = false; } : OCTDIGIT )? )? | 'x' HEXDIGIT ( options { warnWhenFollowAmbig = false; } : HEXDIGIT )? ) ; protected VOCAB options { paraphrase = "an escaped character value"; } : '\3'..'\377' ; protected DIGIT options { paraphrase = "a digit"; } : '0'..'9' ; protected OCTDIGIT options { paraphrase = "an octal digit"; } : '0'..'7' ; protected HEXDIGIT options { paraphrase = "a hexadecimal digit"; } : ('0'..'9' | 'a'..'f' | 'A'..'F') ; /* octal literals are detected by checkOctal */ HEX options { paraphrase = "a hexadecimal value value"; } : ("0x" | "0X") (HEXDIGIT)+ ; INT options { paraphrase = "an integer value"; } : (DIGIT)+ // base-10 ( '.' (DIGIT)* {$setType(FLOAT);} (('e' | 'E') ('+' | '-')? (DIGIT)+)? | ('e' | 'E') ('+' | '-')? (DIGIT)+ {$setType(FLOAT);} )? ; FLOAT options { paraphrase = "an floating point value"; } : '.' (DIGIT)+ (('e' | 'E') ('+' | '-')? (DIGIT)+)? 
; IDENT options { testLiterals = true; paraphrase = "an identifer"; } : ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'_'|'0'..'9')* ; antlr-2.7.7/examples/python/inherit.tinyc/0000755000175000017500000000000010522211616020527 5ustar twernertwernerantlr-2.7.7/examples/python/inherit.tinyc/Makefile.in0000644000175000017500000000560510522211616022602 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/inherit.tinyc ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/inherit.g \ $(eol) g_py_FILES = \ inherit_p.py \ tinyc_p.py \ tinyc_l.py \ expandedinherit.g \ $(eol) compile : $(g_py_FILES) ## we need some local Python modules %.py : $(_srcdir)/%.py @-@RMF@ $@ &&@CP@ $< $@ %.py : @abs_this_builddir@/examples/python/tinyc/%.py @CP@ $< $@ ## We inherit from grammar files in tinyc. Rather than ## using them directly we copy them locally to avoid problems ## due to vocabularies that can't be found. %.g : @abs_top_srcdir@/examples/python/tinyc/%.g @-@RMF@ $@ &&@CP@ $< $@ ## This vocabluary file is imported by the expanded grammar. ## Unfortunaly, there is no way to tell ANTLR where to search ## for it. ANTLR assumes it's always there where the grammar ## file resides. Therefore we are going to "make" the base ## grammar first and then copy required vocabulary over .. TinyCTokenTypes.txt : @MAKE@ -C ../tinyc @CP@ ../tinyc/$@ $@ @abs_this_builddir@/examples/python/tinyc/%.py : @MAKE@ -C ../tinyc test :: test1 test1_deps = \ inherit.py \ tinyc_p.py \ tinyc_l.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) inherit.py < $(_srcdir)/inherit.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) tinyc_p.g tinyc_l.g TinyCTokenTypes.txt @ @RMF@ $(g_py_FILES) @ @RMF@ $(g_src_FILES) @ANTLR_COMPILE_CMD@ tinyc_p.g tinyc_l.g @ANTLRFLAGS="-glib `@CYGPATH_W@ tinyc_p.g`" \ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup tinyc clean :: @@ECHO@ cleaning inherit ... @ -@RMF@ $(g_py_FILES) tinyc_l.g tinyc_p.g TinyCTokenTypes.txt @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/inherit.tinyc/tinyc_l.g0000644000175000017500000000276510522211616022352 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ /* * Make sure to run antlr.Tool on the lexer.g file first! 
*/ options { mangleLiteralPrefix = "TK_"; language=Python; } class tinyc_l extends Lexer; options { k=2; exportVocab=TinyC; charVocabulary = '\3'..'\377'; } tokens { "int"; "char"; "if"; "else"; "while"; } WS : (' ' | '\t' | '\n' { $newline;} | '\r') { _ttype = SKIP; } ; SL_COMMENT : "//" (~'\n')* '\n' { _ttype = Token.SKIP; $newline; } ; ML_COMMENT : "/*" ( { self.LA(2) != '/' }? '*' | '\n' { $newline; } | ~('*'|'\n') )* "*/" { $setType(SKIP); } ; LPAREN options { paraphrase="'('"; } : '(' ; RPAREN options { paraphrase="')'"; } : ')' ; LCURLY: '{' ; RCURLY: '}' ; STAR: '*' ; PLUS: '+' ; ASSIGN : '=' ; SEMI: ';' ; COMMA : ',' ; CHAR_LITERAL : '\'' (ESC|~'\'') '\'' ; STRING_LITERAL : '"' (ESC|~'"')* '"' ; protected ESC : '\\' ( 'n' | 'r' | 't' | 'b' | 'f' | '"' | '\'' | '\\' | '0'..'3' ( options { warnWhenFollowAmbig = false; } : DIGIT ( options { warnWhenFollowAmbig = false; } : DIGIT )? )? | '4'..'7' ( options { warnWhenFollowAmbig = false; } : DIGIT )? ) ; protected DIGIT : '0'..'9' ; INT : (DIGIT)+ ; ID options { testLiterals = true; paraphrase = "an identifier"; } : ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'_'|'0'..'9')* ; antlr-2.7.7/examples/python/inherit.tinyc/inherit.in0000644000175000017500000000037410522211616022525 0ustar twernertwernerint i = 4; // look: tinyc cannot do this, but we can! int *i; int f(char c, char *d) { int f = 5; // look: tinyc cannot do this, but we can! c = '\033'+'\47'+'\''+'\\'; d = " \" '\\' foo"; i = c+3*f; if ( i ) { f = c; } else { f = 1; } } antlr-2.7.7/examples/python/inherit.tinyc/tinyc_p.g0000644000175000017500000000335110522211616022346 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ /* * Make sure to run antlr.Tool on the lexer.g file first! */ options { mangleLiteralPrefix = "TK_"; language=Python; } { def main(): import tinyc_l import tinyc_p L = tinyc_l.Lexer() P = tinyc_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression try: P.program() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" import sys sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." return ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" if __name__ == "__main__": main() } class tinyc_p extends Parser; options { importVocab=TinyC; } program : ( declaration )* EOF ; declaration : (variable) => variable | function ; declarator : id:ID | STAR id2:ID ; variable : type declarator SEMI ; function : type id:ID LPAREN (formalParameter (COMMA formalParameter)*)? RPAREN block ; formalParameter : type declarator ; type: ( TK_int | TK_char | id:ID ) ; block : LCURLY ( statement )* RCURLY ; statement : (declaration) => declaration | expr SEMI | TK_if LPAREN expr RPAREN statement ( TK_else statement )? | TK_while LPAREN expr RPAREN statement | block ; expr: assignExpr ; assignExpr : aexpr (ASSIGN assignExpr)? ; aexpr : mexpr (PLUS mexpr)* ; mexpr : atom (STAR atom)* ; atom: ID | INT | CHAR_LITERAL | STRING_LITERAL ; antlr-2.7.7/examples/python/inherit.tinyc/inherit.g0000644000175000017500000000052010522211616022336 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. 
// // $Id$ options { mangleLiteralPrefix = "TK_"; language="Python"; } class inherit_p extends tinyc_p; // add initializers to variables variable { pass } : type declarator (ASSIGN aexpr)? SEMI ; antlr-2.7.7/examples/python/inherit.tinyc/inherit.py0000644000175000017500000000127510522211616022550 0ustar twernertwerner import sys import antlr import tinyc_l import inherit_p def main(): L = tinyc_l.Lexer() P = inherit_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression try: P.program() except antlr.ANTLRException, ex: print "*** error(s) while parsing." print ">>> exit(1)" sys.exit(1) ast = P.getAST() if not ast: print "stop - no AST generated." return ###show tree print "Tree: " + ast.toStringTree() print "List: " + ast.toStringList() print "Node: " + ast.toString() print "visit>>" visitor = Visitor() visitor.visit(ast); print "visit<<" if __name__ == "__main__": main() antlr-2.7.7/examples/python/xml/0000755000175000017500000000000010522211616016540 5ustar twernertwernerantlr-2.7.7/examples/python/xml/xml.in0000644000175000017500000000055410522211616017674 0ustar twernertwerner ]> Text 蕈゚ & < antlr-2.7.7/examples/python/xml/Makefile.in0000644000175000017500000000346710522211616020617 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/xml ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/xml.g \ $(eol) g_py_FILES = \ xml_l.py \ $(eol) compile : $(g_py_FILES) %.py : $(_srcdir)/%.py @@RMF@ $@ && @CP@ $< $@ test :: test1 test1_deps = \ xml.py \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) xml.py < $(_srcdir)/xml.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup xml clean :: @@ECHO@ cleaning xml ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/xml/xml.py0000644000175000017500000000200310522211616017705 0ustar twernertwernerimport sys import antlr import codecs import xml_l def warn(msg): print >>sys.stderr,"warning:",msg sys.stderr.flush() def error(msg): print >>sys.stderr,"got error:",msg sys.stderr.flush() ### Xml handling depends very much on whether ### your terminal can handle (print) xml chars. ### To be sure about it, just create a non ASCII ### letter and try to print it. 
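### (A hedged aside, not part of the original example: the same latin-1
### wrapping shown for sys.stdin can be applied to any byte stream before
### handing it to the generated lexer. The module name xml_l and the
### stream-accepting Lexer() constructor are assumptions taken from the
### generated code and from the other examples in this tree.)
import codecs
import xml_l

def lex_file(name):
    # wrap the byte stream so the lexer sees decoded iso-8859-1 characters
    stream = codecs.getreader('latin1')(open(name, 'rb'))
    for token in xml_l.Lexer(stream):
        pass    # the lexer actions themselves print what they recognize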
If that is not going ### to work, we create an alternative method which ### maps non printable chars to '?'. c = u"\N{LATIN SMALL LETTER O WITH ACUTE}" try: print c except: warn("terminal can't display unicode chars.") sys.stderr.flush() ## I'm just going to redefine 'unicode' to return ## a ASCII string. def unicode(x): return x.__str__().encode("ascii","replace") ### Now for the input. This should ideally be done ### in the lexer .. ### replace stdin with a wrapper that spits out ### unicode chars. sys.stdin = codecs.lookup('latin1')[-2](sys.stdin) for token in xml_l.Lexer() : pass antlr-2.7.7/examples/python/xml/xml.g0000644000175000017500000001235610522211616017517 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ /* Rudimentary lexer grammar for a non-validating XML parser. Lexer is not intended to be used by parser, but is standalone. Use something like while ( lexer.nextToken().getType() != Token.EOF_TYPE ); to iterate through tokens. Replace print statements (only there to make something visible) with your own code and have fun. Limitations: - internal DTD is parsed but not processed - only supported encoding is iso-8859-1 aka extended ASCII aka ISO-latin-1 - special entity references (like & <) do not get resolved (to '&', '<') - uses SAX attribute implementation (could easily be dropped) [TJP: commented out so it compiles w/o SAX.] - probably many more The good thing about some of these limitations is, that the parsed XML can be written *literally* unmodified. Author: Olli Z. (oliver@zeigermann.de) Initial date: 07.02.1999 (02/07/99) Complete revision: 16.01.2003 (01/16/03) Developed and testes with ANTLR 2.7.2 */ options { language=Python; } { def println(*args): for arg in args: print arg.encode("iso-8859-1") } class xml_l extends Lexer; options { // needed to tell "" and "" // also on exit branch "]]>", "-->" k=3; charVocabulary = '\3'..'\377'; // extended ASCII (3-255 in octal notation) caseSensitive=true; } DOCTYPE! : "' ; protected INTERNAL_DTD : '['! // reports warning, but is absolutely ok (checked generated code) // besides this warning was not generated with k=1 which is // enough for this rule... ( options {greedy=false;} : NL | STRING // handle string specially to avoid to mistake ']' in string for end dtd | . )* ']'! ; PI! : // { AttributesImpl attributes = new AttributesImpl(); } "" ; ////////////////// COMMENT! : "" { println( "COMMENT: "+c.getText())} ; protected COMMENT_DATA : ( options {greedy=false;} : NL | . )* ; ////////////////// ENDTAG! : "' { println( "ENDTAG: "+g.getText())} ; ////////////////// STARTTAG! : // XXX should org.xml.sax.AttributesImpl be replaced by something else? // { AttributesImpl attributes = new AttributesImpl(); } '<' g:NAME ( WS )? ( ATTR /*[attributes]*/ ( WS )? )* ( "/>" { println( "EMTYTAG: "+g.getText())} | '>' { println( "STARTTAG: "+g.getText())} ) ; PCDATA! : p:PCDATA_DATA { println( "PCDATA: X" + unicode(p.getText()) ) } ; protected PCDATA_DATA : ( options {greedy=true;} : NL | ~( '<' | '\n' | '\r' ) )+ ; CDATABLOCK! : "" { println("CDATABLOCK: "+p.getText()) } ; protected CDATA_DATA : ( options {greedy=false;} : NL | . )* ; protected ATTR // [AttributesImpl attributes] : name:NAME ( WS )? '=' ( WS )? value:STRING_NO_QUOTE /* { attributes.addAttribute("", "", name.getText(), "CDATA", value.getText()); } */ { println( "ATTRIBUTE: "+name.getText()+"="+value.getText()) } ; protected STRING_NO_QUOTE : '"'! (~'"')* '"'! | '\''! 
(~'\'')* '\''! ; protected STRING : '"' (~'"')* '"' | '\'' (~'\'')* '\'' ; protected NAME : ( LETTER | '_' | ':') ( options {greedy=true;} : NAMECHAR )* ; protected NAMECHAR : LETTER | DIGIT | '.' | '-' | '_' | ':' ; protected DIGIT : '0'..'9' ; protected LETTER : 'a'..'z' | 'A'..'Z' ; protected WS : ( options { greedy = true; } : ' ' | ESC )+ ; protected ESC : ( '\t' | NL ) ; // taken from html.g // Alexander Hinds & Terence Parr // from antlr 2.5.0: example/html // // '\r' '\n' can be matched in one alternative or by matching // '\r' in one iteration and '\n' in another. I am trying to // handle any flavor of newline that comes in, but the language // that allows both "\r\n" and "\r" and "\n" to all be valid // newline is ambiguous. Consequently, the resulting grammar // must be ambiguous. I'm shutting this warning off. protected NL : ( options { generateAmbigWarnings=false; greedy = true; } : '\n' | "\r\n" | '\r' ) { $newline; } ; antlr-2.7.7/examples/python/tinybasic/0000755000175000017500000000000010522211616017725 5ustar twernertwernerantlr-2.7.7/examples/python/tinybasic/basic.g0000755000175000017500000007666110522211616021201 0ustar twernertwernerheader{ import basic } options { language=Python; } class basic_p extends Parser; options { k = 4; // two token lookahead exportVocab=TinyBasic; // Call its vocabulary "TinyBasic" //codeGenMakeSwitchThreshold = 2; // Some optimizations //codeGenBitsetTestThreshold = 3; defaultErrorHandler = false; // Don't generate parser error handlers //analyzerDebug=true; buildAST = true; } tokens { WS; } { } imaginaryTokenDefinitions : SLIST TYPE PROGRAM_DEF SUBROUTINE_DEF FUNCTION_DEF EXIT_MODULE PARAMETERS PARAMETER_DEF LABELED_STAT NUMBERED_STAT UNARY_MINUS UNARY_PLUS CASE_GROUP ARGLIST FOR_LOOP FOR_FROM FOR_TO FOR_BY FOR_BY_ONE FOR_BODY INT_FN_EXECUTE FLT_FN_EXECUTE STR_FN_EXECUTE SUB_EXECUTE EQ_COMP INDEX_OP SUBSTRING_OP DOT ARRAY1D ARRAY2D ARRAY3D ARRAY1D_PROXY ARRAY2D_PROXY ARRAY3D_PROXY VAR_PROXY WHEN_ERROR_CALL WHEN_ERROR_IN PRINT_ASCII PRINT_TAB PRINT_NUMERIC PRINT_STRING PRINT_COMMA PRINT_SEMI IF_THEN_BLOCK IF_BLOCK ELSE_IF_BLOCK ELSE_BLOCK CODE_BLOCK CONDITION ; // Compilation Unit: In TinyBasic, this is a single file. This is the start // rule for this parser compilationUnit[context] returns [r] { if not context: context = basic.Context() self.theContext=context; r = self.theContext } : // A compilation unit starts with an optional program definition ( programDefinition | /* nothing */ ) // Next we have a series of zero or more sub/function blocks ( subroutineDefinition | functionDefinition )* EOF! ; // PROGRAM ( parameter, parameter) programDefinition options {defaultErrorHandler = true;} // let ANTLR handle errors { pVector=None } : "program"! { self.theContext.setProgramScope() } pVector=parameters eol! // now parse the body cb:procedureBlock quit:"end" eol! { #quit.setType(EXIT_MODULE); #programDefinition = #(#[PROGRAM_DEF,"PROGRAM_DEF"],#programDefinition); self.theContext.popScope(); } ; // SUB IDENT ( parameter)* subroutineDefinition options {defaultErrorHandler = true;} // let ANTLR handle errors { pVector=None; } : p:"sub"! n:subName { self.theContext.pushSubroutineScope(); } pVector=params:parameters eol! // now parse the body of the class cb:procedureBlock quit:"end" "sub"! eol! 
{ #quit.setType(EXIT_MODULE); #subroutineDefinition = #(#[SUBROUTINE_DEF,"SUBROUTINE_DEF"],#subroutineDefinition); sub=basic.DTSubroutine(#subroutineDefinition,#cb,self.theContext.getCurrentScope(),pVector,#n.getText()); self.theContext.popScope(); self.theContext.insertSubroutine(#n.getText(),sub); } ; // FUNCTION IDENT ( parameter)* functionDefinition options {defaultErrorHandler = true;} // let ANTLR handle errors { } : p:"function"^ fnType=n:newFunction {#p.setType(FUNCTION_DEF);} { self.theContext.pushScope(basic.FunctionScope(self.theContext.getCurrentScope())); } pVector=params:parameters eol! // now parse the body of the class cb:procedureBlock quit:"end" "function"! eol! { #quit.setType(EXIT_MODULE); fnc=basic.DTFunction(fnType,#params,#cb,self.theContext.getCurrentScope(),pVector,#n.getText()); #functionDefinition = #(#[FUNCTION_DEF,"FUNCTION_DEF"],#functionDefinition); self.theContext.popScope(); self.theContext.insertFunction(#n.getText(),fnc); } ; //funcName // : // INT_FN // | FLT_FN // | STR_FN // ; newFunction returns [r] : INT_FN { r=INT_FN; } | STR_FN { r=STR_FN; } | FLT_FN { r=FLT_FN; } ; // This is the body of a procedure. procedureBlock : codeBlock ; statement : nl ( singleStatement | ifStatements | compoundStatement ) ; parameters returns [r] { r = [] } : ( (LPAREN)=> LPAREN! parameterDeclarationList[r] RPAREN! | ) ; // A list of formal parameters parameterDeclarationList [r] { tbd=None; } : tbd=parameterDeclaration { r.append(tbd); } ( COMMA! tbd=parameterDeclaration { r.append(tbd); } )* {#parameterDeclarationList = #(#[PARAMETERS,"PARAMETERS"], #parameterDeclarationList);} ; parameterDeclaration returns [r] { varType=0 r = None } : varType=v:newVariable ( LPAREN! //d1:integerExpression ( COMMA! //d2:integerExpression ( COMMA! //d3:integerExpression { r = basic.DTDataTypeProxy(varType,self.theContext.getCurrentScope(),3); } | { r = basic.DTDataTypeProxy(varType,self.theContext.getCurrentScope(),2); } ) | { r = basic.DTDataTypeProxy(varType,self.theContext.getCurrentScope(),1); } ) RPAREN! | { r = basic.DTDataTypeProxy(varType,self.theContext.getCurrentScope(),0); } ) { #parameterDeclaration = #([VAR_PROXY], #parameterDeclaration); self.theContext.insertVariable(#v.getText(),r); } ; compoundStatement : forNextBlock | doUntilLoopBlock | doLoopUntilBlock | selectCaseBlock | eventCompoundStatements ; ifThenBlock : ifBlock ( options { warnWhenFollowAmbig = false; } : elseIfBlock )* ( options { warnWhenFollowAmbig = false; } : elseBlock )? endIfBlock { #ifThenBlock = #(#[IF_THEN_BLOCK,"IF_THEN_BLOCK"],#ifThenBlock);} ; ifStatements : (ifStatement)=> ifStatement | ifThenBlock ; ifStatement : "if"! condition "then"! singleStatement eol! ; ifBlock : "if"! condition "then"! eol! codeBlock { #ifBlock = #(#[IF_BLOCK,"IF_BLOCK"],#ifBlock);} ; elseIfBlock : nl ("else"! "if"! | "elseif"! ) condition "then"! eol! codeBlock { #elseIfBlock = #(#[ELSE_IF_BLOCK,"ELSE_IF_BLOCK"],#elseIfBlock);} ; elseBlock : nl "else"! eol! codeBlock { #elseBlock = #(#[ELSE_BLOCK,"ELSE_BLOCK"],#elseBlock);} ; endIfBlock : nl ("end"! "if"! | "endif"! ) eol! ; condition : relationalExpression { #condition = #(#[CONDITION,"CONDITION"],#condition);} ; codeBlock : ( options { warnWhenFollowAmbig = false; } : statement )* {#codeBlock = #(#[CODE_BLOCK,"CODE_BLOCK"],#codeBlock);} ; forNextBlock : "for"! ( // I=1 TO 2 (BY 1)? forFrom forTo forBy eol! 
forBody ) {#forNextBlock = #(#[FOR_LOOP,"FOR_LOOP"],#forNextBlock);} ; // The initializer for a for loop forFrom : numericStore EQ^ numericExpression {#forFrom = #(#[FOR_FROM,"FOR_FROM"],#forFrom);} ; forTo : "to"! numericExpression {#forTo = #(#[FOR_TO,"FOR_TO"],#forTo);} ; forBy : ( "by"! numericExpression {#forBy = #(#[FOR_BY,"FOR_BY"],#forBy);} | {#forBy = #(#[FOR_BY_ONE,"FOR_BY_ONE"],#forBy);} ) ; forBody : codeBlock nextStatement! {#forBody = #(#[FOR_BODY,"FOR_BODY"],#forBody);} ; nextStatement : nl "next" numericStore eol! ; doUntilLoopBlock : "do"! "until"^ condition eol! codeBlock nl "loop"! eol! ; doLoopUntilBlock : "do"^ eol! codeBlock nl "loop"! "until"! condition eol! ; selectCaseBlock : "select"^ "case"! expression eol (casesGroup)* nl "end" "select" eol! ; singleStatement : ( "library"^ STR_CONST | "dim"^ dimensionedVariables | "global"^ parameterDeclarationList[[]] | "beep" | "chain"^ stringExpression ("with" LPAREN! argList RPAREN!)? | "gosub"^ lineLabel | "goto"^ lineLabel | callSubroutineStatement | "return"^ (expression)? | ex:"exit"^ "sub"! {#ex.setType(EXIT_MODULE);} | ("let"!)? assignmentExpression | ("on" numericExpression)=> "on"^ numericExpression ("goto"^ | "gosub"^ ) lineLabel (COMMA! lineLabel)* | eventSingleStatements | "option"^ "base" INT_CONST | "out"^ integerExpression COMMA! integerExpression | "pause"^ (numericExpression)? | "redim"^ dimensionedVariables | "poke"^ integerExpression COMMA! integerExpression COMMA! integerExpression | "randomize"^ integerExpression | graphicsOutput | inputOutput | line_stuff | set_stuff ) eol! ; callSubroutineStatement : call:"call"^ subName (LPAREN! argList RPAREN!)? { #call.setType(SUB_EXECUTE); } ; dimensionedVariables { av=None; varType=0;} : ( varType=v:newVariable LPAREN! d1:integerExpression ( COMMA! d2:integerExpression ( COMMA! d3:integerExpression { av= basic.DTArray3D(varType,self.theContext.getCurrentScope()); #dimensionedVariables = #([ARRAY3D, "ARRAY3D"], #dimensionedVariables); } | { av= basic.DTArray2D(varType,self.theContext.getCurrentScope()); #dimensionedVariables = #([ARRAY2D, "ARRAY2D"], #dimensionedVariables); } ) | { av= basic.DTArray1D(varType,self.theContext.getCurrentScope()); #dimensionedVariables = #([ARRAY1D, "ARRAY1D"], #dimensionedVariables); } ) RPAREN! { self.theContext.insertVariable(#v.getText(),av);} ) ( COMMA dimensionedVariables )? ; lineLabel : INT_CONST | IDENT ; nl : ( options { warnWhenFollowAmbig = false; } : IDENT^ c:COLON! {#c.setType(LABELED_STAT);} | INT_CONST^ {#c.setType(NUMBERED_STAT);} )? ; constant : stringConstant | floatNumber ; binaryReadVariables : ( numericStore | stringStore "until" integerExpression ) (COMMA binaryReadVariables)? ; printList : ( tabExpression | printString | printNumeric ) ( ( c:COMMA { #c.setType(PRINT_COMMA);} | s:SEMI { #s.setType(PRINT_SEMI);} ) (printList)? )? ; tabExpression : "tab"! LPAREN! numericExpression RPAREN! { #tabExpression = #(#[PRINT_TAB,"PRINT_TAB"],#tabExpression);} ; printString : stringExpression { #printString = #(#[PRINT_STRING,"PRINT_STRING"],#printString);} ; printNumeric : numericExpression { #printNumeric = #(#[PRINT_NUMERIC,"PRINT_NUMERIC"],#printNumeric);} ; inputList : ( numericStore | stringStore ) (COMMA inputList)? ; inputOutput : "close"^ (POUND! integerExpression)? //| "cominfo" | "data"^ constant (COMMA! constant)* | "deletefile" stringExpression //| "fileinfo" | "input" ( "binary" (chanNumber)? 
binaryReadVariables | chanAndPrompt inputList ) | "open" chanNumber stringExpression ( COMMA ( "access" ( "input" | "output" | "outin" | "append" ) | "organization" ( "sequential" | "random" | "stream" | "append" ) | "recsize" integerExpression ) )+ //| "output" | print_ascii | "print" "binary" (chanNumber)? printList | "read" inputList | "restore" ; set_stuff : "set" ( "timer" numericExpression | "loc" LPAREN integerExpression COMMA integerExpression RPAREN | (chanNumber)? specifier integerExpression ) ; print_ascii : "print"! (chanNumber)? ("using" stringExpression)? printList {#print_ascii = #([PRINT_ASCII, "PRINT_ASCII"], #print_ascii);} ; specifier : "margin" | "zonewidth" | "address" | "record" ; chanNumber : POUND integerExpression COLON ; prompt : "prompt" stringExpression COLON ; chanAndPrompt : (chanNumber)? (prompt)? ; casesGroup : aCase codeBlock {#casesGroup = #([CASE_GROUP, "CASE_GROUP"], #casesGroup);} ; integerArray : argArray ; symbolicAddress : stringExpression ; deviceAddress : (adapterAddress COMMA!)? primaryAddress (COMMA! secondaryAddress)? ; primaryAddress : integerExpression ; secondaryAddress : integerExpression ; adapterAddress : stringExpression | "@" integerExpression ; combinationAddress : (deviceAddress)=> deviceAddress | adapterAddress ; aCase : "case"^ expression (COMMA! expression)* eol! ; integerArrayVariable : integerVariable ; stringArrayVariable : stringVariable ; floatArrayVariable : floatVariable ; arrayVariable : integerArrayVariable | stringArrayVariable | floatArrayVariable ; graphicsOutput : "brush"^ integerExpression | "circle"^ LPAREN! integerExpression COMMA integerExpression RPAREN! COMMA integerExpression ( COMMA integerExpression )? | "clear"^ ("metafileon" | "metafileoff" )? | "ellipse"^ LPAREN! integerExpression COMMA integerExpression RPAREN! MINUS LPAREN! integerExpression COMMA integerExpression RPAREN! ( COMMA integerExpression )? | "font"^ integerExpression ( COMMA integerExpression ( COMMA integerExpression )? )? | "loc"^ integerStore COMMA integerStore | "pen"^ integerExpression COMMA integerExpression COMMA integerExpression | "picture"^ stringExpression COMMA LPAREN! integerExpression COMMA integerExpression RPAREN! ( COMMA integerExpression )? | "polyline"^ integerArrayVariable LPAREN COMMA RPAREN ( COMMA integerExpression )? | "rectangle"^ LPAREN! integerExpression COMMA integerExpression RPAREN! MINUS LPAREN! integerExpression COMMA integerExpression RPAREN! ( COMMA integerExpression )? | "screen"^ ( "normal" | "condensed" | "display" | "zoom" | "unzoom" | "close_basic" ) ; line_stuff // ambiguity forced left factoring : "line" ( "input" (chanNumber)? stringStore | "enter" combinationAddress (prompt)? stringStore ("until" integerExpression)? | (LPAREN! integerExpression COMMA integerExpression RPAREN!)? MINUS LPAREN! integerExpression COMMA integerExpression RPAREN! ( COMMA integerExpression )? ) ; eventSingleStatements : "cause" ("error")? integerExpression | "cause" "event" integerExpression | ("disable" | "enable") ("srq"|"timer"|"gpib") ("discard")? | ("disable" | "enable") "event" integerExpression ("discard")? | "error" ( "abort" integerExpression | "retry" | "continue" | "stop" ) | "on" ( "event" integerExpression | "srq" | "timer" | "gpib" ) "call" subName ; eventCompoundStatements : w:"when"^ "error" ( "call"^ subName (LPAREN! argList RPAREN!)? eol! {#w.setType(WHEN_ERROR_CALL);} | "in"! eol! {#w.setType(WHEN_ERROR_IN);} (singleStatement)+ "use"^ eol! (singleStatement)+ ("end"! "when"! | "endwhen"!) 
eol ) ; subName : IDENT ; expression : numericExpression | stringExpression ; argList : arg ( COMMA! arg )* {#argList = #(#[ARGLIST,"ARGLIST"], argList);} ; arg : //(variable LPAREN COMMA)=> //variable LPAREN COMMA {dimCount=2;} ( COMMA {dimCount++;} )* RPAREN (argArray)=>argArray //| (variable LPAREN RPAREN)=> //variable LPAREN RPAREN | expression ; argArray : (variable LPAREN COMMA)=> v23:variable LPAREN! COMMA! ( COMMA! { #v23.setType(ARRAY3D); } | { #v23.setType(ARRAY2D); } ) RPAREN! | //(variable LPAREN RPAREN)=> v1:variable LPAREN RPAREN { #v1.setType(ARRAY1D); } ; // assignment expression (level 13) assignmentExpression : stringStore EQ^ stringExpression | integerStore EQ^ integerExpression | floatStore EQ^ numericExpression ; stringStore : (stringVariable LPAREN)=> {self.theContext.isArrayVariable(self.LT(1).getText())}? stringVariable lp:LPAREN^ {#lp.setType(INDEX_OP);} indices RPAREN | (stringVariable LBRACK)=> stringVariable lb:LBRACK^ {#lb.setType(SUBSTRING_OP);} integerExpression COLON! integerExpression RBRACK! | stringVariable ; integerStore : ( integerVariable LPAREN )=> {self.theContext.isArrayVariable(self.LT(1).getText())}? integerVariable lp:LPAREN^ {#lp.setType(INDEX_OP);} indices RPAREN! | integerVariable ; floatStore : ( floatVariable LPAREN )=> {self.theContext.isArrayVariable(self.LT(1).getText())}? floatVariable lp:LPAREN^ {#lp.setType(INDEX_OP);} indices RPAREN! | floatVariable ; numericStore : integerStore | floatStore ; stringVariable : STR_VAR ; integerVariable : INT_VAR ; floatVariable : ( FLT_VAR | IDENT ) ; // boolean relational expressions (level 5) relationalExpression : relationalXORExpression ; relationalXORExpression : relationalORExpression ( "xor"^ relationalORExpression )* ; relationalORExpression : relationalANDExpression ( "or"^ relationalANDExpression )* ; relationalANDExpression : relationalNOTExpression ( "and"^ relationalNOTExpression )* ; relationalNOTExpression : ("not"^)? primaryRelationalExpression ; primaryRelationalExpression : (numericExpression)=> numericExpression ( LT^ | GT^ | LE^ | GE^ | e1:EQ^ {#e1.setType( EQ_COMP );} | NE_COMP^ ) numericExpression | stringExpression ( LT^ | GT^ | LE^ | GE^ | e2:EQ^ {#e2.setType( EQ_COMP );} | NE_COMP^ ) stringExpression | LPAREN! relationalExpression RPAREN! ; numericValuedFunctionExpression : "abs"^ LPAREN! numericExpression RPAREN! | "acos"^ LPAREN! numericExpression RPAREN! | "asc"^ LPAREN! stringExpression RPAREN! | "atn"^ LPAREN! numericExpression RPAREN! | "cos"^ LPAREN! numericExpression RPAREN! | "dround"^ LPAREN! numericExpression COMMA! integerExpression RPAREN! | "errl"^ | "errn"^ | "exp"^ LPAREN! numericExpression RPAREN! | "fract"^ LPAREN! numericExpression RPAREN! | "get_event"^ LPAREN! numericExpression RPAREN! | "in"^ LPAREN! numericExpression RPAREN! | "instr"^ LPAREN! stringExpression COMMA! stringExpression RPAREN! | "int"^ LPAREN! numericExpression RPAREN! | "ival"^ LPAREN! stringExpression RPAREN! | "len"^ LPAREN! stringExpression RPAREN! | "lgt"^ LPAREN! numericExpression RPAREN! | "log"^ LPAREN! numericExpression RPAREN! | "max"^ LPAREN! (numericExpression)+ RPAREN! | "min"^ LPAREN! (numericExpression)+ RPAREN! | "peek"^ LPAREN! numericExpression COMMA! integerExpression RPAREN! | "pi"^ | "rnd"^ | "sgn"^ LPAREN! numericExpression RPAREN! | "signed"^ LPAREN! integerExpression RPAREN! | "sin"^ LPAREN! numericExpression RPAREN! | "sqr"^ LPAREN! numericExpression RPAREN! | "tan"^ LPAREN! numericExpression RPAREN! | "time"^ | "ubound"^ LPAREN! stringExpression COMMA! 
integerExpression RPAREN! | "val"^ LPAREN! stringExpression RPAREN! | "andb"^ LPAREN! integerExpression COMMA! integerExpression RPAREN! | "orb"^ LPAREN! integerExpression COMMA! integerExpression RPAREN! | "notb"^ LPAREN! integerExpression RPAREN! | "shiftb"^ LPAREN! integerExpression COMMA! integerExpression RPAREN! | "xorb"^ LPAREN! integerExpression COMMA! integerExpression RPAREN! ; integerExpression : numericExpression ; stringValuedFunctionExpression : "chr$"^ LPAREN! integerExpression RPAREN! | "date$"^ | "dround$"^ LPAREN! numericExpression COMMA! integerExpression RPAREN! | "errl$"^ | "errn$"^ LPAREN! integerExpression RPAREN! | "inchr$"^ | "ival$"^ LPAREN! integerExpression COMMA! integerExpression RPAREN! | "lwc$"^ LPAREN! stringExpression RPAREN! | "rpt$"^ LPAREN! stringExpression COMMA! integerExpression RPAREN! | "time$"^ | "upc$"^ LPAREN! stringExpression RPAREN! | "val$"^ LPAREN! numericExpression RPAREN! ; //numericExpression // : numericAdditiveExpression // ; // binary addition/subtraction (level 3) numericExpression : numericMultiplicativeExpression ( options { warnWhenFollowAmbig = false; } : (PLUS^ | MINUS^) numericMultiplicativeExpression )* ; // multiplication/division/modulo (level 2) numericMultiplicativeExpression : numericExponentialExpression ((STAR^ | "div"^ | "mod"^ | SLASH^ ) numericExponentialExpression)* ; numericExponentialExpression : numericUnaryExpression ( EXPO^ numericUnaryExpression)* ; numericUnaryExpression : ( p:PLUS^ {#p.setType(UNARY_PLUS);} | m:MINUS^ {#m.setType(UNARY_MINUS);} )? numericPrimaryExpression ; numericPrimaryExpression : floatNumber | numericStore | //(FLT_FN|INT_FN)=> ( FLT_FN^ {#FLT_FN.setType(FLT_FN_EXECUTE);} | INT_FN^ {#INT_FN.setType(INT_FN_EXECUTE);} ) ( (LPAREN)=> LPAREN argList RPAREN | ) | numericValuedFunctionExpression | e:LPAREN! numericExpression RPAREN! ; floatNumber : integerNumber | FLT_CONST ; stringExpression : stringConcatanateExpression ; // binary addition/subtraction (level 3) stringConcatanateExpression : stringPrimaryExpression ( AMPERSAND^ stringConcatanateExpression)? ; stringPrimaryExpression : stringStore | stringConstant | STR_FN^ ((LPAREN)=>LPAREN! argList RPAREN!)? {#STR_FN.setType(STR_FN_EXECUTE);} | stringValuedFunctionExpression ; indices : numericExpression (COMMA! indices)? ; stringConstant : STR_CONST ; integerNumber : INT_CONST | BINARY_INTEGER | OCTAL_INTEGER | HEXADECIMAL_INTEGER ; newVariable returns [r] { r=0;} : INT_VAR { r=INT_VAR; } | STR_VAR { r=STR_VAR; } | FLT_VAR { r=FLT_VAR; } | IDENT { r=FLT_VAR; } ; variable : numericStore | stringStore ; eol! : ( options { warnWhenFollowAmbig = false; } : EOL! )+ ; //---------------------------------------------------------------------------- //---------------------------------------------------------------------------- // The TinyBasic scanner //---------------------------------------------------------------------------- //---------------------------------------------------------------------------- class basic_l extends Lexer; options { importVocab=TinyBasic; // call the vocabulary "TinyBasic" testLiterals=true; // automatically test for literals k=6; // four characters of lookahead caseSensitive=false; caseSensitiveLiterals = false; } // OPERATORS AMPERSAND : '&' ; LPAREN : '(' ; RPAREN : ')' ; LBRACK : '[' ; RBRACK : ']' ; COLON : ':' ; COMMA : ',' ; //DOT : '.' 
; EQ : '=' ; NE_COMP : "<>" ; //BNOT : '~' ; SLASH : '/' ; PLUS : '+' ; MINUS : '-' ; STAR : '*' ; GE : ">=" ; GT : ">" ; LE : "<=" ; LT : '<' ; SEMI : ';' ; POUND : '#' ; BINARY_INTEGER : "&b" ('0' | '1' ) + ; OCTAL_INTEGER : "&o" ('0'..'7' ) + ; HEXADECIMAL_INTEGER : "&h" ('0'..'9' | 'a'..'f' ) + ; // Whitespace -- ignored WS : ( ' ' | '\t' | '\f' ) { _ttype = Token.SKIP; } ; EOL : ( "\r\n" // Evil DOS | '\r' // Macintosh | '\n' // Unix (the right way) ) { self.newline(); } ; // Single-line comments SL_COMMENT : '!' (~('\n'|'\r'))* //('\n'|'\r'('\n')?) { $setType(Token.SKIP); //newline(); } ; // character literals CHAR_LITERAL : '\'' ( (ESCc)=> ESCc | ~'\'' ) '\'' ; // string literals STR_CONST : '"'! ( (ESCs)=> ESCs | (ESCqs)=> ESCqs | ~('"'))* '"'! ; protected ESCc : '<' ('0'..'9')+ '>' ; protected ESCs : "<<" ('0'..'9')+ ">>" ; protected ESCqs : '"' '"'! ; // hexadecimal digit (again, note it's protected!) protected HEX_DIGIT : ('0'..'9'|'a'..'f') ; // a dummy rule to force vocabulary to be all characters (except special // ones that ANTLR uses internally (0 to 2) protected VOCAB : '\3'..'\377' ; // an identifier. Note that testLiterals is set to true! This means // that after we match the rule, we look in the literals table to see // if it's a literal or really an identifer IDENT options {testLiterals=true;} : ('a'..'z') ('a'..'z'|'0'..'9'|'_'|'.')* ( '$' { if $getText[0:2].lower() == "fn" : _ttype=STR_FN; else: _ttype=STR_VAR; } | '%' { if $getText[0:2].lower() == "fn" : _ttype=INT_FN; else: _ttype=INT_VAR; } | '#' { if $getText[0:2].lower() == "fn" : _ttype=FLT_FN; else: _ttype=FLT_VAR; } | { if $getText[0:2].lower() == "fn" : _ttype=FLT_FN; } ) ; // a numeric literal INT_CONST { isDecimal=False } : '.' { $setType(DOT) } (('0'..'9')+ (EXPONENT)? (FLT_SUFFIX)? { $setType(FLT_CONST) })? | ( '0' {isDecimal = True} // special case for just '0' ( ('x') ( // hex // the 'e'|'E' and float suffix stuff look // like hex digits, hence the (...)+ doesn't // know when to stop: ambig. ANTLR resolves // it correctly by matching immediately. It // is therefor ok to hush warning. options { warnWhenFollowAmbig=false; } : HEX_DIGIT )+ | ('0'..'7')+ // octal )? | ('1'..'9') ('0'..'9')* {isDecimal=True} // non-zero decimal ) ( ('l') // only check to see if it's a float if looks like decimal so far | {isDecimal}? ( '.' ('0'..'9')* (EXPONENT)? (FLT_SUFFIX)? | EXPONENT (FLT_SUFFIX)? | FLT_SUFFIX ) { $setType(FLT_CONST); } )? ; // a couple protected methods to assist in matching floating point numbers protected EXPONENT : ('e') ('+'|'-')? ('0'..'9')+ ; protected FLT_SUFFIX : 'f'|'d' ; antlr-2.7.7/examples/python/tinybasic/Makefile.in0000644000175000017500000000265310522211616022000 0ustar twernertwerner## This file is part of PyANTLR. See LICENSE.txt for license ## details..........Copyright (C) Wolfgang Haefelinger, 2004. ## ## do not change this value subdir=examples/python/tinybasic ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stdvars@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. 
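As an aside, here is a minimal sketch (under stated assumptions, not the shipped driver) of how the generated TinyBasic modules are typically driven; the module names basic_l, basic_p and the helper module basic follow from the grammar class names and the Makefile above, and the example's own basic.py provides the full runtime support.

import basic
import basic_l
import basic_p

lexer  = basic_l.Lexer()                  # reads the BASIC source from stdin
parser = basic_p.Parser(lexer)
parser.setFilename(lexer.getFilename())
# start rule; returns the Context that the grammar actions fill with scopes,
# variables, subroutines and functions
context = parser.compilationUnit(basic.Context())
ast = parser.getAST()                     # buildAST=true, so a tree is produced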
python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## get configured standard deps @stddeps@ srcdir = @abs_top_srcdir@/$(subdir) gen_FILES = basic_l.py basic_p.py basic_w.py g_FILES = $(srcdir)/basic.g $(srcdir)/basic_w.g all :: $(gen_FILES) test :: $(gen_FILES) basic.py $(python) basic_l.py < $(srcdir)/basic.in $(python) basic.py < $(srcdir)/basic.in $(python) basic.py < $(srcdir)/basic1.in $(python) basic.py < $(srcdir)/basic2.in basic : test basic.py : $(srcdir)/basic.py @-@RMF@ $@ && @CP@ $< . $(gen_FILES) : $(g_FILES) @ @RMF@ $(gen_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(gen_FILES) $(gen_FILES) : @ANTLR_JAR@ ### cleanup basic clean :: @@ECHO@ cleaning basic ... @ -@RMF@ $(gen_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes .PHONY: basic .PHONY: test1 antlr-2.7.7/examples/python/tinybasic/basic.in0000755000175000017500000000100410522211616021334 0ustar twernertwerner PROGRAM ( ChainParameterString$ , DebugMode% ) a$="hello" b$="somanchi" print a$,b$ x%=2 do until x%=100 x% = x% + 1 print x% loop print "Tine" do x%=x%+1 print x%,x%,"Hello World" loop until x%>100 if x% = 1 then print 21 else if x% = 2 then print 22 else if x% = 5 then print 23 else print 24 end if for x%=1 to 3 y% = x% print x% , y% + 2 next x% y% = 2 if 1 = 0 then print "1==1" else print "1!=1" end if end antlr-2.7.7/examples/python/tinybasic/basic.py0000755000175000017500000005207310522211616021372 0ustar twernertwernerimport sys import antlr version = sys.version.split()[0] if version < '2.2.1': False = 0 if version < '2.3': True = not False class Stack: def __init__(self): self.data = [] def push(self,item): self.data.append(item) def pop(self): self.data.pop() def peek(self): return self.data[-1] class HashTab: def __init__(self): self.data = {} def put(self,k,v): self.data[k] = v def get(self,k): r = None try: r = self.data[k] except: pass return r class Context(object): def __init__(self): self.theGlobalScope = GlobalScope() self.theScopeStack = Stack() self.theScopeStack.push(self.theGlobalScope) self.subroutineTable = HashTab() self.functionTable = HashTab() self._invar() def _invar(self): s = self.theScopeStack.peek() assert s if not isinstance(s,Scope): print "type(s)=",type(s) x = ProgramScope() print "type(x)=",type(x) assert isinstance(x,Scope) assert 0 def insertSubroutine(self, v, t): self.subroutineTable.put(v.lower(),t) def getSubroutine(self, v): return self.subroutineTable.get(v.lower()) def insertFunction(self, v, t): self.functionTable.put(v.lower(),t) def getFunction(self, v): return self.functionTable.get(v.lower()) def insertGlobalVariable(self, v, t) : self.theGlobalScope.insertVariable(v,t) def insertVariable(self, v, t) : self.getCurrentScope().insertVariable(v,t) def getVariable(self, var): t=self.getCurrentScope().getVariable(var) if(t==None): t=self.theGlobalScope.getVariable(var) return t def getVariableDimension(self, var): dim=self.getCurrentScope().getVariableDimension(var) if(dim==0): dim=self.theGlobalScope.getVariableDimension(var) return dim def getVariableType(self, var): t=self.getCurrentScope().getVariable(var) if(t==None): t=self.theGlobalScope.getVariable(var) if t: return t.getType() else: return 0 def isArrayVariable(self,s): return (self.getVariableDimension(s) > 0) def getPrev(self): return self.getCurrentScope().getPrev() def pushScope(self,scope): assert isinstance(scope,Scope) self.theScopeStack.push(scope) def popScope(self): if self.getCurrentScope() == self.theGlobalScope : return self.theGlobalScope else: 
return self.theScopeStack.pop() def getCurrentScope(self): self._invar() r = self.theScopeStack.peek() assert r assert isinstance(r,Scope) return r def getGlobalScope(self): return theGlobalScope def setProgramScope(self,scope=None): if not scope: scope = ProgramScope(self.getCurrentScope()) self.theProgramScope = scope assert isinstance(self.theProgramScope,Scope) while self.theScopeStack.peek() != self.theGlobalScope: self.theScopeStack.pop() self.theScopeStack.push(self.theProgramScope) def getProgramScope(self): return self.theProgramScope def pushSubroutineScope(self): self.pushScope(SubroutineScope(self.getCurrentScope())) def initialize(self): self.setProgramScope(self.getProgramScope()) def setDimension(self,s,*args): v = self.getVariable(s) v.setDimension(*args) def getDTDataType(self,s,*args): t = self.getVariable(s) r = t.getDTDataType(*args) return r def ensureVariable(self,s,t): v = self.getVariable(s) if not v: assert isinstance(t,int) v = self.getOne(t,self.getCurrentScope()) self.insertVariable(s,v) return v def getOne(self,t,scope): assert isinstance(t,int) assert isinstance(scope,Scope) t = makeone(t,scope) assert t return t class SaveEnv: def __init__(self,scope,args): self.scope=scope self.args=args def getScope(self): return scope def getArgs(self): return args class CodeContext: def __init__(self,context,scope,args): self.context = context self.scope = scope self.args = args class DTCodeType(object): def __init__(self,entry,cb,scope,args,name): self.entry = entry self.cb = cb self.scope = scope self.args = args self.name = name self.callDepthStack = Stack() def newCall(self,context): codeContext = CodeContext(context,self.scope,self.args) self.callDepthStack.push(codeContext) context.pushScope(self.scope) def attachArg(self,argnum,arg): proxy = self.args[argnum-1] proxy.attach(arg) def getAST(self): return self.entry ### create data instance def makeone(aType,scope): import basic_p assert isinstance(aType,int) if ( aType==basic_p.INT_CONST ): return DTInteger(scope,0) if ( aType==basic_p.INT_VAR ): return DTInteger(scope,0) if ( aType==basic_p.FLT_CONST ): return DTFloat(scope,0.0) if ( aType==basic_p.FLT_VAR ): return DTFloat (scope,0.0) if ( aType==basic_p.STR_CONST ): return DTString (scope,"") if ( aType==basic_p.STR_VAR ): return DTString (scope,"") assert 0 return None class DTDataType(object): def __init__(self,scope,_ttype): if scope: assert isinstance(scope,Scope) self.scope=scope else: self.scope = None assert isinstance(_ttype,int) self.theType=_ttype def getType(self): return self.theType def getInteger(self): return 12345 def getFloat(self): return 12345.0 def getString(self): return None def _set_int(self,tbd): pass def setInteger(self,tbd): if isinstance(tbd,int): self._set_int(tbd) return if isinstance(tbd,str): self._set_int(int(tbd)) return self.setInteger(tbd.getInteger()) def _set_float(self,tbd): pass def setFloat(self,tbd): if isinstance(tbd,float): self._set_float(tbd) return if isinstance(tbd,str): self._set_float(float(tbd)) return self.setFloat(tbd.getFloat()) def _set_string(self,tbd): pass def setString(self,tbd): if isinstance(tbd,str): self._set_string(tbd) return else: self.setString(tbd.getString()) def getDTDataType(self,*args): return None def setDTDataType(self,*args): pass def assign(self,tbd): pass def getDimension(self): return 0 def getDimensioned(self,i): return 0 def multiply(self,other): return None def divide(self,other): return None def add(self,other): return None def subtract(self,other): return None def mod(self,other): 
return None def round(self,other): return None def truncate(self,other): return None def getOne(self,arg=None): t = makeone(self.theType,self.scope) assert t if arg: t.assign(arg) assert t return t def cloneDTDataType(self): return self.getOne(self) def setDimension(self,*args): pass def compareTo(self,o): pass def attach(self,theBoss):pass ######## class DTDataTypeProxy (DTDataType): def __init__(self,theType,scope,dims): DTDataType.__init__(self,scope,theType) self.dims = dims def getType(self): return self.theBoss.getType() def getInteger(self): return self.theBoss.getInteger() def getFloat(self): return self.theBoss.getFloat() def getString(self): return self.theBoss.getString() def setInteger(self,item): theBoss.setInteger(item) def setFloat(self,item): theBoss.setFloat(item) def setString(self,item): theBoss.setString(item) def getDTDataType(self,*args): return self.theBoss.getDTDataType(*args) def setDTDataType(self,*args): self.theBoss.setDTDataType(*args) def assign(self,tbd): self.theBoss.assign(tbd) def getDimension(self,item=None): if not item: return self.dims return self.theBoss.getDimensioned(item) def multiply(self,other): return self.theBoss.multiply(other) def divide(self,other): return self.theBoss.divide(other) def add(self,other): return self.theBoss.add(other) def subtract(self,other): return self.theBoss.subtract(other) def mod(self,other): return self.theBoss.mod(other) def round(self,other): return self.theBoss.round(other) def truncate(self,other): return self.theBoss.truncate(other) def getOne(self): return self.theBoss.getOne() def compareTo(self, o): return self.theBoss.compareTo(o) def attach(self,theBoss): self.theBoss=theBoss def cloneDTDataType(self): return DTDataTypeProxy(theType,scope,dims) def __str__(self): return str(self.theBoss) toString = __str__ class DTExecException(Exception): def __init__(self,s): Exception.__init__(self,s) class DTExitModuleException(DTExecException): def __init__(self,s): DTExecException.__init__(self,s) class DTFunction(DTCodeType): def __init__(self,_ttype, entry, cb,scope, args,name): DTDataType.__init__(entry,scope,args,name) class DTFloat(DTDataType): def __init__(self,scope,item): import basic_p DTDataType.__init__(self,scope,basic_p.FLT_VAR) self.setFloat(item) def _set_int(self,tbd): self.s = tbd * 1.0 def _set_float(self,tbd): self.s = tbd def getFloat(self): return self.s def assign(self,tbd): self.setFloat(tbd) def multiply(self,other): return DTFloat(None,self.getFloat()*other.getFloat()) def divide(self,other): return DTFloat(None,getFloat()/other.getFloat()) def add(self,other): return DTFloat(None,getFloat()+other.getFloat()) def subtract(self,other): return DTFloat(None,getFloat()-other.getFloat()) def mod(self,other): return DTFloat(None,getFloat() % other.getFloat()) def round(self): return DTInteger(None,DTFloat(None,self.getFloat()+0.5)) def truncate(self): return DTInteger(None,self.getInteger()) def compareTo(self, o): if(getFloat() < (o).getFloat()): return -1 if ( getFloat() > (o).getFloat()): return 1 return 0 def __str__(self): return str(self.s) toString = __str__ class DTInteger (DTDataType): def __init__(self,scope,item): import basic_p DTDataType.__init__(self,scope,basic_p.INT_VAR) self.setInteger(item) def getInteger(self): assert isinstance(self.s,int) return self.s def _set_int(self,tbd): assert isinstance(tbd,int) self.s = tbd def _set_float(self,tbd): self.s = float(tbd) def getFloat(self): return self.s * 1.0 def assign(self,tbd): self.setInteger(tbd) def multiply(self,other): if 
isinstance(other, DTFloat): t = DTFloat(None,self) return t.multiply(other) return DTInteger(None,self.getInteger()*other.getInteger()) def divide(self,other): if isinstance(other, DTFloat): t = DTFloat(None,self) return t.divide(other) return DTInteger(None,self.getInteger()/other.getInteger()) def add(self,other): if isinstance(other, DTFloat): t = DTFloat(None,self) return t.add(other) return DTInteger(None,self.getInteger()+other.getInteger()) def subtract(self,other): if isinstance(other, DTFloat): t = DTFloat(None,self) return t.subtract(other) return DTInteger(None,self.getInteger()-other.getInteger()) def mod(self,other): if isinstance(other, DTFloat): t = DTFloat(None,self) return t.mod(other) return DTInteger(None,self.getInteger() % other.getInteger()) def round(self): return self def truncate(self): return this def compareTo(self, o): if( self.getInteger() < (o).getInteger()): return -1 if ( self.getInteger() > (o).getInteger()): return 1 return 0 def __str__(self): return str(self.s) toString=__str__ class DTString (DTDataType): def __init__(self,scope,item): import basic_p DTDataType.__init__(self,scope,basic_p.STR_VAR) self.setString(item) def _set_string(self,s): self.s=s def getString(self): return self.s def compareTo(self, o): return s.compareTo(o.getString()) def assign(self,tbd): self.setString(tbd) def __str__(self): return self.s toString=__str__ class DTSubroutine(DTCodeType): def __init__(self,entry,cb,scope,args,name): DTCodeType.__init__(self,entry,cb,scope,args,name) class DTArray1D(DTDataType): def __init__(self,_type,scope): DTDataType.__init__(self,scope,_type) self.data = [] self.dim1 = 1 self.base = 0 def init(self): self.data = [] * self.dim1 def _get(self,idx1): assert isinstance(idx1,int) t = self.data[idx1] return t def _set(self,item,idx1): assert isinstance(idx1,int) self.data[idx1] = item def getDTDataType(self,i1): idx1 = i1.getInteger()-self.base if self.dim1==0 : self.dim1=10 self.init() if idx1>self.dim1: return None t = self._get(idx1) if not t: self._set(self.getOne(),idx1) return t def setDTDataType(self,i1,s): idx1 = i1.getInteger() - self.base if self.dim1==0: self.dim1 = 10 self.init() if(idx1<=self.dim1): t = self._get(idx1) if not t: t = self._set(self.getOne(s),idx1) else: t.assign(s) def getDimension(self): return 1 def getDimensiond(self,i): if i==1: return self.dim1 return 0 def setDimension(self,i1): if isinstance(i1,int): self.dim1 = i1 self.init() return if isinstance(i1,DTInteger): self.setDimension(i1.getInteger()) return assert 0 def compareTo(self,o): return 0 class DTArray2D(DTDataType): def __init__(self,_type,scope): DTDataType.__init__(self,scope,_type) self.data = [] self.dim1 = 0 self.dim2 = 0 self.base = 0 self._checkdim() def _checkdim(self): assert isinstance(self.dim1,int) assert isinstance(self.dim2,int) def init(self): self._checkdim() self.data = [None] * (self.dim2 * self.dim1) def _get(self,idx1,idx2): assert isinstance(idx1,int) assert isinstance(idx2,int) t = self.data[idx1*self.dim1+idx2] return t def _set(self,item,idx1,idx2): assert isinstance(idx1,int) assert isinstance(idx2,int) self.data[idx1*self.dim1+idx2] = item return self._get(idx1,idx2) def getDTDataType(self,i1,i2): idx1=i1.getInteger()-self.base idx2=i2.getInteger()-self.base if(self.dim1==0): self.dim1=10 self.dim2=10 self.init() if(idx1>self.dim1): return None if(idx2>self.dim2): return None t = self._get(idx1,idx2) if not t: t = self._set(self.getOne(),idx1,idx2) assert t return t def setDTDataType(self,i1,i2,s): idx1=i1.getInteger()-self.base 
idx2=i2.getInteger()-self.base if(self.dim1==0): self.dim1=10 self.dim2=10 self.init() if idx1<=self.dim1 and idx2<=self.dim2: t = self._get(idx1,idx2) if not t: t = self._set(self.getOne(s),idx1,idx2) else: t.assign(s) else: raise Exception("index out of range:") def getDimension(self): return 2 def getDimensioned(self,i): if (i==1): return self.dim1 if (i==2): return self.dim2 return 0 def _toint(self,item): if isinstance(item,int): return item if isinstance(item,DTInteger): return item.getInteger() assert 0 def setDimension(self,i1,i2): self.dim1 = self._toint(i1) self.dim2 = self._toint(i2) self._checkdim() self.init() def compareTo(self,o): return 0 class DTArray3D(DTDataType): def __init__(self,_type,scope): DTDataType.__init__(self,scope,_type) self.data = [] self.dim1 = 0 self.dim2 = 0 self.dim3 = 0 self.base = 0 def init(self): self.data = [None] * (self.dim1 * self.dim2 * self.dim3) def _get(self,idx1,idx2,idx3): assert isinstance(idx1,int) assert isinstance(idx2,int) assert isinstance(idx3,int) t = self.data[idx1 * (self.dim1*self.dim2) + idx2*self.dim2 + idx3] return t def _set(self,item,idx1,idx2,idx3): assert isinstance(idx1,int) assert isinstance(idx2,int) assert isinstance(idx3,int) self.data[idx1 * (self.dim1*self.dim2) + idx2*self.dim2 + idx3] = item def getDTDataType(self,i1,i2,i3): idx1=i1.getInteger()-self.base idx2=i2.getInteger()-self.base idx3=i3.getInteger()-self.base if not dim1: self.dim1 = self.dim2 = self.dim3 =10 self.init() if(idx1>self.dim1): return None if(idx2>self.dim2): return None if(idx3>self.dim3): return None t = self._get(idx1,idx2,idx3) if not t: t = self._set(self.getOne(),idx1,idx2,idx3) return t def setDTDataType(self,i1,i2,i3,s): idx1=i1.getInteger()-self.base idx2=i2.getInteger()-self.base idx3=i3.getInteger()-self.base if(self.dim1==0): self.dim1 = self.dim2 = self.dim3 = 10 self.init() if(idx1<=self.dim1 and idx2<=self.dim2 and idx3<=self.dim3): t= self._get(idx1,idx2,idx3) if not t: t = self._set(self.getOne(s),idx1,idx2,idx3) else: t.assign(s) def getDimension(self): return 3 def getDimensioned(self,i): if(i==1): return self.dim1 if (i==2): return self.dim2 if (i==3): return self.dim3 return 0 def setDimension(self,i1,i2,i3): self.dim1 = i1 self.dim2 = i2 self.dim3 = i3 self.init() def compareTo(self,o): return 0 class Scope(object): def __init__(self,prev=None): self.prev = prev self.symbolTable = HashTab() def cloneScope(self,prev): newScope = Scope(prev) return newScope def insertVariable(self,v,t): self.symbolTable.put(v.lower(),t) def getVariable(self,v): t=self.symbolTable.get(v.lower()) return t def getVariableDimension(self,v): t=self.getVariable(v) if t: return t.getDimension() else: return 0 def getVariableType(self,v): t=self.getVariable(v) if t: return t.getType() else: return 0 def isArrayVariable(self,s): return (self.getVariableDimension(s) > 0) def getPrev(self): return self.prev class FunctionScope(Scope): def __init(self,prev): Scope.__init__(self,prev) class GlobalScope(Scope): def __init(self,prev=None): Scope.__init__(self,prev) class ProgramScope(Scope): def __init(self,prev): Scope.__init__(self,prev) assert isinstance(self,Scope) class SubroutineScope(Scope): def __init(self,prev): Scope.__init__(self,prev) class Visitor(antlr.ASTVisitor): def __init__(self,*args): super(Visitor,self).__init__(*args) self.level = 0 if not args: self.cout = sys.stdout return if isinstance(args[0],file): self.cout = args[0] return assert 0 def tabs(self): print " " * self.level def printf(self,fmt,*args): if not args: 
sys.stdout.write(fmt) return argv = tuple(args) self.cout.write(fmt % argv) def flush(self): self.cout.flush() def visit1(self,node): if not node: self.printf(" nil ") return c = node.getType() t = node.getText() k = node.getFirstChild() s = node.getNextSibling() self.printf("( <%s> ",c) if t: self.printf(" %s ",t) self.visit1(k) self.visit1(s) self.printf(")") def visit(self,node): self.visit1(node) self.printf("\n") if __name__ == "__main__": import basic_l import basic_p import basic_w L = basic_l.Lexer() P = basic_p.Parser(L) P.setFilename(L.getFilename()) ### Parse the input expression C = P.compilationUnit(None) ast = P.getAST() if not ast: print "stop - no AST generated." import sys sys.exit(1) W = basic_w.Walker() W.compilationUnit(ast,C) antlr-2.7.7/examples/python/tinybasic/basic1.in0000644000175000017500000000050110522211616021413 0ustar twernertwernerPROGRAM ( ChainParameterString$ , DebugMode% ) x%=2 do until x%=5 x% = x% +1 print x% loop do x%=x%+1 print x%,x%,"Hello World" loop until x%=7 if x% = 1 then print 21 else if x% = 2 then print 22 else if x% = 5 then print 23 else print 24 end if for x%=1 to 3 y%=x% print x% , y%+2 next x% y% = 2 end antlr-2.7.7/examples/python/tinybasic/basic_w.g0000755000175000017500000003441210522211616021513 0ustar twernertwernerheader{ import basic } options { language=Python; } { def println(*args): if not args: print "" return // make empty line here to test for E0009 for x in args[0:-1]: print x, print args[-1] def printx(*args): if not args: return for x in args: print x, } class basic_w extends TreeParser; options { importVocab = TinyBasic; } { } compilationUnit[context] returns [self.theContext = context] { self.theContext.initialize(); self.zero = basic.DTInteger(self.theContext.getCurrentScope(),0); self.posOne = basic.DTInteger(self.theContext.getCurrentScope(),1); self.negOne = basic.DTInteger(self.theContext.getCurrentScope(),-1); } : pd:PROGRAM_DEF { try: self.programDefinition(pd) except basic.DTExecException, didit: print "Yes it works!", didit } ; programDefinition : #( PROGRAM_DEF moduleBody ) ; subroutineDefinition : #( SUBROUTINE_DEF IDENT moduleBody ) ; moduleBody : #(PARAMETERS parameters ) #(CODE_BLOCK ( statement )+ ) EXIT_MODULE { raise basic.DTExitModuleException("Done folks") } ; parameters : (parameter)* ; parameter { argNum=0;} : #(VAR_PROXY arg[argNum]{ argNum += 1} ) ; arg[r] : ( s:STR_VAR //{v=theContext.ensureVariable(s.getText(),STR_VAR);} | i:INT_VAR //{v=theContext.ensureVariable(i.getText(),INT_VAR);} | f:FLT_VAR //{v=theContext.ensureVariable(f.getText(),FLT_VAR);} ) ; statement { expr = None } : when_error_call_statement | dim_statement // done | forNextStatement // done | printAsciiStatement // done | ifThenBlock // done | expr=assign_statement // done | doUntilLoop // done | doLoopUntil // done | subExecuteStatement // done | exitModuleStatement // done ; exitModuleStatement : EXIT_MODULE { raise basic.DTExitModuleException("Asynchronous return") } ; subExecuteStatement { sub=None; argNum=0;tbd=None; } : #(SUB_EXECUTE i:IDENT { sub=self.theContext.getSubroutine(i.getText()) } #(ARGLIST ( tbd=argExpr { argNum +=1 sub.attachArg(argNum,tbd) } )* ) { try: sub.newCall(self.theContext); try: self.subroutineDefinition(sub.getAST()) except basic.DTExitModuleException, didit: print "Yes it works!", didit except antlr.ANTLRException, ex: pass } ) ; argExpr returns [exprValue] : a1d:ARRAY1D { exprValue=self.theContext.getVariable(a1d.getText()); } | a2d:ARRAY2D { exprValue=self.theContext.getVariable(a2d.getText()); 
} | a3d:ARRAY3D { exprValue=self.theContext.getVariable(a3d.getText()); } | exprValue=expr ; printAsciiStatement : #(PRINT_ASCII (printField)* { print } ) ; printField {d=None} : #(PRINT_NUMERIC d=expr { printx(d)}) | #(PRINT_STRING d=expr { printx(d)}) | #(PRINT_TAB d=expr) | PRINT_COMMA { printx("\t")} | PRINT_SEMI ; assign_statement returns [exprValue] { e=None } : #( EQ exprValue=data_store e=expr { exprValue.assign(e); } ) ; expr returns [exprValue] { c=None;d=None;e1=None;e2=None;e3=None } : #( STAR e1=expr e2=expr { exprValue=e1.multiply(e2) } ) | #( PLUS e1=expr e2=expr { exprValue=e1.add(e2) } ) | #( SLASH e1=expr e2=expr { exprValue=e1.multiply(e2) } ) | #( "div" e1=expr e2=expr { exprValue=e1.divide(e2) } ) | #( "mod" e1=expr e2=expr { exprValue=e1.mod(e2) } ) // comparison operators | #( EQ_COMP e1=expr e2=expr { exprValue=basic.DTInteger(None, antlr.ifelse(e1.compareTo(e2)==0,1,0)) } ) | #( NE_COMP e1=expr e2=expr { exprValue=basic.DTInteger(None, antlr.ifelse(e1.compareTo(e2)==0,0,1)) } ) | #( LE e1=expr e2=expr { exprValue=basic.DTInteger(None, antlr.ifelse(e1.compareTo(e2)<=0,1,0)) } ) | #( LT e1=expr e2=expr { exprValue=basic.DTInteger(None, antlr.ifelse(e1.compareTo(e2) <0,1,0)) } ) | #( GE e1=expr e2=expr { exprValue=basic.DTInteger(None, antlr.ifelse(e1.compareTo(e2)>=0,1,0)) } ) | #( GT e1=expr e2=expr { exprValue=basic.DTInteger(None, antlr.ifelse(e1.compareTo(e2) >0,1,0)) } ) // Boolean algebra | #( "xor" e1=expr e2=expr { exprValue=basic.DTInteger(None,antlr.ifelse(e1.getInteger()!=e2.getInteger(),1,0)) } ) | #( "and" e1=expr e2=expr { exprValue=basic.DTInteger(None,antlr.ifelse(e1.getInteger()==1 and e2.getInteger()==1,1,0)) } ) | #( "or" e1=expr e2=expr { exprValue=basic.DTInteger(None,antlr.ifelse(e1.getInteger()==1 or e2.getInteger()==1,1,0)) } ) // unary operators | #( "not" e1=expr { exprValue=basic.DTInteger(None,antlr.ifelse(e1.getInteger()==0,1,0)) } ) | #( UNARY_PLUS e1=expr { exprValue=e1 } ) | #( UNARY_MINUS e1=expr { exprValue=e1.multiply(self.negOne) } ) | #( SUBSTRING_OP e1=expr e2=expr e3=expr { exprValue=e1.getDTDataType(e2,e3) } ) | d=data_store { exprValue=d } | c=con { exprValue=c } ; id returns [value] : ( s:STR_VAR {value=self.theContext.ensureVariable(s.getText(),STR_VAR)} | i:INT_VAR {value=self.theContext.ensureVariable(i.getText(),INT_VAR)} | f:FLT_VAR {value=self.theContext.ensureVariable(f.getText(),FLT_VAR)} ) ; con returns [value] : s:STR_CONST {value=basic.DTString (self.theContext.getCurrentScope(),s.getText())} | i:INT_CONST {value=basic.DTInteger (self.theContext.getCurrentScope(),i.getText())} | f:FLT_CONST {value=basic.DTFloat (self.theContext.getCurrentScope(),f.getText())} ; data_store returns [value] { i1=None;i2=None;i3=None;tbd=None;} : #(INDEX_OP v:dimension_variable i1=expr ( i2=expr ( i3=expr { value=self.theContext.getDTDataType( v.getText(),i1,i2,i3) } | { value=self.theContext.getDTDataType( v.getText(),i1,i2) } ) | { value=self.theContext.getDTDataType( v.getText(),i1) } ) ) | value=id ; // FOR NEXT BLOCK --------------------------------------- forNextStatement { ff=None;ft=None;fb=None; } : #(FOR_LOOP ff=forFrom ft=forTo fb=forBy b:FOR_BODY { while ff.compareTo(ft) != fb.compareTo(self.zero): try: self.forBody(b) ff.assign(ff.add(fb)) except antlr.ANTLRException,ex: pass } ) ; forFrom returns [forValue] : #(FOR_FROM forValue=assign_statement) ; forTo returns [forValue] : #(FOR_TO forValue=expr) ; forBy returns [forValue] : #(FOR_BY forValue=expr) | FOR_BY_ONE 
{forValue=basic.DTInteger(self.theContext.getCurrentScope(),1)} ; // IF THEN BLOCK --------------------------------------------- ifThenBlock { done=0 } : #( IF_THEN_BLOCK ( {done==0}? done=ifThenBody )+ ) ; ifThenBody returns [ifValue] { r=0 } : #(IF_BLOCK ifValue=conditional[1] ) | #(ELSE_IF_BLOCK ifValue=conditional[1] ) | #(ELSE_BLOCK cb:CODE_BLOCK { self.codeBlock(cb) } ) ; conditional[forWhat] returns [condValue] : c:CONDITION cb:CODE_BLOCK { condValue = self.condition(c).getInteger() if forWhat==condValue: self.codeBlock(cb) } ; doUntilLoop : #("until" c:CONDITION { while 0 == self.conditional(c,0): pass } ) ; doLoopUntil : #("do" cb:CODE_BLOCK c:CONDITION { self.codeBlock(cb); while 0 == self.condition(c).getInteger(): self.codeBlock(cb) } ) ; when_error_call_statement : #( WHEN_ERROR_CALL "call" i:IDENT {println(" Attaching error:",i.getText())} ) ; dim_statement { i1=None;i2=None;i3=None; } : #("dim" ( #( ARRAY1D dv1:dimension_variable i1=expr { self.theContext.setDimension(dv1.getText(),i1)} ) | #( ARRAY2D dv2:dimension_variable i1=expr i2=expr { self.theContext.setDimension(dv2.getText(),i1,i2)} ) | #( ARRAY3D dv3:dimension_variable i1=expr i2=expr i3=expr { self.theContext.setDimension(dv3.getText(),i1,i2,i3)} ) ) ) ; dimension_variable : STR_VAR | FLT_VAR | INT_VAR ; // Numeric functions doubleFunctions returns [funcValue] {n=None;i=None;s=None;} : #("abs" n=expr {funcValue=basic.DTFloat(None,Math.abs(n.getFloat()))} ) /* | #("acos" n=expr {tbd=new DTFloat(None,Math.acos(n.getFloat()));} ) | #("asc" s=expr {tbd=new DTInteger(None,Math.asc(n.getFloat()));} ) | #("atn" n=expr {tbd=new DTFloat(None,Math.atn(n.getFloat()));} ) | #("cos" n=expr {tbd=new DTFloat(None,Math.cos(n.getFloat()));} ) | #("dround" n=expr i=expr {tbd=new DTFloat(None,Math.dround(n.getFloat()));} ) | #("errl" {tbd=new DTFloat(None,Math.errl(n.getFloat()));} ) | #("errn" {tbd=new DTInteger(None,Math.errn(n.getFloat()));} ) | #("exp" n=expr {tbd=new DTFloat(None,Math.exp(n.getFloat()));} ) | #("fract" n=expr {tbd=new DTFloat(None,Math.fract(n.getFloat()));} ) | #("get_event" n=expr {tbd=new DTFloat(None,Math.get_event(n.getFloat()));} ) | #("in" n=expr {tbd=new DTFloat(None,Math.expr(n.getFloat()));} ) | #("instr" s=expr s=expr {tbd=new DTFloat(None,Math.abs(n.getFloat()));} ) | #("int" n=expr {tbd=new DTFloat(None,Math.int(n.getFloat()));} ) | #("ival" s=expr {tbd=new DTFloat(None,Math.ival(n.getFloat()));} ) | #("len" s=expr {tbd=new DTFloat(None,Math.len(n.getString()));} ) | #("lgt" n=expr {tbd=new DTFloat(None,Math.abs(n.getFloat()));} ) | #("log" n=expr {tbd=new DTFloat(None,Math.abs(n.getFloat()));} ) | #("max" ( n=expr { if tbd: tbd=n elif((n.compare(tbd)>0): tbd=n } )+ ) | #("min" ( n=expr { if tbd: tbd=n elif((n.compare(tbd)<0): tbd=n } )+ ) | #("peek" n=expr i=expr {tbd=new DTInteger(None,Math.peek(n.getFloat()));} ) | #("pi" {tbd=new DTFloat(None,Math.pi(n.getFloat()));} ) | #("rnd" {tbd=new DTInteger(None,Math.rnd(n.getFloat()));} ) | #("sgn" n=expr {tbd=new DTInteger(None,n.compare(zero)));} ) | #("signed" i=expr {tbd=new DTFloat(None,Math.abs(n.getFloat()));} ) | #("sin" n=expr {tbd=new DTFloat(None,Math.sin(n.getFloat()));} ) | #("sqr" n=expr {tbd=new DTFloat(None,Math.sqr(n.getFloat()));} ) | #("tan" n=expr {tbd=new DTFloat(None,Math.tan(n.getFloat()));} ) | #("time" {tbd=new DTFloat(None,Math.time(n.getFloat()));} ) | #("ubound" s=expr i=expr {tbd=new DTInteger(None,Math.ubound(n.getFloat()));} ) | #("val" s=expr {tbd=new DTFloat(None,Math.val(n.getFloat()));} ) // BIT Functions | #("andb" 
i=expr i=expr {tbd=new DTInteger(None,Math.andb(n.getFloat()));} ) | #("orb" i=expr i=expr {tbd=new DTInteger(None,Math.orb(n.getInteger()));} ) | #("notb" i=expr {tbd=new DTInteger(None,Math.abs(n.getInteger()));} ) | #("shiftb" i=expr i=expr {tbd=new DTInteger(None,Math.abs(i.getInteger()));} ) | #("xorb" i=expr i=expr {tbd=new DTInteger(None,Math.abs(i.getInteger()));} ) */ ; //-------------Orphan helpers // Helper Orphan condition returns [condValue] : #(CONDITION condValue=expr) ; // Helper Orphan forBody : #(FOR_BODY codeBlock) ; // Helper Orphan codeBlock : #(CODE_BLOCK (statement)*) ; antlr-2.7.7/examples/python/tinybasic/basic2.in0000644000175000017500000000034510522211616021422 0ustar twernertwernerPROGRAM ( ChainParameterString$ , DebugMode% ) dim a%(2,3) a%(1,2)=7 print "row=1 col=2=>>>",a%(1,2) x%=3 call xyz(x%,2,a%(,)) end sub xyz( y% , z% ,b%(,) ) print "Are you watching?",y%,z%,b%(1,2) exit sub end sub antlr-2.7.7/examples/python/HTML/0000755000175000017500000000000010522211616016504 5ustar twernertwernerantlr-2.7.7/examples/python/HTML/Makefile.in0000644000175000017500000000340510522211616020553 0ustar twernertwerner## This file is part of ANTLR (http://www.antlr.org). Have a ## look into LICENSE.txt for license details. This file has ## been written by (C) Wolfgang Haefelinger, 2004. ## do not change this value subdir=examples/python/HTML ## get configured (standard) variables - checkout or modify ## scripts/config.vars[.in] for details. @stdvars@ ### how to get rid of damned dos line ending style and -- al- ### most equally worse -- stupid tab character. ### dos2unix = perl -p -i.tmp -e 's,\r,,g;s,\t, ,g' dos2unix = : ### when running python we invoke python like .. python = /bin/sh @abs_this_builddir@/scripts/python.sh ## get configured rules @stdmake@ ## By default we compile class files so we are ready to carry ## out a test. Note that deps have been setup in such a way ## that you can do a 'make compile' whithout having made ## antlr.jar before. this : compile all :: compile g_FILES = \ $(_srcdir)/html.g \ $(eol) g_py_FILES = \ html_l.py \ $(eol) compile : $(g_py_FILES) test :: test1 test1_deps = \ $(g_py_FILES) \ $(buildtree)/scripts/python.sh \ $(eol) test1_cmd = \ $(python) html_l.py < $(_srcdir)/html.in \ $(eol) test1 : $(test1_deps) @ $(test1_cmd) $(g_py_FILES) : $(g_FILES) @ @RMF@ $(g_py_FILES) @ @ANTLR_COMPILE_CMD@ $(g_FILES) @ $(dos2unix) $(g_py_FILES) $(g_py_FILES) : @ANTLR_JAR@ $(g_py_FILES) : $(buildtree)/scripts/antlr.sh ### cleanup html clean :: @@ECHO@ cleaning html ... @ -@RMF@ $(g_py_FILES) @ -@RMF@ *.pyc *.tmp *TokenTypes.txt *TokenTypes ### get configured dependencies - for example, just list ### autoconf variable ANTLR_JAR as reference and it will ### be done automatically as stddeps contains appropr. ### rule. For details, checkout scripts/config.vars[.in] @stddeps@ .PHONY: compile .PHONY: test1 antlr-2.7.7/examples/python/HTML/html.in0000644000175000017500000000312410522211616020000 0ustar twernertwerner A test file fdsfdsfdssfd

ANTLR 2.xx Meta-Language

ANTLR 2.0 accepts three types of grammar specifications -- parsers, lexers, and tree-parsers (also called tree-walkers). Because ANTLR 2.0 uses LL(k) analysis for all three grammar variants, the grammar specifications are similar, and the generated lexers and parsers behave similarly.

Note: in this document, the word "parser" usually includes tree-parsers as well as token stream parsers, except where noted.

Meta-Language Vocabulary

Whitespace. Spaces, tabs, and newlines are separators in that they can separate ANTLR vocabulary

Download ANTLR 2.4.0.

ANTLR 2.4.0 release notes

ANTLR Meta-Language

antlr-2.7.7/examples/python/HTML/html.g0000644000175000017500000002105010522211616017616 0ustar twernertwerner// This file is part of PyANTLR. See LICENSE.txt for license // details..........Copyright (C) Wolfgang Haefelinger, 2004. // // $Id$ header "html_l.__main__" { L = Lexer() token = L.nextToken() while not token.isEOF(): print token token = L.nextToken() } options { language=Python; } class html_l extends Lexer; options { k = 4; exportVocab=HTML; charVocabulary = '\3'..'\377'; caseSensitive=false; filter=UNDEFINED_TOKEN; } /* STRUCTURAL tags */ DOCTYPE : "' ; OHTML : "" ; CHTML : "" ; OHEAD : "" ; CHEAD : "" ; OBODY : "' ; CBODY : "" ; /* HEAD ELEMENTS */ OTITLE : "" ; CTITLE : "" ; OSCRIPT : "" ; ISINDEX : "' ; META : "' ; LINK : "' ; /* headings */ OH1 : "' ; CH1 : "" ; OH2 : "' ; CH2 : "" ; OH3 : "' ; CH3 : "" ; OH4 : "' ; CH4 : "" ; OH5 : "' ; CH5 : "" ; OH6 : "' ; CH6 : "" ; OADDRESS : "
" ; CADDRESS : "
" ; OPARA : "' ; CPARA : "

" //it's optional ; /*UNORDERED LIST*/ OULIST : "' ; CULIST : "" ; /*ORDERED LIST*/ OOLIST : "' ; COLIST : "" ; /*LIST ITEM*/ OLITEM : "' ; CLITEM : "" ; /*DEFINITION LIST*/ ODLIST : "' ; CDLIST : "" ; ODTERM : "
" ; CDTERM : "
" ; ODDEF : "
" ; CDDEF : "
" ; ODIR: "" ; CDIR_OR_CDIV : "' ; ODIV: "' ; OMENU : "" ; CMENU : "" ; OPRE: ("
" | "") ('\n')? 
	;

CPRE:	 "</pre>" | "" 
	;

OCENTER
	:	"
" ; CCENTER : "
" ; OBQUOTE : "
" ; CBQUOTE : "
" ; //this is block element and thus can't be nested inside of //other block elements, ex: paragraphs. //Netscape appears to generate bad HTML vis-a-vis the standard. HR : "' ; OTABLE : "' ; CTABLE : "" ; OCAP: "' ; CCAP: "" ; O_TR : "' ; C_TR: "" ; O_TH_OR_TD : ("' ; C_TH_OR_TD : "" | "" ; /* PCDATA-LEVEL ELEMENTS */ /* font style elemens*/ OTTYPE : "" ; CTTYPE : "" ; OITALIC : "" ; CITALIC : "" ; OBOLD : "" ; CBOLD : "" ; OUNDER : "" ; CUNDER : "" ; /* Left-factor and to reduce lookahead */ OSTRIKE_OR_OSTRONG : "' ; CST_LEFT_FACTORED : "' ; OSTYLE : "

C++ Notes


The C++ runtime and generated grammars look very much the same as the Java ones. There are some subtle differences, though; more on this later.

Building the runtime

The following is a bit Unix-centric. For Windows some contributed project files can be found in lib/cpp/contrib. These may be slightly outdated.

The runtime files are located in the lib/cpp subdirectory of the ANTLR distribution. Building it is in general done via the top-level configure script and the Makefile it generates. Before configuring please read INSTALL.txt in the top-level directory. The file lib/cpp/README may contain some extra information on specific target machines.

./configure --prefix=/usr/local
make

Installing ANTLR and the runtime is then done by typing

make install
This installs the runtime library libantlr.a in /usr/local/lib and the header files in /usr/local/include/antlr. Two convenience scripts antlr and antlr-config are also installed into /usr/local/bin. The first script takes care of invoking antlr and the other can be used to query the right options for your compiler to build files with antlr.

Using the runtime

Generally you will compile the ANTLR generated files with something similar to:
c++ -c MyParser.cpp -I/usr/local/include
Linking is done with something similar to:
c++ -o MyExec <your .o files> -L/usr/local/lib -lantlr

Getting ANTLR to generate C++

To get ANTLR to generate C++ code you have to add

language="Cpp";
to the global options section. After that things are pretty much the same as in Java mode, except that all token and AST classes are wrapped by a reference-counting class (this makes life easier in some ways and much harder in others). The reference counting class uses
operator->
to reference the object it is wrapping. As a result you use -> in C++ mode instead of the '.' of Java. See the examples in examples/cpp for some illustrations.
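For example, inside a parser action you might write something like the following (a minimal sketch; only the arrow notation and the getText()/getType() accessors are the point, the variable names are made up):

antlr::RefToken tok = LT(1);        // tokens arrive wrapped in the reference-counting class
std::string txt = tok->getText();   // so it's tok->getText() ...
int typ = tok->getType();           // ... where Java code would use tok.getText()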

AST types

New as of ANTLR 2.7.2 is that if you supply the

buildAST=true
option to a parser then you have to set and initialize an ASTFactory for the parser and treewalkers that use the resulting AST.
ASTFactory my_factory;	// generates CommonAST per default..
MyParser parser( some-lexer );
// Do setup of the AST factory; repeat this for all parsers using the AST
parser.initializeASTFactory( my_factory );
parser.setASTFactory( &my_factory );

In C++ mode it is also possible to override the AST type used by the code generated by ANTLR. To do this you have to do the following:

  • Define a custom AST class like the following:
    #ifndef __MY_AST_H__
    #define __MY_AST_H__
    
    #include <antlr/CommonAST.hpp>
    
    class MyAST;
    
    typedef ANTLR_USE_NAMESPACE(antlr)ASTRefCount<MyAST> RefMyAST;
    
    /** Custom AST class that adds line numbers to the AST nodes.
     * easily extended with columns. Filenames will take more work since
     * you'll need a custom token class as well (one that contains the
     * filename)
     */
    class MyAST : public ANTLR_USE_NAMESPACE(antlr)CommonAST {
    public:
       // copy constructor
       MyAST( const MyAST& other )
       : CommonAST(other)
       , line(other.line)
       {
       }
       // Default constructor
       MyAST( void ) : CommonAST(), line(0) {}
       virtual ~MyAST( void ) {}
       // get the line number of the node (or try to derive it from the child node)
       virtual int getLine( void ) const
       {
          // most of the time the line number is not set if the node is a
          // imaginary one. Usually this means it has a child. Refer to the
          // child line number. Of course this could be extended a bit.
          // based on an example by Peter Morling.
          if ( line != 0 )
             return line;
          if( getFirstChild() )
             return ( RefMyAST(getFirstChild())->getLine() );
          return 0;
       }
       virtual void setLine( int l )
       {
          line = l;
       }
       /** the initialize methods are called by the tree building constructs
        * depending on which version is called the line number is filled in.
        * e.g. a bit depending on how the node is constructed it will have the
        * line number filled in or not (imaginary nodes!).
        */
       virtual void initialize(int t, const ANTLR_USE_NAMESPACE(std)string& txt)
       {
          CommonAST::initialize(t,txt);
          line = 0;
       }
       virtual void initialize( ANTLR_USE_NAMESPACE(antlr)RefToken t )
       {
          CommonAST::initialize(t);
          line = t->getLine();
       }
       virtual void initialize( RefMyAST ast )
       {
          CommonAST::initialize(ANTLR_USE_NAMESPACE(antlr)RefAST(ast));
          line = ast->getLine();
       }
       // for convenience will also work without
       void addChild( RefMyAST c )
       {
          BaseAST::addChild( ANTLR_USE_NAMESPACE(antlr)RefAST(c) );
       }
       // for convenience will also work without
       void setNextSibling( RefMyAST c )
       {
          BaseAST::setNextSibling( ANTLR_USE_NAMESPACE(antlr)RefAST(c) );
       }
       // provide a clone of the node (no sibling/child pointers are copied)
       virtual ANTLR_USE_NAMESPACE(antlr)RefAST clone( void )
       {
          return ANTLR_USE_NAMESPACE(antlr)RefAST(new MyAST(*this));
       }
       static ANTLR_USE_NAMESPACE(antlr)RefAST factory( void )
       {
          return ANTLR_USE_NAMESPACE(antlr)RefAST(RefMyAST(new MyAST()));
       }
    private:
       int line;
    };
    #endif
    
  • Tell ANTLR's C++ code generator to use your RefMyAST by including the following in the options section of your grammars:
    ASTLabelType = "RefMyAST";
    
    After that you only need to tell the parser before every invocation of a new instance that it should use the AST factory defined in your class. This is done like this:
    // make factory with default type of MyAST
    ASTFactory my_factory( "MyAST", MyAST::factory );
    My_Parser parser(lexer);
    // make sure the factory knows about all AST types in the parser..
    parser.initializeASTFactory(my_factory);
    // and tell the parser about the factory..
    parser.setASTFactory( &my_factory );
    

    After these steps you can access methods/attributes of (Ref)MyAST directly (without typecasting) in parser/treewalker productions.

    Forgetting to do a setASTFactory results in a nice SIGSEGV or your OS's equivalent. The default constructor of ASTFactory initializes itself to generate CommonAST objects.

    If you use a 'chain' of parsers/treewalkers then you have to make sure they all share the same AST factory. Also if you add new definitions of ASTnodes/tokens in downstream parsers/treewalkers you have to apply the respective initializeASTFactory methods to this factory.

    This all is demonstrated in the examples/cpp/treewalk example.
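    As a sketch, such a chain could be wired up like this (MyTreeParser stands in for whatever tree walker your grammar generates; the names are illustrative):

    ASTFactory my_factory( "MyAST", MyAST::factory );
    My_Parser parser(lexer);
    parser.initializeASTFactory(my_factory);  // register the parser's node types
    parser.setASTFactory(&my_factory);

    MyTreeParser walker;
    walker.initializeASTFactory(my_factory);  // downstream walker adds its types too
    walker.setASTFactory(&my_factory);        // ... and shares the very same factory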

Using Heterogeneous AST types

This should now (as of 2.7.2) work in C++ mode, with probably some caveats.

The heteroAST example shows how to set things up. A short excerpt:

ASTFactory ast_factory;

parser.initializeASTFactory(ast_factory);
parser.setASTFactory(&ast_factory);

A small excerpt from the generated initializeASTFactory method:

void CalcParser::initializeASTFactory( antlr::ASTFactory& factory )
{
   factory.registerFactory(4, "PLUSNode", PLUSNode::factory);
   factory.registerFactory(5, "MULTNode", MULTNode::factory);
   factory.registerFactory(6, "INTNode", INTNode::factory);
   factory.setMaxNodeType(11);
}

After these steps ANTLR should be able to decide what factory to use at what time.
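The factory functions that get registered are simply static members of the node classes. A minimal sketch of such a class (illustrative only, not the actual heteroAST example code) might be:

class PLUSNode : public antlr::CommonAST {
public:
   PLUSNode() {}
   // this is the function handed to ASTFactory::registerFactory(...)
   static antlr::RefAST factory()
   {
      return antlr::RefAST(new PLUSNode());
   }
   // node-specific behaviour, e.g. an evaluate() method, would go here
};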

Extra functionality in C++ mode.

In C++ mode ANTLR supports some extra functionality to make life a little easier.

Inserting Code

In C++ mode some extra control is supplied over the places where code can be placed in the generated files. These are extensions of the header directive. The syntax is:
header "<identifier>" {  }

identifier         where
pre_include_hpp    Code is inserted before ANTLR generated includes in the header file.
post_include_hpp   Code is inserted after ANTLR generated includes in the header file, but outside any generated namespace specifications.
pre_include_cpp    Code is inserted before ANTLR generated includes in the cpp file.
post_include_cpp   Code is inserted after ANTLR generated includes in the cpp file, but outside any generated namespace specifications.

Pacifying the preprocessor

Sometimes various tree building constructs with '#' in them clash with the C/C++ preprocessor. ANTLR's preprocessor for actions is slightly extended in C++ mode to alleviate these pains.

NOTE: At some point I plan to replace the '#' by something different that gives less trouble in C++.

The following preprocessor constructs are not touched (and as a result you cannot use them as labels for AST nodes):

  • if
  • define
  • ifdef
  • ifndef
  • else
  • elif
  • endif
  • warning
  • error
  • ident
  • pragma
  • include

As another extra it is possible to escape '#' signs with a backslash, e.g. "\#". When the action lexer sees these, they are translated to plain '#' characters.
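For example, inside an action (the C++ code between curly braces) you could write something like this sketch (the DEBUG macro and the messages are made up):

{
   #ifdef DEBUG                       // recognized directive: ANTLR leaves it untouched
   std::cerr << "matched an item" << std::endl;
   #endif
   std::cout << "item \# one" << std::endl;  // '\#' reaches the generated code as '#'
}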

A template grammar file for C++

header "pre_include_hpp" {
    // gets inserted before antlr generated includes in the header file
}
header "post_include_hpp" {
    // gets inserted after antlr generated includes in the header file
     // outside any generated namespace specifications
}

header "pre_include_cpp" {
    // gets inserted before the antlr generated includes in the cpp file
}

header "post_include_cpp" {
    // gets inserted after the antlr generated includes in the cpp file
}

header {
    // gets inserted after generated namespace specifications in the header
    // file. But outside the generated class.
}

options {
   language="Cpp";
    namespace="something";      // encapsulate code in this namespace
//  namespaceStd="std";         // cosmetic option to get rid of long defines
                                // in generated code
//  namespaceAntlr="antlr";     // cosmetic option to get rid of long defines
                                // in generated code
    genHashLines = true;        // generated #line's or turn it off.
}

{
   // global stuff in the cpp file
   ...
}
class MyParser extends Parser;
options {
   exportVocab=My;
}
{
   // additional methods and members
   ...
}
... rules ...

{
   // global stuff in the cpp file
   ...
}
class MyLexer extends Lexer;
options {
   exportVocab=My;
}
{
   // additional methods and members
   ...
}
... rules ...

{
   // global stuff in the cpp file
   ...
}
class MyTreeParser extends TreeParser;
options {
   exportVocab=My;
}
{
   // additional methods and members
   ...
}
... rules ...

antlr-2.7.7/doc/glossary.html0000644000175000017500000005577010522211615016114 0ustar twernertwerner ANTLR-centric Language Glossary

ANTLR-centric Language Glossary

Terence Parr

This glossary defines some of the more important terms used in the ANTLR documentation. I have tried to be very informal and provide references to other pages that are useful. For another great source of information about formal computer languages, see Wikipedia.

Ambiguous

A language is ambiguous if the same sentence or phrase can be interpreted in more than a single way. For example, the following sentence by Groucho Marx is easily interpreted in two ways: "I once shot an elephant in my pajamas. How he got in my pajamas I'll never know!" In the computer world, a typical language ambiguity is the if-then-else ambiguity where the else-clause may be attached to either the most recent if-then or an older one. Reference manuals for computer languages resolve this ambiguity by stating that else-clauses always match up with the most recent if-then.

A grammar is ambiguous if the same input sequence can be derived in multiple ways. Ambiguous languages always yield ambiguous grammars unless you can find a way to encode semantics (actions or predicates etc...) that resolve the ambiguity. Most language tools like ANTLR resolve the if-then-else ambiguity by simply choosing to match greedily (i.e., as soon as possible). This matches the else with the most recent if-then. See nondeterministic.

ANTLR

ANother Tool for Language Recognition, a predicated-LL(k) parser generator that handles lexers, parsers, and tree parsers. ANTLR has been available since 1990 and led to a resurgence of recursive-descent parsing after decades dominated by LR and other DFA-based strategies.

AST

Abstract Syntax Tree. ASTs are used as internal representations of an input stream, normally constructed during a parsing phase. Because ASTs are two-dimensional trees, they can encode the structure (as determined by the parser) of the input as well as the input symbols.

A homogeneous AST is one in which the physical objects are all of the same type; e.g., CommonAST in ANTLR. A heterogeneous tree may have multiple types such as PlusNode, MultNode etc...

An AST is not a parse tree, which encodes the sequence of rules used to match input symbols. See What's the difference between a parse tree and an abstract syntax tree (AST)? Why doesn't ANTLR generate trees with nodes for grammar rules like JJTree does?.

An AST for input 3+4 might be represented as

   +
  / \
 3   4
or more typically (ala ANTLR) in child-sibling form:
+
|
3--4
Operators are usually subtree roots and operands are usually leaves.

Bit set

Bit sets are an extremely efficient representation for dense integer sets. You can easily encode sets of strings also by mapping unique strings to unique integers. ANTLR uses bitsets for lookahead prediction in parsers and lexers. Simple bit set implementations do not work so well for sparse sets, particularly when the maximum integer stored in the set is large.

ANTLR's bit set represents membership with a bit for each possible integer value. For a maximum value of n, a bit set needs n/64 long words or n/8 bytes. For ASCII bit sets with a maximum value of 127, you only need 16 bytes or 2 long words. UNICODE has a max value of \uFFFF or 65535, requiring 8k bytes, and these sets are typically sparse. Fortunately most lexers only need a few of these space inefficient (but speedy) bitsets and so it's not really a problem.
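In code terms the membership test is just an index, a shift and a mask over an array of 64-bit words; roughly (a sketch, not ANTLR's actual BitSet implementation):

// is element el a member of the set stored in words[]?
bool member(const unsigned long long words[], int el)
{
    return (words[el / 64] & (1ULL << (el % 64))) != 0;
}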

Child-sibling Tree

A particularly efficient data structure for representing trees. See AST.
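Each node needs only two pointers, one "down" to its first child and one "right" to its next sibling, no matter how many children it has; a bare-bones sketch:

// child-sibling node: further children of a node are reached through
// the first child's nextSibling pointer
struct Node {
    int   type;          // token type, e.g. PLUS
    Node* firstChild;    // down
    Node* nextSibling;   // right
};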

Context-free grammar

A grammar where recognition of a particular construct does not depend on whether it is in a particular syntactic context. A context-free grammar has a set of rules like
stat : IF expr THEN stat
     | ...
     ;
where there is no restriction on when the IF alternative may be applied--if you are in rule stat, you may apply the alternative.

Context-sensitive

A grammar where recognition of a particular construct may depend on a syntactic context. You never see these grammars in practice because they are impractical (Note, an Earley parser is O(n^3) worst-case for context-free grammars). A context-free rule looks like:
Α → γ
but a context-sensitive rule may have context on the left-side:
αΑβ → αγβ
meaning that rule Α may only be applied (converted to γ) in between α and β.

In an ANTLR sense, you can recognize context-sensitive constructs with a semantic predicate. The action evaluates to true or false indicating the validity of applying the alternative.

See Context-sensitive grammar.

DFA

Deterministic Finite Automata. A state machine used typically to formally describe lexical analyzers. lex builds a DFA to recognize tokens whereas ANTLR builds a recursive descent lexer similar to what you would build by hand. See Finite state machine and ANTLR's lexer documentation.

FIRST

The set of symbols that may be matched on the left-edge of a rule. For example, FIRST(decl) is the set {ID, INT} for the following:
decl : ID ID SEMICOLON
     | INT ID SEMICOLON
     ;
The situation gets more complicated when you have optional constructs. The FIRST(a) below is {A,B,C}
a : (A)? B
  | C
  ;
because the A is optional and the B may be seen on the left-edge.

Naturally k>1 lookahead symbols make this even more complicated; FIRST_k must track sets of k-sequences, not just individual symbols.

FOLLOW

The set of input symbols that may follow any reference to the specified rule. For example, FOLLOW(decl) is {RPAREN, SEMICOLON}:
methodHead : ID LPAREN decl RPAREN ;
var : decl SEMICOLON ;
decl : TYPENAME ID ;
because RPAREN and SEMICOLON both follow references to rule decl. FIRST and FOLLOW computations are used to analyze grammars and generate parsing decisions.
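FOLLOW sets are, for instance, what let a generated parser decide when to stop matching an optional or looping subrule. A sketch of the code a tool might emit for a rule like a : (A)* B ; (illustrative, not ANTLR's actual output):

while (LA(1) == A) {   // lookahead starts another A: stay in the loop
    match(A);
}                      // anything in FOLLOW of the subrule -- here just B -- exits it
match(B);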

This grammar analysis all gets very complicated when k>1.

Grammar

A finite means of formally describing the structure of a possibly infinite language. Parser generators build parsers that recognize sentences in the language described by a grammar. Most parser generators allow you to add actions to be executed during the parse.

Hoisting

Semantic predicates describe the semantic context in which a rule or alternative applies. The predicate is hoisted into a prediction expression. Hoisting typically refers to pulling a predicate out of its enclosing rule and into the prediction expression of another rule. For example,
decl     : typename ID SEMICOLON
         | ID ID SEMICOLON
         ;
typename : {isType(LT(1))}? ID
         ;
The predicate is not needed in typename as there is no decision; however, rule decl needs it to distinguish between its two alternatives. The first alternative would look like:
if ( LA(1)==ID && isType(LT(1)) ) {
  typename();
  match(ID);
  match(SEMICOLON);
}
PCCTS 1.33 did, but ANTLR currently does not hoist predicates into other rules.

Inheritance, grammar

The ability of ANTLR to define a new grammar as it differs from an existing grammar. See the ANTLR documentation.

LA(n)

The nth lookahead character, token type, or AST node type depending on the grammar type (lexer, parser, or tree parser respectively).

Left-prefix, left factor

A common sequence of symbols on the left-edge of a set of alternatives such as:
a : A B X
  | A B Y
  ;
The left-prefix is A B, which you can remove by left-factoring:
a : A B (X|Y)
  ;
Left-factoring is done to reduce lookahead requirements.

Literal

Generally a literal refers to a fixed string such as begin that you wish to match. When you reference a literal in an ANTLR grammar via "begin", ANTLR assigns it a token type like any other token. If you have defined a lexer, ANTLR provides information about the literal (type and text) to the lexer so it may detect occurrences of the literal.

Linear approximate lookahead

An approximation to full lookahead (that can be applied to both LL and LR parsers) for k>1 that reduces the complexity of storing and testing lookahead from O(n^k) to O(nk); exponential to linear reduction. When linear approximate lookahead is insufficient (results in a nondeterministic parser), you can use the approximate lookahead to attenuate the cost of building the full decision.

Here is a simple example illustrating the difference between full and approximate lookahead:

a : (A B | C D)
  | A D
  ;
This rule is LL(2) but not linear approximate LL(2). The real FIRST_2(a) is {AB,CD} for alternative 1 and {AD} for alternative 2. No intersection, so no problem. Linear approximate lookahead collapses all symbols at depth i yielding k sets instead of a possibly n^k k-sequences. The approximation (compressed) sets are {AB,AD,CD,CB} and {AD}. Note the introduction of the spurious k-sequences AD and CB. Unfortunately, this compression introduces a conflict upon AD between the alternatives. PCCTS did full LL(k) and ANTLR does linear approximate only as I found that linear approximate lookahead works for the vast majority of parsing decisions and is extremely fast. I find one or two problem spots in a large grammar usually with ANTLR, which forces me to reorganize my grammar in a slightly unnatural manner. Unfortunately, your brain does full LL(k) and ANTLR does a slightly weaker linear approximate lookahead--a source of many (invalid) bug reports ;)

This compression was the subject of my doctoral dissertation (PDF 477k) at Purdue.

LL(k)

Formally, LL(k) represents a class of parsers and grammars that parse symbols from left-to-right (beginning to end of input stream) using a leftmost derivation and using k symbols of lookahead. A leftmost derivation is one in which derivations (parses) proceed by attempting to replace rule references from left-to-right within a production. Given the following rule
stat : IF expr THEN stat
     | ...
     ;
an LL parser would match the IF and then attempt to parse expr, rather than a rightmost derivation, which would attempt to parse stat first.

LL(k) is synonymous with a "top-down" parser because the parser begins at the start symbol and works its way down the derivation/parse tree (tree here means the stack of method activations for recursive descent or symbol stack for a table-driven parser). A recursive-descent parser is particular implementation of an LL parser that uses functions or method calls to implement the parser rather than a table.

ANTLR generates predicated-LL(k) parsers that support syntactic and semantic predicates, allowing you to specify many context-free and context-sensitive grammars (with a bit of work).

LT(n)

In a parser, this is the nth lookahead Token object.

Language

A possibly infinite set of valid sentences. The vocabulary symbols may be characters, tokens, and tree nodes in an ANTLR context.

Lexer

A recognizer that breaks up a stream of characters into vocabulary symbols for a parser. The parser pulls vocabulary symbols from the lexer via a queue.
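In the C++ runtime, for example, that hand-off is set up roughly like this (a sketch; MyLexer, MyParser and startRule stand in for whatever your grammar generates):

MyLexer  lexer(std::cin);   // breaks characters into tokens
MyParser parser(lexer);     // parser pulls tokens from the lexer as it needs them
parser.startRule();         // invoke whatever your grammar's entry rule is named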

Lookahead

When parsing a stream of input symbols, a parser has matched (and no longer needs to consider) a portion of the stream to the left of its read pointer. The next k symbols to the right of the read pointer are considered the fixed lookahead. This information is used to direct the parser to the next state. In an LL(k) parser this means to predict which path to take from the current state using the next k symbols of lookahead.

ANTLR supports syntactic predicates, a manually-specified form of backtracking that effectively gives you infinite lookahead. For example, consider the following rule that distinguishes between sets (comma-separated lists of words) and parallel assignments (one list assigned to another):

stat:   ( list "=" )=> list "=" list
    |   list
    ;
If a list followed by an assignment operator is found on the input stream, the first production is predicted. If not, the second alternative production is attempted.

nextToken

A lexer method automatically generated by ANTLR that figures out which of the lexer rules to apply. For example, if you have two rules ID and INT in your lexer, ANTLR will generate a lexer with methods for ID and INT as well as a nextToken method that figures out which rule method to attempt given k input characters.
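Conceptually the dispatch looks something like this (a heavily simplified sketch; the real generated method also deals with whitespace, EOF, errors and token creation):

RefToken nextToken()
{
    for (;;) {
        int c = LA(1);
        if (c >= '0' && c <= '9') return mINT();  // lookahead selects the INT rule
        if (isalpha(c))           return mID();   // ... or the ID rule
        // ... skipped tokens, EOF handling and error reporting elided ...
    }
}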

NFA

Nondeterministic Finite Automata. See Finite state machine.

Nondeterministic

A parser is nondeterministic if there is at least one decision point where the parser cannot resolve which path to take. Nondeterminisms arise because of parsing strategy weaknesses.
  • If your strategy works only for unambiguous grammars, then ambiguous grammars will yield nondeterministic parsers; this is true of the basic LL, LR strategies. Even unambiguous grammars can yield nondeterministic parsers though. Here is a nondeterministic LL(1) grammar:
    decl : ID ID SEMICOLON
         | ID SEMICOLON
         ;
    
    Rule decl is, however, LL(2) because the second lookahead symbol (either ID or SEMICOLON) uniquely determines which alternative to predict. You could also left-factor the rule to reduce the lookahead requirements.

  • If you are willing to pay a performance hit or simply need to handle ambiguous grammars, you can use an Earley parser or a Tomita parser (LR-based) that match all possible interpretations of the input, thus, avoiding the idea of nondeterminism altogether. This does present problems when trying to execute actions, however, because multiple parses are, in effect, occurring in parallel.

Note that a parser may have multiple decision points that are nondeterministic.

Parser

A recognizer that applies a grammatical structure to a stream of vocabulary symbols called tokens.

Predicate, semantic

A semantic predicate is a boolean expression used to alter the parse based upon semantic information. This information is usually a function of the constructs/input that have already been matched, but can even be a flag that turns on and off subsets of the language (as you might do for a grammar handling both K&R and ANSI C). One of the most common semantic predicates uses a symbol table to help distinguish between syntactically, but semantically different productions. In FORTRAN, array references and function calls look the same, but may be distinguished by checking what the type of the identifier is.
expr : {isVar(LT(1))}? ID LPAREN args RPAREN  // array ref
     | {isFunction(LT(1))}? ID LPAREN args RPAREN // func call
     ;

Predicate, syntactic

A selective form of backtracking used to recognize language constructs that cannot be distinguished without seeing all or most of the construct. For example, in C++ some declarations look exactly like expressions. You have to check to see if it is a declaration. If it parses like a declaration, assume it is a declaration--reparse it with "feeling" (execute your actions). If not, it must be an expression or an error:
stat : (declaration) => declaration
     | expression
     ;

Production

An alternative in a grammar rule.

Protected

A protected lexer rule does not represent a complete token--it is a helper rule referenced by another lexer rule. This overloading of the access-visibility Java term occurs because if the rule is not visible, it cannot be "seen" by the parser (yes, this nomenclature sucks).

Recursive-descent

See LL(k).

Regular

A regular language is one that can be described by a regular grammar or regular expression or accepted by a DFA-based lexer such as those generated by lex. Regular languages are normally used to describe tokens.

In practice you can pick out a regular grammar by noticing that references to other rules are not allowed except at the end of a production. The following grammar is regular because the reference to B occurs at the right-edge of rule A.

A : ('a')+ B ;
B : 'b' ;
Another way to look at it is, "what can I recognize without a stack (such as a method return address stack)?".

Regular grammars cannot describe context-free languages; hence, LL- or LR-based grammars are used to describe programming languages. ANTLR is not restricted to regular languages for tokens because it generates recursive-descent lexers. This makes it handy to recognize HTML tags and so on all in the lexer.

Rule

A rule describes a partial sentence in a language such as a statement or expression in a programming language. Rules may have one or more alternative productions.

Scanner

See Lexer.

Semantics

See What do "syntax" and "semantics" mean and how are they different?.

Subrule

Essentially a rule that has been expanded inline. Subrules are enclosed in parentheses and may have suffixes like star, plus, and question mark that indicate zero-or-more, one-or-more, or optional. The following rule has 3 subrules:
a : (A|B)+ (C)* (D)?
  ;

Syntax

See What do "syntax" and "semantics" mean and how are they different?.

Token

A vocabulary symbol for a language. This term typically refers to the vocabulary symbols of a parser. A token may represent a constant symbol such as a keyword like begin or a "class" of input symbols like ID or INTEGER_LITERAL.

Token stream

See Token Streams in the ANTLR documentation.

Tree

See AST and What's the difference between a parse tree and an abstract syntax tree (AST)? Why doesn't ANTLR generate trees with nodes for grammar rules like JJTree does?.

Tree parser

A recognizer that applies a grammatical structure to a two-dimensional input tree. Grammatical rules are like an "executable comment" that describe the tree structure. These parsers are useful during translation to (1) annotate trees with, for example, symbol table information, (2) perform tree rewrites, and (3) generate output.
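For example, a small tree-parser rule that computes a value from an expression AST might look like the following sketch (the PLUS and INT token types and the tree shape are assumed for illustration):

class ExprTreeWalker extends TreeParser;

expr returns [int r=0]
{ int a,b; }
    :   #(PLUS a=expr b=expr) { r = a+b; }
    |   i:INT                 { r = Integer.parseInt(i.getText()); }
    ;

The rule is an "executable comment" in the sense above: it states that a PLUS node has two expression children and says what to do when that structure is seen.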

Vocabulary

The set of symbols used to construct sentences in a language. These symbols are usually called tokens or token types. For lexers, the vocabulary is a set of characters.

Wow

See ANTLR.

Options

File, Grammar, and Rule Options

Rather than have the programmer specify a bunch of command-line arguments to the parser generator, an options section within the grammar itself serves this purpose. This solution is preferable because it associates the required options with the grammar rather than ANTLR invocation. The section is preceded by the options keyword and contains a series of option/value assignments surrounded by curly braces such as:


options {
   k = 2;
   tokenVocabulary = IDL;
   defaultErrorHandler = false;
}

The options section for an entire (.g) file, if specified, immediately follows the (optional) file header:

header { package X; }
options {language="FOO";}

The options section for a grammar, if specified, must immediately follow the ';' of the class specifier:

class MyParser extends Parser;
options { k=2; }

The options section for a rule, if specified, must immediately follow the rule name:

myrule[args] returns [retval]
   options { defaultErrorHandler=false; }
   :   // body of rule...
   ;    

The option names are not keywords in ANTLR, but rather are entries in a symbol table examined by ANTLR. The scope of option names is limited to the options section; identifiers within your grammar may overlap with these symbols.

The only ANTLR options not specified in the options section are things that do not vary with the grammar, but rather with the invocation of ANTLR itself. The best example is debugging information. Typically, the programmer will want a makefile to change an ANTLR flag indicating a debug or release build.

Options supported in ANTLR

Key for the type column: F=file, G=grammar, R=rule, L=lexer, S=subrule, C=C++ only.

Symbol Type Description
language F Set the generated language
k G Set the lookahead depth
importVocab G Initial grammar vocabulary
exportVocab G Vocabulary exported from grammar
testLiterals LG,LR Generate literal-testing code
defaultErrorHandler G,R Control default exception-handling
greedy S False implies you want subrule loop, (..)* and (..)+, to exit when it sees lookahead consistent with what follows the loop.
codeGenMakeSwitchThreshold G Control code generation
codeGenBitsetTestThreshold G Control code generation
buildAST G Set automatic AST construction in Parser (transform mode in Tree-Parser)
analyzerDebug G Spit out lots of debugging information while performing grammar analysis.
codeGenDebug G Spit out lots of debugging information while doing code generation.
ASTLabelType G Specify the type of all user-defined labels, overrides default of AST.
charVocabulary LG Set the lexer character vocabulary
interactive G Both the lexer and the parser have an interactive option, which defaults to "false". See the parser speed section above.
caseSensitive LG Case is ignored when comparing against character and string literals in the lexer. The case of the input stream is maintained when stored in the token objects.
ignore LR Specify a lexer rule to use as whitespace between lexical rule atomic elements (chars, strings, and rule references). The grammar analysis and, hence, the lookahead sets are aware of the whitespace references. This is a lexer rule option.
paraphrase LR An easy way to specify a string to use in place of the token name during error processing (see the example following this table).
caseSensitiveLiterals LG Case is ignored when comparing tokens against the literals table.
classHeaderPrefix G Replace the usual class prefix ("public" in Java) for the enclosing class definition.
classHeaderSuffix G Append a string to the enclosing class definition. In Java, this amounts to a comma-separated list of interfaces that your lexer, parser, or tree walker must implement.
mangleLiteralPrefix F Sets the prefix for the token type definitions of literals rather than using the default of "TOKEN_".
warnWhenFollowAmbig S Warnings will be printed when the lookahead set of what follows a subrule containing an empty alternative conflicts with a subrule alternative or when the implicit exit branch of a closure loop conflicts with an alternative.  The default is true.
generateAmbigWarnings S When true, no ambiguity/nondeterminism warning is generated for the decision associated with the subrule.  Use this very carefully--you may change the subrule and miss an ambiguity because of the option.  Make very sure that the ambiguity you mask is handled properly by ANTLR.  ANTLR-generated parsers resolve ambiguous decisions by consuming input as soon as possible (or by choosing the alternative listed first).

See the Java and HTML grammars for proper use of this option.  A comment should be supplied for each use indicating why it is ok to shut off the warning.

filter LG When true, the lexer ignores any input not exactly matching one of the nonprotected lexer rules.  When set to a rule name, the filter option uses that rule to parse input characters between valid tokens or those tokens of interest.
namespace FGC When set, all the C++ code generated is wrapped in the namespace mentioned here.
namespaceStd FGC When set, the ANTLR_USE_NAMESPACE(std) macros in the generated C++ code are replaced by this value. This is a cosmetic option that only makes the code more readable. It does not replace this macro in the support C++ files. Note: use this option directly after setting the language to C++.
namespaceAntlr FGC When set, the ANTLR_USE_NAMESPACE(antlr) macros in the generated C++ code are replaced by this value. This is a cosmetic option that only makes the code more readable. It does not replace this macro in the support C++ files. Note: use this option directly after setting the language to C++.
genHashLines FGC Boolean toggle, when set to 'true' #line <linenumber> "filename" lines are inserted in the generated code so compiler errors/warnings refer to the .g files.
noConstructors FGLC Boolean toggle, when set to 'true' the default constructors for the generated lexer/parser/treewalker are omitted. The user then has the option to specify them himself (with extra initializers etc.)
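For example, the paraphrase rule option mentioned in the table above can be attached to a lexer rule like this (a sketch; the rule name and string are illustrative):

SEMI
options { paraphrase = "a semicolon"; }
    :   ';'
    ;

Error messages can then refer to "a semicolon" rather than the internal token name SEMI.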

language: Setting the generated language

ANTLR supports multiple, installable code generators. Any code-generator conforming to the ANTLR specification may be invoked via the language option. The default language is "Java", but "Cpp" and "CSharp" are also supported.   The language option is specified at the file-level, for example:

header { package zparse; }
options { language="Java"; }
... classes follow ...    

k: Setting the lookahead depth

You may set the lookahead depth for any grammar (parser, lexer, or tree-walker), by using the k= option:

class MyLexer extends Lexer;
options { k=3; }
...

Setting the lookahead depth changes the maximum number of tokens that will be examined to select alternative productions, and test for exit conditions of the EBNF constructs (...)?, (...)+, and (...)*. The lookahead analysis is linear approximate (as opposed to full LL(k) ). This is a bit involved to explain in detail, but consider this example with k=2:

r :  ( A B | B A )
  |  A A
  ;

Full LL(k) analysis would resolve the ambiguity and produce a lookahead test for the first alternate like:

if ( (LA(1)==A && LA(2)==B) || (LA(1)==B && LA(2)==A) )

However, linear approximate analysis would logically OR the lookahead sets at each depth, resulting in a test like:

if ( (LA(1)==A || LA(1)==B) && (LA(2)==A || LA(2)==B) )

This test is ambiguous with the second alternative for input {A,A}. Because of this, setting the lookahead depth very high tends to yield diminishing returns in most cases, because the lookahead sets at large depths will include almost everything.
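One way around this particular problem is to left-factor the rule so that each decision is exact even under linear approximate lookahead; a sketch:

r :  A ( B | A )
  |  B A
  ;

Now the outer decision needs only LA(1) (A versus B), and the inner subrule distinguishes B from A directly, so no approximation-induced ambiguity remains.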

importVocab: Initial Grammar Vocabulary

[See the documentation on vocabularies for more information]

To specify an initial vocabulary (tokens, literals, and token types), use the importVocab grammar option.

class MyParser extends Parser;
options {
   importVocab=V;
}

ANTLR will look for VTokenTypes.txt in the current directory and preload the token manager for MyParser with the enclosed information.

This option is useful, for example, if you create an external lexer and want to connect it to an ANTLR parser. Conversely, you may create an external parser and wish to use the token set with an ANTLR lexer. Finally, you may find it more convenient to place your grammars in separate files, especially if you have multiple tree-walkers that do not add any literals to the token set.

The vocabulary file has an identifier on the first line that names the token vocabulary that is followed by lines of the form ID=value or "literal"=value. For example:

ANTLR // vocabulary name
"header"=3
ACTION=4
COLON=5
SEMI=6
...

A file of this form is automatically generated by ANTLR for each grammar.

Note: you must take care to run ANTLR on the vocabulary-generating grammar files before you run ANTLR on the vocabulary-consuming grammar files.

exportVocab: Naming Export Vocabulary

[See the documentation on vocabularies for more information]

The vocabulary of a grammar is the union of the set of tokens provided by an importVocab option and the set of tokens and literals defined in the grammar.  ANTLR exports a vocabulary for each grammar whose default name is the same as the grammar.   So, the following grammar yields a vocabulary called P:

class P extends Parser;
a : A;

ANTLR generates files PTokenTypes.txt and PTokenTypes.java.

You can specify the name of the exported vocabulary with the exportVocab option.   The following grammar generates a vocabulary called V not P.

class P extends Parser;
options {
  exportVocab=V;
}
a : A;

All grammars in the same file with the same vocabulary name contribute to the same vocabulary (and resulting files).  If the grammars were in separate files, on the other hand, they would all overwrite the same file.  For example, the following parser and lexer grammars both may contribute literals and tokens to the MyTokens vocabulary.

class MyParser extends Parser;
options {
  exportVocab=MyTokens;
}
...

class MyLexer extends Lexer;
options {
  exportVocab=MyTokens;
}
... 

testLiterals: Generate literal-testing code

By default, ANTLR will generate code in all lexers to test each token against the literals table (the table generated for literal strings), and change the token type if it matches the table. However, you may suppress this code generation in the lexer by using a grammar option:

class L extends Lexer;
options { testLiterals=false; }
...

If you turn this option off for a lexer, you may re-enable it for specific rules. This is useful, for example, if all literals are keywords, which are special cases of ID:

ID
options { testLiterals=true; }
   : LETTER (LETTER | DIGIT)*
   ;

If you want to test only a portion of a token's text for a match in the literals table, explicitly test the substring within an action using method:

    public int testLiteralsTable(String text, int ttype) {...}

For example, you might want to test the literals table for just the tag word in an HTML tag.
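A sketch of such a rule, assuming helper rules WORD and WS and using the testLiteralsTable method shown above (the rule and names are illustrative only):

OTAG
    :   '<' w:WORD (WS)? '>'
        { _ttype = testLiteralsTable(w.getText(), _ttype); }
    ;

Only the text matched by WORD is checked against the literals table, so the angle brackets and whitespace do not participate in the lookup.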

defaultErrorHandler: Controlling default exception-handling

By default, ANTLR will generate default exception handling code for a parser or tree-parser rule. The generated code will catch any parser exceptions, synchronize to the follow set of the rule, and return. This is a simple and often useful error-handling scheme, but it is not very sophisticated. Eventually, you will want to install your own exception handlers. ANTLR will automatically turn off generation of default exception handling for any rule where an exception handler is specified. You may also explicitly control generation of default exception handling on a per-grammar or per-rule basis. For example, this will turn off default error-handling for the entire grammar, but turn it back on for rule "r":

class P extends Parser;
options {defaultErrorHandler=false;}

r
options {defaultErrorHandler=true;}
: A B C;

For more information on exception handling in the lexer, see the error handling documentation.

codeGenMakeSwitchThreshold: controlling code generation

ANTLR will optimize lookahead tests by generating a switch statement instead of a series of if/else tests for rules containing a sufficiently large number of alternates whose lookahead is strictly LL(1). The option codeGenMakeSwitchThreshold controls this test. You may want to change this to control optimization of the parser. You may also want to disable it entirely for debugging purposes, by setting it to a large number:

class P extends Parser;
options { codeGenMakeSwitchThreshold=999; }
...

codeGenBitsetTestThreshold: controlling code generation

ANTLR will optimize lookahead tests by generating a bitset test instead of an if statement, for very complex lookahead sets. The option codeGenBitsetTestThreshold controls this test. You may want to change this to control optimization of the parser:

class P extends Parser;
// make bitset if test involves five or more terms
options { codeGenBitsetTestThreshold=5; }
...

You may also want to disable it entirely for debugging purposes, by setting it to a large number:

class P extends Parser;
options { codeGenBitsetTestThreshold=999; }
...      

buildAST: Automatic AST construction

In a Parser, you can tell ANTLR to generate code to construct ASTs corresponding to the structure of the recognized syntax. The option, if set to true, will cause ANTLR to generate AST-building code. With this option set, you can then use all of the AST-building syntax and support methods.
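For example, with buildAST set you can use the AST operators directly in the grammar; a sketch (PLUS and INT are assumed token types from the lexer):

class ExprParser extends Parser;
options { buildAST=true; }

expr :  atom (PLUS^ atom)*
     ;

atom :  INT
     ;

Each PLUS becomes the root of a subtree with the surrounding atoms as children; tokens without operators are simply added as leaf nodes.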

In a Tree-Parser, this option turns on "transform mode", which means an output AST will be generated that is a transformation of the input AST. In a tree-walker, the default action of buildAST is to generate a copy of the portion of the input AST that is walked. Tree-transformation is almost identical to building an AST in a Parser, except that the input is an AST, not a stream of tokens.

ASTLabelType: Setting label type

When you must define your own AST node type, your actions within the grammar will require lots of downcasting from AST (the default type of any user-defined label) to your tree node type; e.g.,

decl : d:ID {MyAST t=(MyAST)#d;}
     ;

This makes your code a pain to type in and hard to read.  To avoid this, use the grammar option ASTLabelType to have ANTLR automatically do casts and define labels of the appropriate type.

class ExprParser extends Parser;

options {
  buildAST=true;
  ASTLabelType = "MyAST";
}

expr : a:term ;

The type of #a within an action is MyAST not AST.

charVocabulary: Setting the lexer character vocabulary

ANTLR processes Unicode. Because of this, ANTLR cannot make any assumptions about the character set in use, else it would wind up generating huge lexers. Instead ANTLR assumes that the character literals, string literals, and character ranges used in the lexer constitute the entire character set of interest. For example, in this lexer:

class L extends Lexer;
A : 'a';
B : 'b';
DIGIT : '0' .. '9';

The implied character set is { 'a', 'b', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' }. This can produce unexpected results if you assume that the normal ASCII character set is always used. For example, in:

class L extends Lexer;
A : 'a';
B : 'b';
DIGIT : '0' .. '9';
STRING: '"' (~'"')* '"';

The lexer rule STRING will only match strings containing 'a', 'b' and the digits, which is usually not what you want. To control the character set used by the lexer, use the charVocabulary option. This example will use a general eight-bit character set.

class L extends Lexer;
options { charVocabulary = '\3'..'\377'; }
...

This example uses the ASCII character set in conjunction with some values from the extended Unicode character set:


class L extends Lexer;
options {
	charVocabulary = '\3'..'\377' | '\u1000'..'\u1fff';
}
...

warnWhenFollowAmbig

[Warning: you should know what you are doing before you use this option.  I deliberately made it a pain to shut warnings off (rather than a single character operator) so you would not just start turning off all the warnings.  I thought for long time before implementing this exact mechanism.  I recommend a comment in front of any use of this option that explains why it is ok to hush the warning.]

This subrule option is true by default and controls the generation of nondeterminism (ambiguity) warnings when comparing the FOLLOW lookahead sets for any subrule with an empty alternative and any closure subrule such as (..)+ and (...)*.  For example, the following simple rule has a nondeterministic subrule, which arises from a language ambiguity: an ELSE clause could attach to the most recent IF or to an outer IF because the construct can nest.

stat	:	"if" expr "then" stat ("else" stat)?
	|	ID ASSIGN expr SEMI
	;

Because the language is ambiguous, the context-free grammar must be ambiguous and the resulting parser nondeterministic (in theory).  However, being the practical language folks that we are, we all know you can trivially solve this problem by having ANTLR resolve conflicts by consuming input as soon as possible; I have yet to see a case where this was the wrong thing to do, by the way.  This option, when set to false, merely informs ANTLR that it has made the correct assumption and can shut off the ambiguity warning related to this subrule and an empty alternative or exit path.  Here is a version of the rule that does not yield a warning message:

 

stat	:	"if" expr "then" stat
		(
		    // standard if-then-else ambig
		    options {
		        warnWhenFollowAmbig=false;
		    }
		:	"else" stat
		)?
	|	ID ASSIGN expr SEMI
	;

One important note: This option does not affect non-empty alternatives.  For example, you will still get a warning for the following subrule between alts 1 and 3 (upon lookahead A):

(
	options {
		warnWhenFollowAmbig=false;
	}
:	A
|	B
|	A
)

Further, this option is insensitive to lookahead.  Only completely empty alternatives count as candidate alternatives for hushing warnings.  So, at k=2, just because ANTLR can see past alternatives with single tokens, you still can get warnings.

Command Line Options

-o outputDir specify the output directory where all output is generated.
-glib supergrammarFile Specify a file with a supergrammar for the generated file.
-debug launch the ParseView debugger upon parser invocation.  Unless you have downloaded and unzipped the debugger over the top of the standard ANTLR distribution, the code emanating from ANTLR with this option will not compile (likewise for Swing).
-html generate an HTML file from your grammar without actions and so on.   This is only a prototype, but seems to do something useful.   It only works for parsers, not lexers or tree parsers.
-docbook generate a docbook SGML file from your grammar without actions and so on.   This is only a prototype, but seems to do something useful.   It only works for parsers, not lexers or tree parsers.
-diagnostic generate a text file from your grammar with a lot of debugging info.
-trace have all rules call traceIn/traceOut.
-traceParser have parser rules call traceIn/traceOut.
-traceLexer have lexer rules call traceIn/traceOut.
-traceTreeParser have tree walker rules call traceIn/traceOut.
-h|-help|--help help message.

Version: $Id: //depot/code/org.antlr/release/antlr-2.7.7/doc/options.html#2 $

Java Runtime Model


Programmer's Interface

In this section, we describe what ANTLR generates after reading your grammar file and how to use that output to parse input. The classes from which your lexer, token, and parser classes are derived are provided as well.

What ANTLR generates

ANTLR generates the following types of files, where MyParser, MyLexer, and MyTreeParser are names of grammar classes specified in the grammar file. You may have an arbitrary number of parsers, lexers, and tree-parsers per grammar file; a separate class file will be generated for each. In addition, token type files will be generated containing the token vocabularies used in the parsers and lexers. One or more token vocabularies may be defined in a grammar file, and shared between different grammars. For example, given the grammar file:

class MyParser extends Parser;
options {
  exportVocab=My;
}
... rules ...

class MyLexer extends Lexer;
options {
  exportVocab=My;
}
... rules ...

class MyTreeParser extends TreeParser;
options {
  exportVocab=My;
}
... rules ...

The following files will be generated:

  • MyParser.java. The parser with member methods for the parser rules.
  • MyLexer.java. The lexer with the member methods for the lexical rules.
  • MyTreeParser.java. The tree-parser with the member methods for the tree-parser rules.
  • MyTokenTypes.java. An interface containing all of the token types defined by your parsers and lexers using the exported vocabulary named My.
  • MyTokenTypes.txt. A text file containing all of the token types, literals, and paraphrases defined by parsers and lexers contributing vocabulary My.

The programmer uses the classes by referring to them:

  1. Create a lexical analyzer. The constructor with no arguments implies that you want to read from standard input.
  2. Create a parser and attach it to the lexer (or other TokenStream).
  3. Call one of the methods in the parser to begin parsing.

If your parser generates an AST, then get the AST value, create a tree-parser, and invoke one of the tree-parser rules using the AST.

MyLexer lex = new MyLexer();
MyParser p =
  new MyParser(lex,user-defined-args-if-any);
p.start-rule();
// and, if you are tree parsing the result...
MyTreeParser tp = new MyTreeParser();
tp.start-rule(p.getAST());

You can also specify the name of the token and/or AST objects that you want the lexer/parser to create. Java's ability to load classes dynamically by name makes this quite painless:

MyLexer lex = new MyLexer();
lex.setTokenObjectClass("mypackage.MyToken");
  // defaults to "antlr.CommonToken"
...
parser.setASTNodeClass("mypackage.MyASTNode");
  // defaults to "antlr.CommonAST"

Make sure you give a fully-qualified class name.
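For example, a custom token class suitable for setTokenObjectClass might look like the following sketch (mypackage.MyToken and the filename field are purely illustrative):

package mypackage;

import antlr.CommonToken;

public class MyToken extends CommonToken {
    // extra per-token information carried through the parse
    private String filename;

    public MyToken() { super(); } // ANTLR instantiates tokens via the no-arg constructor

    public void setFilename(String f) { filename = f; }
    public String getFilename()       { return filename; }
}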

The lexer and parser can throw TokenStreamExceptions (for example, a TokenStreamIOException wrapping an I/O error) as well as RecognitionExceptions, which you must catch:

  CalcLexer lexer =
    new CalcLexer(new DataInputStream(System.in));
  CalcParser parser = new CalcParser(lexer);
  // Parse the input expression
  try {
    parser.expr();
  }
  catch (TokenStreamException tse) {
    System.err.println("token stream exception: "+tse);
  }
  catch(RecognitionException e) {
    System.err.println("exception: "+e);
  }

Multiple Lexers/Parsers With Shared Input State

Occasionally, you will want two parsers or two lexers to share input state; that is, you will want them to pull input from the same source token stream or character stream.   The section on multiple lexer "states" describes such a situation.

ANTLR factors the input variables such as line number, guessing state, input stream, etc... into a separate object so that another lexer or parser could share that state.  The LexerSharedInputState and ParserSharedInputState embody this factoring.   Method getInputState() can be used on either CharScanner or Parser objects.  Here is how to construct two lexers sharing the same input stream:

// create Java lexer
JavaLexer mainLexer = new JavaLexer(input);
// create javadoc lexer; attach to shared
// input state of java lexer
JavaDocLexer doclexer =
  new JavaDocLexer(mainLexer.getInputState());

Parsers with shared input state can be created similarly:

JavaDocParser jdocparser =
  new JavaDocParser(getInputState());
jdocparser.content(); // go parse the comment

Sharing state is easy, but what happens upon an exception during the execution of the "subparser"?  What about syntactic predicate execution?  It turns out that invoking a subparser with the same input state is exactly the same as calling another rule in the same parser as far as error handling and syntactic predicate guessing are concerned.  If the parser is guessing before the call to the subparser, the subparser must continue guessing, right?  Exceptions thrown inside the subparser must exit the subparser and return to the enclosing error handler or syntactic predicate handler.

Parser Implementation

Parser Class

ANTLR generates a parser class (an extension of LLkParser) that contains a method for every rule in your grammar. The general format looks like:

public class MyParser extends LLkParser
    implements MyLexerTokenTypes
{
  protected MyParser(TokenBuffer tokenBuf, int k) {
    super(tokenBuf,k);
    tokenNames = _tokenNames;
  }
  public MyParser(TokenBuffer tokenBuf) {
    this(tokenBuf,1);
  }
  protected MyParser(TokenStream lexer, int k) {
    super(lexer,k);
    tokenNames = _tokenNames;
  }
  public MyParser(TokenStream lexer) {
    this(lexer,1);
  }
  public MyParser(ParserSharedInputState state) {
    super(state,1);
    tokenNames = _tokenNames;
  }
  ...
  // add your own constructors here...
  rule-definitions
}
  

Parser Methods

ANTLR generates recursive-descent parsers, therefore, every rule in the grammar will result in a method that applies the specified grammatical structure to the input token stream. The general form of a parser method looks like:

public void rule()
  throws RecognitionException,
         TokenStreamException
{
  init-action-if-present
  if ( lookahead-predicts-production-1 ) {
     code-to-match-production-1
  }
  else if ( lookahead-predicts-production-2 ) {
     code-to-match-production-2
  }
  ...
  else if ( lookahead-predicts-production-n ) {
     code-to-match-production-n
  }
  else {
    // syntax error
    throw new NoViableAltException(LT(1));
  }
}
  This code results from a rule of the form:  
rule:   production-1
    |   production-2
   ...
    |   production-n
    ;
  

If you have specified arguments and a return type for the rule, the method header changes to:

/* generated from:
 *    rule(user-defined-args)
 *      returns return-type : ... ;
 */
public return-type rule(user-defined-args)
  throws RecognitionException,
         TokenStreamException
{
  ...
}
  

Token types are integers and we make heavy use of bit sets and range comparisons to avoid excessively-long test expressions.

EBNF Subrules

Subrules are like unlabeled rules, consequently, the code generated for an EBNF subrule mirrors that generated for a rule. The only difference is induced by the EBNF subrule operators that imply optionality or looping.

(...)? optional subrule. The only difference between the code generated for an optional subrule and a rule is that there is no default else-clause to throw an exception--the recognition continues on having ignored the optional subrule.

{
  init-action-if-present
  if ( lookahead-predicts-production-1 ) {
     code-to-match-production-1
  }
  else if ( lookahead-predicts-production-2 ) {
     code-to-match-production-2
  }
  ...
  else if ( lookahead-predicts-production-n ) {
     code-to-match-production-n
  }
}
  

Not testing the optional paths of optional blocks has the potential to delay the detection of syntax errors.

(...)* closure subrule. A closure subrule is like an optional looping subrule, therefore, we wrap the code for a simple subrule in a "forever" loop that exits whenever the lookahead is not consistent with any of the alternative productions.

{
  init-action-if-present
loop:
  do {
    if ( lookahead-predicts-production-1 ) {
       code-to-match-production-1
    }
    else if ( lookahead-predicts-production-2 ) {
       code-to-match-production-2
    }
    ...
    else if ( lookahead-predicts-production-n ) {
       code-to-match-production-n
    }
    else {
      break loop;
    }
  }
  while (true);
}
  

While there is no need to explicitly test the lookahead for consistency with the exit path, the grammar analysis phase computes the lookahead of what follows the block. The lookahead of what follows must be disjoint from the lookahead of each alternative; otherwise, the loop will not know when to terminate. For example, consider the following subrule that is nondeterministic upon token A.

( A | B )* A
  

Upon A, should the loop continue or exit? One must also ask if the loop should even begin. Because you cannot answer these questions with only one symbol of lookahead, the decision is non-LL(1).

Not testing the exit paths of closure loops has the potential to delay the detection of syntax errors.

As a special case, a closure subrule with one alternative production results in:

{
  init-action-if-present
loop:
  while ( lookahead-predicts-production-1 ) {
       code-to-match-production-1
  }
}
   

This special case results in smaller, faster, and more readable code.

(...)+ positive closure subrule. A positive closure subrule is a loop around a series of production prediction tests like a closure subrule. However, we must guarantee that at least one iteration of the loop is done before proceeding to the construct beyond the subrule.

{
  int _cnt = 0;
  init-action-if-present
loop:
  do {
    if ( lookahead-predicts-production-1 ) {
       code-to-match-production-1
    }
    else if ( lookahead-predicts-production-2 ) {
       code-to-match-production-2
    }
    ...
    else if ( lookahead-predicts-production-n ) {
       code-to-match-production-n
    }
    else if ( _cnt>1 ) {
      // lookahead predicted nothing and we've
      // done an iteration
      break loop;
    }
    else {
      throw new NoViableAltException(LT(1));
    }
    _cnt++;  // track times through the loop
  }
  while (true);
}
  

While there is no need to explicitly test the lookahead for consistency with the exit path, the grammar analysis phase computes the lookahead of what follows the block. The lookahead of what follows must be disjoint from the lookahead of each alternative; otherwise, the loop will not know when to terminate. For example, consider the following subrule that is nondeterministic upon token A.

( A | B )+ A
  

Upon A, should the loop continue or exit? Because you cannot answer this with only one symbol of lookahead, the decision is non-LL(1).

Not testing the exit paths of closure loops has the potential to delay the detection of syntax errors.

You might ask why we do not have a while loop that tests to see if the lookahead is consistent with any of the alternatives (rather than having series of tests inside the loop with a break). It turns out that we can generate smaller code for a series of tests than one big one. Moreover, the individual tests must be done anyway to distinguish between alternatives so a while condition would be redundant.

As a special case, if there is only one alternative, the following is generated:

{
  init-action-if-present
  do {
    code-to-match-production-1
  }
  while ( lookahead-predicts-production-1 );
}
  

Optimization. When there are a large (where large is user-definable) number of strictly LL(1) prediction alternatives, then a switch-statement can be used rather than a sequence of if-statements. The non-LL(1) cases are handled by generating the usual if-statements in the default case. For example:

switch ( LA(1) ) {
  case KEY_WHILE :
  case KEY_IF :
  case KEY_DO :
    statement();
    break;
  case KEY_INT :
  case KEY_FLOAT :
    declaration();
    break;
  default :
    // do whatever else-clause is appropriate
}
  

This optimization relies on the compiler building a more direct jump (via jump table or hash table) to the ith production matching code. This is also more readable and faster than a series of bit set membership tests.

Production Prediction

LL(1) prediction. Any LL(1) prediction test is a simple set membership test. If the set is a singleton set (a set with only one element), then an integer token type == comparison is done. If the set degree is greater than one, a bit set is created and the single input token type is tested for membership against that set. For example, consider the following rule:

a : A | b ;
b : B | C | D | E | F;
  

The lookahead that predicts production one is {A} and the lookahead that predicts production two is {B,C,D,E,F}. The following code would be generated by ANTLR for rule a (slightly cleaned up for clarity):

public void a() {
  if ( LA(1)==A ) {
    match(A);
  }
  else if (token_set1.member(LA(1))) {
    b();
  }
}
  

The prediction for the first production can be done with a simple integer comparison, but the second alternative uses a bit set membership test for speed, which you probably didn't recognize as testing LA(1) member {B,C,D,E,F}. The complexity threshold above which bitset-tests are generated is user-definable.

We use arrays of long ints (64 bits) to hold bit sets. The ith element of a bitset is stored in the word number i/64 and the bit position within that word is i % 64. The divide and modulo operations are extremely expensive, but fortunately a strength reduction can be done. Dividing by a power of two is the same as shifting right and modulo a power of two is the same as masking with that power minus one. All of these details are hidden inside the implementation of the BitSet class in the package antlr.collections.impl.
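A minimal sketch of the strength-reduced membership test follows; it mirrors the word layout described above, but the class itself is illustrative and is not the antlr.collections.impl.BitSet source:

class SimpleBitSet {
    private final long[] bits;                // 64 bits per word

    SimpleBitSet(int nbits) {
        bits = new long[(nbits + 63) >> 6];   // enough words to hold nbits
    }

    void add(int el) {
        bits[el >> 6] |= 1L << (el & 0x3F);   // el/64 via >>6, el%64 via &63
    }

    boolean member(int el) {
        int word = el >> 6;                   // divide by 64 -> shift right by 6
        if (word >= bits.length) return false;
        return (bits[word] & (1L << (el & 0x3F))) != 0L; // modulo 64 -> mask with 63
    }
}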

The various bit sets needed by ANTLR are created and initialized in the generated parser (or lexer) class.

Approximate LL(k) prediction. An extension of LL(1)...basically we do a series of up to k bit set tests rather than the single test we do in LL(1) prediction. Each decision will use a different amount of lookahead, with LL(1) being the dominant decision type.

Production Element Recognition

Token references. Token references are translated to:

match(token-type);
  

For example, a reference to token KEY_BEGIN results in:

match(KEY_BEGIN);
  

where KEY_BEGIN will be an integer constant defined in the MyParserTokenType interface generated by ANTLR.

String literal references. String literal references are references to automatically generated tokens to which ANTLR automatically assigns a token type (one for each unique string). String references are translated to:

match(T);
  

where T is the token type assigned by ANTLR to that token.

Character literal references. Referencing a character literal implies that the current rule is a lexical rule. Single characters, 't', are translated to:

match('t');
  

which can be manually inlined with:

if ( c=='t' ) consume();
else throw new MismatchedCharException(
               "mismatched char: '"+(char)c+"'");
   

if the method call proves slow (at the cost of space).

Wildcard references. In lexical rules, the wildcard is translated to:

consume();
  

which simply gets the next character of input without doing a test.

References to the wildcard in a parser rule results in the same thing except that the consume call will be with respect to the parser.

Not operator. When operating on a token, ~T is translated to:

matchNot(T);
 

When operating on a character literal, ~'t' is translated to:

matchNot('t');
  

Range operator. In parser rules, the range operator (T1..T2) is translated to:

matchRange(T1,T2);
   

In a lexical rule, the range operator for characters c1..c2 is translated to:

matchRange(c1,c2);
  

Labels. Element labels on atom references become Token references in parser rules and ints in lexical rules. For example, the parser rule:

a : id:ID {System.out.println("id is "+id);} ;
  would be translated to:  
public void a() {
  Token id = null;
  id = LT(1);
  match(ID);
  System.out.println("id is "+id);
}
  For lexical rules such as:  
ID : w:. {System.out.println("w is "+(char)w);};
  the following code would result:  
public void ID() {
  int w = 0;
  w = c;
  consume(); // match wildcard (anything)
  System.out.println("w is "+(char)w);
}
  

Labels on rule references result in AST references, when generating trees, of the form label_ast.

Rule references. Rule references become method calls. Arguments to rules become arguments to the invoked methods. Return values are assigned like Java assignments. Consider rule reference i=list[1] to rule:

list[int scope] returns int
    :   { return scope+3; }
    ;
  The rule reference would be translated to:  
i = list(1);
  

Semantic actions. Actions are translated verbatim to the output parser or lexer except for the translations required for AST generation and the following:

  • $FOLLOW(r): FOLLOW set name for rule r
  • $FIRST(r): FIRST set name for rule r

Omitting the rule argument implies you mean the current rule. The result type is a BitSet, which you can test via $FIRST(a).member(LBRACK) etc...

Here is a sample rule:

a : A {System.out.println($FIRST(a));} B
  exception
    catch [RecognitionException e] {    
        if ( $FOLLOW.member(SEMICOLON) ) {
        consumeUntil(SEMICOLON);
    }
    else {
        consume();
    }
    }
  ;
Results in
public final void a() throws RecognitionException, TokenStreamException {  
    try {
        match(A);
        System.out.println(_tokenSet_0);
        match(B);
    }
    catch (RecognitionException e) {
        if ( _tokenSet_1.member(SEMICOLON) ) {
            consumeUntil(SEMICOLON);
        }
        else {
            consume();
        }
    }
}

To add members to a lexer or parser class definition, add the class member definitions enclosed in {} immediately following the class specification, for example:

class MyParser;
{
   protected int i;
   public MyParser(TokenStream lexer,
        int aUsefulArgument) {
      i = aUsefulArgument;
   }
}
... rules ...

ANTLR collects everything inside the {...} and inserts it in the class definition before the rule-method definitions. When generating C++, this may have to be extended to allow actions after the rules due to the wacky ordering restrictions of C++.

Standard Classes

ANTLR constructs parser classes that are subclasses of the antlr.LLkParser class, which is a subclass of the antlr.Parser class. We summarize the more important members of these classes here. See Parser.java and LLkParser.java for details of the implementation.

public abstract class Parser {
   protected ParserSharedInputState inputState;
   protected ASTFactory astFactory;
   public abstract int LA(int i);
   public abstract Token LT(int i);
   public abstract void consume();
   public void consumeUntil(BitSet set) { ... }
   public void consumeUntil(int tokenType) { ... }
   public void match(int t)
      throws MismatchedTokenException { ... }
   public void matchNot(int t)
      throws MismatchedTokenException { ... }
   ...
}

public class LLkParser extends Parser {
   public LLkParser(TokenBuffer tokenBuf, int k_)
     { ... }
   public LLkParser(TokenStream lexer, int k_)
     { ... }
   public int LA(int i) { return input.LA(i); }
   public Token LT(int i) { return input.LT(i); }
   public void consume() { input.consume(); }
   ...
}

Lexer Implementation

Lexer Form

The lexers produced by ANTLR are a lot like the parsers produced by ANTLR. The only major differences are that (a) scanners use characters instead of tokens, and (b) ANTLR generates a special nextToken rule for each scanner which is a production containing each public lexer rule as an alternate. The name of the lexical grammar class provided by the programmer results in a subclass of CharScanner, for example

public class MyLexer extends antlr.CharScanner
  implements MyLexerTokenTypes, TokenStream
{
  public MyLexer(InputStream in) {
          this(new ByteBuffer(in));
  }
  public MyLexer(Reader in) {
          this(new CharBuffer(in));
  }
  public MyLexer(InputBuffer ib) {
          this(new LexerSharedInputState(ib));
  }
  public MyLexer(LexerSharedInputState state) {
          super(state);
          caseSensitiveLiterals = true;
          setCaseSensitive(true);
          literals = new Hashtable();
  }

  public Token nextToken() throws TokenStreamException {
     scanning logic
    ...
  }
  recursive and other non-inlined lexical methods
  ...
}
  

When an ANTLR-generated parser needs another token from its lexer, it calls a method called nextToken. The general form of the nextToken method is:

public Token nextToken()
  throws TokenStreamException {
  int tt;
  for (;;) {
     try {
        resetText();
        switch ( c ) {
        case for each char predicting lexical rule
           call lexical rule gets token type -> tt
        default :
           throw new NoViableAltForCharException(
               "bad char: '"+(char)c+"'");
        }
        if ( tt!=Token.SKIP ) {
           return makeToken(tt);
        }
     }
     catch (RecognitionException ex) {
        reportError(ex.toString());
     }
  }
}
  

For example, the lexical rules:

lexclass Lex;

WS   : ('\t' | '\r' | ' ') {_ttype=Token.SKIP;} ;
PLUS : '+';
MINUS: '-';
INT  : ( '0'..'9' )+ ;
ID   : ( 'a'..'z' )+ ;
UID  : ( 'A'..'Z' )+ ;
  would result in something like:  
public class Lex extends CharScanner
  implements TTokenTypes {
...
public Token nextToken()
    throws TokenStreamException {
    int _tt = Token.EOF_TYPE;
    for (;;) {
    try {
       resetText();
       switch ( _c ) {
       case '\t': case '\r': case ' ': 
           _tt=mWS();
           break;
       case '+': 
           _tt=mPLUS();
           break;
       case '-': 
           _tt=mMINUS();
           break;
       case '0': case '1': case '2': case '3': 
       case '4': case '5': case '6': case '7': 
       case '8': case '9': 
           _tt=mINT();
           break;
       case 'a': case 'b': case 'c': case 'd': 
       case 'e': case 'f': case 'g': case 'h': 
       case 'i': case 'j': case 'k': case 'l': 
       case 'm': case 'n': case 'o': case 'p': 
       case 'q': case 'r': case 's': case 't': 
       case 'u': case 'v': case 'w': case 'x': 
       case 'y': case 'z': 
           _tt=mID();
           break;
       case 'A': case 'B': case 'C': case 'D': 
       case 'E': case 'F': case 'G': case 'H': 
       case 'I': case 'J': case 'K': case 'L': 
       case 'M': case 'N': case 'O': case 'P': 
       case 'Q': case 'R': case 'S': case 'T': 
       case 'U': case 'V': case 'W': case 'X': 
       case 'Y': case 'Z': 
           _tt=mUID();
           break;
       case EOF_CHAR :
           _tt = Token.EOF_TYPE;
           break;
       default :
          throw new NoViableAltForCharException(
               "invalid char "+_c);
       }
       if ( _tt!=Token.SKIP ) {
           return makeToken(_tt);
       }
    }  // try
	catch (RecognitionException ex) {
	  reportError(ex.toString());
	}
	}  // for
}

public int mWS()
    throws RecognitionException,
           CharStreamException,
           TokenStreamException {
    int _ttype = WS;
    switch ( _c) {
    case '\t': 
        match('\t');
        break;
    case '\r': 
        match('\r');
        break;
    case ' ': 
        match(' ');
        break;
    default :
    {
        throw new NoViableAltForCharException(
               "no viable alternative for char: "+(char)_c);
    }
    }
     _ttype = Token.SKIP;
    return _ttype;
}

public int mPLUS()
    throws RecognitionException,
           CharStreamException,
           TokenStreamException {
    int _ttype = PLUS;
    match('+');
    return _ttype;
}

public int mMINUS()
    throws RecognitionException,
           CharStreamException,
           TokenStreamException {

    int _ttype = MINUS;
    match('-');
    return _ttype;
}

public int mINT()
    throws RecognitionException,
           CharStreamException,
           TokenStreamException {

    int _ttype = INT;
    {
    int _cnt=0;
    _loop:
    do {
        if ( _c>='0' && _c<='9')
          { matchRange('0','9'); }
        else
        if ( _cnt>=1 ) break _loop;
        else {
           throw new NoViableAltForCharException(
              "no viable alternative for char: "+
                (char)_c);
        }
        _cnt++;
    } while (true);
    }
    return _ttype;
}

public int mID()
    throws RecognitionException,
           CharStreamException,
           TokenStreamException {
    int _ttype = ID;
    {
    int _cnt=0;
    _loop:
    do {
        if ( _c>='a' && _c<='z')
        { matchRange('a','z'); }
        else
        if ( _cnt>=1 ) break _loop;
        else {
            throw new NoViableAltForCharException(
               "no viable alternative for char: "+
                 (char)_c);
        }
        _cnt++;
        } while (true);
    }
    return _ttype;
}

public int mUID()
    throws RecognitionException,
           CharStreamException,
           TokenStreamException {

    int _ttype = UID;
    {
    int _cnt=0;
    _loop:
    do {
        if ( _c>='A' && _c<='Z')
        { matchRange('A','Z'); }
        else
        if ( _cnt>=1 ) break _loop;
        else {
            throw new NoViableAltForCharException(
               "no viable alternative for char: "+
                 (char)_c);
        }
        _cnt++;
    } while (true);
    }
    return _ttype;
}

}
  

ANTLR-generated lexers assume that you will be reading streams of characters. If this is not the case, you must create your own lexer.

Creating Your Own Lexer

To create your own lexer, the Java class that will do the lexing must implement the interface TokenStream, which simply states that you must be able to return a stream of tokens via nextToken:

/**This interface allows any object to
 * pretend it is a stream of tokens.
 * @author Terence Parr, MageLang Institute
 */
public interface TokenStream {
  public Token nextToken();
}
  

ANTLR will not generate a lexer if you do not specify a lexical class.
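For example, a trivial hand-built lexer might look like the following sketch; WordLexer, the WORD token type value, and the whitespace-splitting behavior are made up for illustration, and only TokenStream, Token, and CommonToken come from ANTLR:

import antlr.CommonToken;
import antlr.Token;
import antlr.TokenStream;

public class WordLexer implements TokenStream {
    public static final int WORD = 4;         // assumed token type value
    private final String[] chunks;
    private int pos = 0;

    public WordLexer(String input) {
        chunks = input.trim().split("\\s+");  // one WORD token per chunk
    }

    public Token nextToken() {
        if (pos >= chunks.length) {
            return new CommonToken(Token.EOF_TYPE, "<eof>");
        }
        return new CommonToken(WORD, chunks[pos++]);
    }
}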

Launching a parser with a non-ANTLR-generated lexer is the same as launching a parser with an ANTLR-generated lexer:

HandBuiltLexer lex = new HandBuiltLexer(...);
MyParser p = new MyParser(lex);
p.start-rule();

The parser does not care what kind of object you use for scanning as long as it can answer nextToken.

If you build your own lexer, and the token values are also generated by that lexer, then you should inform the ANTLR-generated parsers about the token type values generated by that lexer. Use the importVocab in the parsers that use the externally-generated token set, and create a token definition file following the requirements of the importVocab option.

Lexical Rules

Lexical rules are essentially the same as parser rules except that lexical rules apply a structure to a series of characters rather than a series of tokens. As with parser rules, each lexical rule results in a method in the output lexer class.

Alternative blocks. Consider a simple series of alternatives within a block:

FORMAT : 'x' | 'f' | 'd';
  

The lexer would contain the following method:

public int mFORMAT() {
  if ( c=='x' ) {
    match('x');
  }
  else if ( c=='f' ) {
    match('f');
  }
  else if ( c=='d' ) {
    match('d');
  }
  else {
    throw new NoViableAltForCharException(
        "no viable alternative: '"+(char)c+"'");
  }
  return FORMAT;
}
  

The only real differences between lexical methods and grammar methods are that lookahead prediction expressions do character comparisons rather than LA(i) comparisons, match matches characters instead of tokens, a return is added to the bottom of the rule, and lexical methods throw CharStreamException objects in addition to TokenStreamException and RecognitionException objects.

Optimization: Non-Recursive lexical rules. Rules that do not directly or indirectly call themselves can be inlined into the lexer entry method: nextToken. For example, the common identifier rule would be placed directly into the nextToken method. That is, rule:

ID  :   ( 'a'..'z' )+
    ;
 

would not result in a method in your lexer class. This rule would become part of the resulting lexer as it would probably be inlined by ANTLR:

public Token nextToken() {
  switch ( c ) {
  cases for operators and such here
  case 'a': case 'b': case 'c': case 'd': // chars that predict ID token
  case 'e': case 'f': case 'g': case 'h':
  case 'i': case 'j': case 'k': case 'l':
  case 'm': case 'n': case 'o': case 'p':
  case 'q': case 'r': case 's': case 't':
  case 'u': case 'v': case 'w': case 'x':
  case 'y': case 'z':
    while ( c>='a' && c<='z' ) {
      matchRange('a','z');
    }
    return makeToken(ID);
  default :
    check harder stuff here like rules
      beginning with other characters
}
  

If not inlined, the method for scanning identifiers would look like:

public int mID() {
  while ( c>='a' && c<='z' ) {
    matchRange('a','z');
  }
  return ID;
}
  

where token names are converted to method names by prefixing them with the letter m. The nextToken method would become:

public Token nextToken() {
  switch ( c ) {
  cases for operators and such here
  case 'a': case 'b': case 'c': case 'd': // chars that predict ID token
  case 'e': case 'f': case 'g': case 'h':
  case 'i': case 'j': case 'k': case 'l':
  case 'm': case 'n': case 'o': case 'p':
  case 'q': case 'r': case 's': case 't':
  case 'u': case 'v': case 'w': case 'x':
  case 'y': case 'z':
    return makeToken(mID());
  default :
    check harder stuff here like rules
      beginning with other characters
}
  

Note that this type of range loop is so common that it should probably be optimized to:

while ( c>='a' && c<='z' ) {
  consume();
}
  

Optimization: Recursive lexical rules. Lexical rules that are directly or indirectly recursive are not inlined. For example, consider the following rule that matches nested actions:

ACTION
    :   '{' ( ACTION | ~'}' )* '}'
    ;
  

ACTION would result in (assuming a character vocabulary of 'a'..'z', '{', '}'):

public int mACTION()
    throws RecognitionException,
           CharStreamException,
           TokenStreamException {

    int _ttype = ACTION;
    match('{');
    {
    _loop:
    do {
        switch ( _c) {
        case '{':
            mACTION();
            break;
        case 'a': case 'b': case 'c': case 'd':
        case 'e': case 'f': case 'g': case 'h':
        case 'i': case 'j': case 'k': case 'l':
        case 'm': case 'n': case 'o': case 'p':
        case 'q': case 'r': case 's': case 't':
        case 'u': case 'v': case 'w': case 'x':
        case 'y': case 'z':
            matchNot('}');
            break;
        default :
            break _loop;
        }
    } while (true);
    }
    match('}');
    return _ttype;
}
       

Token Objects

The basic token knows only about a token type:

public class Token {
  // constants
  public static final int MIN_USER_TYPE = 3;
  public static final int INVALID_TYPE = 0;
  public static final int EOF_TYPE = 1;
  public static final int SKIP = -1;
  
  // each Token has at least a token type
  int type=INVALID_TYPE;
  
  // the illegal token object
  public static Token badToken =
    new Token(INVALID_TYPE, "");
  
  public Token() {;}
  public Token(int t) { type = t; }
  public Token(int t, String txt) {
    type = t; setText(txt);
  }

  public void setType(int t) { type = t; }
  public void setLine(int l) {;}
  public void setColumn(int c) {;}
  public void setText(String t) {;}
  
  public int getType() { return type; }
  public int getLine() { return 0; }
  public int getColumn() { return 0; }
  public String getText() {...}
}

The raw Token class is not very useful.  ANTLR supplies a "common" token class that it uses by default, which contains the line number and text associated with the token:

public class CommonToken extends Token {
  // most tokens will want line, text information
  int line;
  String text = null;
 
  public CommonToken() {}
  public CommonToken(String s)  { text = s; }
  public CommonToken(int t, String txt) {
    type = t;
    setText(txt);
  }

  public void setLine(int l)    { line = l; }
  public int  getLine()         { return line; }
  public void setText(String s) { text = s; }
  public String getText()       { return text; }
}

ANTLR will generate an interface that defines the types of tokens in a token vocabulary. Parser and lexers that share this token vocabulary are generated such that they implement the resulting token types interface:

public interface MyLexerTokenTypes {
  public static final int ID = 2;
  public static final int BEGIN = 3;
  ...
}

ANTLR defines a token object for use with the TokenStreamHiddenTokenFilter object called CommonHiddenStreamToken:

public class CommonHiddenStreamToken
  extends CommonToken {
  protected CommonHiddenStreamToken hiddenBefore;
  protected CommonHiddenStreamToken hiddenAfter;

  public CommonHiddenStreamToken
    getHiddenAfter() {...}
  public CommonHiddenStreamToken
    getHiddenBefore() {...}
}

Hidden tokens are woven in among the normal tokens.  Note that, for garbage collection reasons, hidden tokens never point back to normal tokens (which prevents holding a linked list of the entire token stream in memory).
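
For example, once the TokenStreamHiddenTokenFilter has attached hidden tokens, you can walk the whitespace or comments that follow a real token.  This is only a sketch and assumes the token t really is a CommonHiddenStreamToken:

// Print the hidden tokens (whitespace, comments, ...) that follow token 't'.
CommonHiddenStreamToken h =
    ((CommonHiddenStreamToken)t).getHiddenAfter();
while ( h != null ) {
    System.out.println("hidden: \"" + h.getText() + "\"");
    h = h.getHiddenAfter();
}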

Token Lookahead Buffer

The parser must always have fast access to k symbols of lookahead. In a world without syntactic predicates, a simple buffer of k Token references would suffice. However, given that even LL(1) ANTLR parsers must be able to backtrack, an arbitrarily-large buffer of Token references must be maintained. LT(i) looks into the token buffer.

Fortunately, the parser itself does not implement the token-buffering and lookahead algorithm. That is handled by the TokenBuffer object. We begin the discussion of lookahead by providing an LL(k) parser framework:

public class LLkParser extends Parser {
   TokenBuffer input;
   public int LA(int i) {
      return input.LA(i);
   }
   public Token LT(int i) {
      return input.LT(i);
   }
   public void consume() {
      input.consume();
   }
}
       

All lookahead-related calls are simply forwarded to the TokenBuffer object. In the future, some simple caching may be performed in the parser itself to avoid the extra indirection, or ANTLR may generate the call to input.LT(i) directly.

The TokenBuffer object caches the token stream emitted by the scanner. It supplies LT() and LA() methods for accessing the kth lookahead token or token type, as well as methods for consuming tokens, guessing, and backtracking.

public class TokenBuffer {
   ...
   /** Mark another token for
    *  deferred consumption */
   public final void consume() {...}

   /** Get a lookahead token */
   public final Token LT(int i) { ... }

   /** Get a lookahead token value */
   public final int LA(int i) { ... }

   /**Return an integer marker that can be used to
    * rewind the buffer to its current state. */
   public final int mark() { ... }

   /**Rewind the token buffer to a marker.*/
   public final void rewind(int mark) { ... }
}

To begin backtracking, a mark is issued, which makes the TokenBuffer record the current position so that it can rewind the token stream. A subsequent rewind directive will reset the internal state to the point before the last mark.

Consider the following rule that employs backtracking:

stat:   (list EQUAL) => list EQUAL list
    |   list
    ;
list:   LPAREN (ID)* RPAREN
    ;
 

Something like the following code would be generated:

public void stat()
  throws RecognitionException,
         TokenStreamException
{
  boolean synPredFailed = false;
  if ( LA(1)==LPAREN ) { // check lookahead
    int marker = tokenBuffer.mark();
    try {
      list();
      match(EQUAL);
      synPredFailed = false;
    }
    catch (RecognitionException e) {
      tokenBuffer.rewind(marker);
      synPredFailed = true;
    }
  }
  if ( LA(1)==LPAREN && !synPredFailed ) {
    // test prediction of alt 1
    list();
    match(EQUAL);
    list();
  }
  else if ( LA(1)==LPAREN ) {
    list();
  }
}
      

The token lookahead buffer uses a circular token buffer to perform quick indexed access to the lookahead tokens. The circular buffer is expanded as necessary to calculate LT(i) for arbitrary i. TokenBuffer.consume() does not actually read more tokens. Instead, it defers the read by counting how many tokens have been consumed, and then adjusts the token buffer and/or reads new tokens when LA() or LT() is called.
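
The following is only a thumbnail sketch of that deferred-consumption idea, written against the antlr.TokenStream interface; the real antlr.TokenBuffer uses a growable circular array and releases old tokens, which this toy version does not:

import antlr.Token;
import antlr.TokenStream;
import antlr.TokenStreamException;
import java.util.ArrayList;
import java.util.List;

/** Thumbnail sketch of deferred consumption; NOT the real antlr.TokenBuffer. */
class SketchTokenBuffer {
    private final TokenStream input;
    private final List<Token> queue = new ArrayList<Token>(); // tokens read so far
    private int consumed = 0;  // how many consume() calls are pending

    SketchTokenBuffer(TokenStream input) { this.input = input; }

    /** Defer the read: just remember that one more token has been used up. */
    public void consume() { consumed++; }

    /** Lazily read from the lexer until lookahead token i is available. */
    public Token LT(int i) throws TokenStreamException {
        while (queue.size() < consumed + i) {
            queue.add(input.nextToken());
        }
        return queue.get(consumed + i - 1);
    }

    public int LA(int i) throws TokenStreamException { return LT(i).getType(); }

    public int mark() { return consumed; }                 // remember position
    public void rewind(int marker) { consumed = marker; }  // backtrack to it
}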

Version: $Id: //depot/code/org.antlr/release/antlr-2.7.7/doc/runtime.html#2 $

ANTLR Tree Construction

ANTLR helps you build intermediate form trees, or abstract syntax trees (ASTs), by providing grammar annotations that indicate what tokens are to be treated as subtree roots, which are to be leaves, and which are to be ignored with respect to tree construction.  As with PCCTS 1.33, you may manipulate trees using tree grammar actions.

It is often the case that programmers either have existing tree definitions or need a special physical structure, thus, preventing ANTLR from specifically defining the implementation of AST nodes. ANTLR specifies only an interface describing minimum behavior. Your tree implementation must implement this interface so ANTLR knows how to work with your trees. Further, you must tell the parser the name of your tree nodes or provide a tree "factory" so that ANTLR knows how to create nodes with the correct type (rather than hardcoding in a new AST() expression everywhere).   ANTLR can construct and walk any tree that satisfies the AST interface.  A number of common tree definitions are provided. Unfortunately, ANTLR cannot parse XML DOM trees since our method names conflict (e.g., getFirstChild()); ANTLR was here first <wink>. Argh!

Notation

In this and other documents, tree structures are represented by a LISP-like notation, for example:

#(A B C)

is a tree with A at the root, and children B and C. This notation can be nested to describe trees of arbitrary structure, for example:

#(A B #(C D E))

is a tree with A at the root, B as a first child, and an entire subtree as the second child. The subtree, in turn, has C at the root and D,E as children.

Controlling AST construction

AST construction in an ANTLR Parser, or AST transformation in a Tree-Parser, is turned on and off by the buildAST option.

From an AST construction and walking point of view, ANTLR considers all tree nodes to look the same (i.e., they appear to be homogeneous).  Through a tree factory or by specification, however, you can instruct ANTLR to create nodes of different types.   See the section below on heterogeneous trees.

Grammar annotations for building ASTs

Leaf nodes

ANTLR assumes that any nonsuffixed token reference or token-range is a leaf node in the resulting tree for the enclosing rule. If no suffixes at all are specified in a grammar, then a Parser will construct a linked-list of the tokens (a degenerate AST), and a Tree-Parser will copy the input AST.

Root nodes

Any token suffixed with the "^" operator is considered a root token. A tree node is constructed for that token and is made the root of whatever portion of the tree has been built so far.

a : A B^ C^ ;

results in tree #(C #(B A)).

First A is matched and made a lonely child, followed by B which is made the parent of the current tree, A. Finally, C is matched and made the parent of the current tree, making it the parent of the B node. Note that the same rule without any operators results in the flat tree A B C.

Turning off standard tree construction

Suffix a token reference with "!" to prevent incorporation of the node for that token into the resulting tree (the AST node for the token is still constructed and may be referenced in actions, it is just not added to the result tree automatically). Suffix a rule reference "!" to indicate that the tree constructed by the invoked rule should not be linked into the tree constructed for the current rule.

Suffix a rule definition with "!" to indicate that tree construction for the rule is to be turned off. Rules and tokens referenced within that rule still create ASTs, but they are not linked into a result tree. The following rule does no automatic tree construction. Actions must be used to set the return AST value, for example:

begin!
    :   INT PLUS i:INT
        { #begin = #(PLUS INT i); }
    ;

For finer granularity, prefix alternatives with "!" to shut off tree construction for that alternative only. This granularity is useful, for example, if you have a large number of alternatives and you only want one to have manual tree construction:

stat:
        ID EQUALS^ expr   // auto construction
    ... some alternatives ...
    |!  RETURN expr
        {#stat = #([IMAGINARY_TOKEN_TYPE] expr);}
    ... more alternatives ...
    ; 

Tree node construction

With automatic tree construction off (but with buildAST on), you must construct your own tree nodes and combine them into tree structures within embedded actions. There are several ways to create a tree node in an action:

  1. use new T(arg) where T is your tree node type and arg is either a single token type, a token type and token text, or a Token.
  2. use ASTFactory.create(arg) where arg is either a single token type, a token type and token text, or a Token. Using the factory is more general than creating a new node directly, as it defers the node-type decision to the factory and can easily be changed for the entire grammar.
  3. use the shorthand notation #[TYPE] or #[TYPE,"text"] or #[TYPE,"text",ASTClassNameToConstruct]. The shorthand notation results in a call to ASTFactory.create() with any specified arguments.
  4. use the shorthand notation #id, where id is either a token matched in the rule, a label, or a rule-reference.

To construct a tree structure from a set of nodes, you can set the first-child and next-sibling references yourself or call the factory make method or use #(...) notation described below.
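
For instance, with buildAST on, an embedded action could assemble the tree #(PLUS INT INT) entirely by hand using the parser's astFactory and the child/sibling setters from the AST interface.  A minimal sketch, assuming PLUS and INT are token types from your vocabulary:

// Hand-built #(PLUS INT INT) inside a parser action (sketch only).
AST root  = astFactory.create(PLUS, "+");
AST left  = astFactory.create(INT, "3");
AST right = astFactory.create(INT, "4");
root.setFirstChild(left);      // the first child hangs off the root
left.setNextSibling(right);    // further children are siblings of the first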

AST Action Translation

In parsers and tree parsers with buildAST set to true, ANTLR will translate portions of user actions in order to make it easier to build ASTs within actions. In particular, the following constructs starting with '#' will be translated:

#label
The AST associated with a labeled token-reference or rule-reference may be accessed as #label. The translation is to a variable containing the AST node built from that token, or the AST returned from the rule.
#rule
When rule is the name of the enclosing rule, ANTLR will translate this into the variable containing the result AST for the rule. This allows you to set the return AST for a rule or examine it from within an action. This can be used when AST generation is on or suppressed for the rule or alternate. For example:
r! : a:A	{ #r = #a; }
Setting the return tree is very useful in combination with normal tree construction because you can have ANTLR do all the work of building a tree and then add an imaginary root node such as:
 
decl : ( TYPE ID )+
       { #decl = #([DECL,"decl"], #decl); }
     ;
ANTLR allows you to assign to #rule anywhere within an alternative of the rule. ANTLR ensures that references of and assignments to #rule within an action force the parser's internal AST construction variables into a stable state. After you assign to #rule, the state of the parser's automatic AST construction variables will be set as if ANTLR had generated the tree rooted at #rule. For example, any children nodes added after the action will be added to the children of #rule.
#label_in
In a tree parser, the input AST associated with a labeled token reference or rule reference may be accessed as #label_in. The translation is to a variable containing the input-tree AST node from which the rule or token was extracted. Input variables are seldom used. You almost always want to use #label instead of #label_in.
 
#id
ANTLR supports the translation of unlabeled token references as a shorthand notation, as long as the token is unique within the scope of a single alternative. In these cases, the use of an unlabeled token reference is identical to using a label. For example, this:

r! : A { #r = #A; }

is equivalent to:


r! : a:A { #r = #a; }
#id_in is given similar treatment to #label_in.
 
#[TOKEN_TYPE] or #[TOKEN_TYPE,"text"] or #[TYPE,"text",ASTClassNameToConstruct]
AST node constructor shorthand. The translation is a call to the ASTFactory.create() method.  For example, #[T] is translated to:
ASTFactory.create(T)
#(root, c1, ..., cn)
AST tree construction shorthand. ANTLR looks for the comma character to separate the tree arguments. Commas within method call tree elements are handled properly; i.e., an element of "foo(#a,34)" is ok and will not conflict with the comma separator between the other tree elements in the tree. This tree construct is translated to a "make tree" call. The "make-tree" call is complex due to the need to simulate variable arguments in languages like Java, but the result will be something like:
ASTFactory.make(root, c1, ..., cn);

In addition to the translation of the #(...) as a whole, the root and each child c1..cn will be translated. Within the context of a #(...) construct, you may use:

  • id or label as a shorthand for #id or #label.
  • [...] as a shorthand for #[...].
  • (...) as a shorthand for #(...).

The target code generator performs this translation with the help of a special lexer that parses the actions and asks the code-generator to create appropriate substitutions for each translated item. This lexer might impose some restrictions on label names (think of C/C++ preprocessor directives).

Invoking parsers that build trees

Assuming that you have defined a lexer L and a parser P in your grammar, you can invoke them sequentially on the system input stream as follows.

L lexer = new L(System.in);
P parser = new P(lexer);
parser.setASTNodeType("MyAST");
parser.startRule();   

If you have set buildAST=true in your parser grammar, then it will build an AST, which can be accessed via parser.getAST(). If you have defined a tree parser called T, you can invoke it with:

T walker = new T();
walker.startRule(parser.getAST()); // walk tree  

If, in addition, you have set buildAST=true in your tree-parser to turn on transform mode, then you can access the resulting AST of the tree-walker:

AST results = walker.getAST();
DumpASTVisitor visitor = new DumpASTVisitor();
visitor.visit(results);

Where DumpASTVisitor is a predefined ASTVisitor implementation that simply prints the tree to the standard output.

You can also get a LISP-like printout of a tree via:

String s = parser.getAST().toStringList();

AST Factories

ANTLR uses a factory pattern to create and connect AST nodes. This is done primarily to separate the tree construction facility from the parser, but it also gives you a hook between the parser and the tree node construction.  Subclass ASTFactory to alter the create methods.

If you are only interested in specifying the AST node type at runtime, use the

setASTNodeType(String className)

method on the parser or factory.  By default, trees are constructed of nodes of type antlr.CommonAST. (You must use the fully-qualified class name).

You can also specify a different class name for each token type to generate heterogeneous trees:

/** Specify an "override" for the Java AST object created for a
 *  specific token.  It is provided as a convenience so
 *  you can specify node types dynamically.  ANTLR sets
 *  the token type mapping automatically from the tokens{...}
 *  section, but you can change that mapping with this method.
 *  ANTLR does its best to statically determine the node
 *  type for generating parsers, but it cannot deal with
 *  dynamic values like #[LT(1)].  In this case, it relies
 *  on the mapping.  Beware differences in the tokens{...}
 *  section and what you set via this method.  Make sure
 *  they are the same.
 *
 *  Set className to null to remove the mapping.
 *
 *  @since 2.7.2
 */
public void setTokenTypeASTNodeType(int tokenType, String className)
	throws IllegalArgumentException;
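
A possible call site, assuming the CalcParser expression example later in this document; exactly how you hand the factory to your parser may differ, so treat the wiring below as a sketch:

// Make every PLUS token produce a PLUSNode at runtime (sketch).
ASTFactory factory = new ASTFactory();
factory.setTokenTypeASTNodeType(CalcParserTokenTypes.PLUS, "PLUSNode");
parser.setASTFactory(factory);  // assumes the usual setASTFactory() hook on the parser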

The ASTFactory has some generically useful methods:

/** Copy a single node with the same Java AST object type.
 *  Ignore the tokenType->Class mapping since you know
 *  the type of the node, t.getClass(), and doing a dup.
 *
 *  clone() is not used because we want all AST creation
 *  to go thru the factory so creation can be
 *  tracked.  Returns null if t is null.
 */
public AST dup(AST t);
/** Duplicate tree including siblings
 * of root.
 */
public AST dupList(AST t);
/**Duplicate a tree, assuming this is a
 * root node of a tree--duplicate that node
 * and what's below; ignore siblings of root
 * node.
 */
public AST dupTree(AST t);

Heterogeneous ASTs

Each node in an AST must encode information about the kind of node it is; for example, is it an ADD operator or a leaf node such as an INT?  There are two ways to encode this: with a token type or with a Java (or C++ etc...) class type.  In other words, do you have a single class type with numerous token types or no token types and numerous classes?  For lack of better terms, I (Terence) have been calling ASTs with a single class type homogeneous trees and ASTs with many class types heterogeneous trees.

The only reason to have a different class type for the various kinds of nodes is for the case where you want to execute a bunch of hand-coded tree walks or your nodes store radically different kinds of data.  The example I use below demonstrates an expression tree where each node overrides value() so that root.value() is the result of evaluating the input expression.   From the perspective of building trees and walking them with a generated tree parser, it is best to consider every node as an identical AST node.  Hence, the schism that exists between the hetero- and homogeneous AST camps.

ANTLR supports both kinds of tree nodes--at the same time!  If you do nothing but turn on the "buildAST=true" option, you get a homogeneous tree.  Later, if you want to use physically separate class types for some of the nodes, just specify that in the grammar that builds the tree.  Then you can have the best of both worlds--the trees are built automatically, but you can apply different methods to and store different data in the various nodes.  Note that the structure of the tree is unaffected; just the type of the nodes changes.

ANTLR applies a "scoping" sort of algorithm for determining the class type of a particular AST node that it needs to create.  The default type is CommonAST unless, prior to parser invocation, you override that with a call to:

  myParser.setASTNodeType("com.acme.MyAST");

where you must use a fully qualified class name.

In the grammar, you can override the default class type by setting the type for nodes created from a particular input token.  Use the element option <AST=typename> in the tokens section:

tokens {
    PLUS<AST=PLUSNode>;
    ...
}

You may further override the class type by annotating a particular token reference in your parser grammar:

anInt : INT<AST=INTNode> ;

This reference override is super useful for tokens such as ID that you might want converted to a TYPENAME node in one context and a VARREF in another context.

ANTLR uses the AST factory to create all AST nodes even if it knows the specific type.   In other words, ANTLR generates code similar to the following:

ANode tmp1_AST = (ANode)astFactory.create(LT(1),"ANode");

from

a : A<AST=ANode> ;

An Expression Tree Example

This example includes a parser that constructs expression ASTs, the usual lexer, and some AST node class definitions.

Let's start by describing the AST structure and node types.   Expressions have plus and multiply operators and integers.  The operators will be subtree roots (nonleaf nodes) and integers will be leaf nodes.  For example, input 3+4*5+21 yields a tree with structure:

(  + (  +  3 (  *  4  5 ) )  21 )

or:

  +
  |
  +--21
  |
  3--*
     |
     4--5

All AST nodes are subclasses of CalcAST, which are BaseAST's that also answer method value().   Method value() evaluates the tree starting at that node.  Naturally, for integer nodes, value() will simply return the value stored within that node.  Here is CalcAST:

public abstract class CalcAST
    extends antlr.BaseAST
{
    public abstract int value();
}

The AST operator nodes must combine the results of computing the value of their two subtrees.  They must perform a depth-first walk of the tree below them.  For fun and to make the operations more obvious, the operator nodes define left() and right() instead, making them appear even more different than the normal child-sibling tree representation.  Consequently, these expression trees can be treated as both homogeneous child-sibling trees and heterogeneous expression trees.

public abstract class BinaryOperatorAST extends
    CalcAST
{
    /** Make me look like a heterogeneous tree */
    public CalcAST left() {
        return (CalcAST)getFirstChild();
    }

    public CalcAST right() {
        CalcAST t = left();
        if ( t==null ) return null;
        return (CalcAST)t.getNextSibling();
    }
}

The simplest node in the tree looks like:

import antlr.BaseAST;
import antlr.Token;
import antlr.collections.AST;
import java.io.*;

/** A simple node to represent an INT */
public class INTNode extends CalcAST {
    int v=0;

    public INTNode(Token tok) {
        v = Integer.parseInt(tok.getText());
    }

    /** Compute value of subtree; this is
     *  heterogeneous part :)
     */
    public int value() {
        return v;
    }

    public String toString() {
        return " "+v;
    }

    // satisfy abstract methods from BaseAST
    public void initialize(int t, String txt) {
    }
    public void initialize(AST t) {
    }
    public void initialize(Token tok) {
    }
}

The operators derive from BinaryOperatorAST and define value() in terms of left() and right().  For example, here is PLUSNode:

import antlr.BaseAST;
import antlr.Token;
import antlr.collections.AST;
import java.io.*;

/** A simple node to represent PLUS operation */
public class PLUSNode extends BinaryOperatorAST {
    public PLUSNode(Token tok) {
    }

    /** Compute value of subtree;
     * this is heterogeneous part :)
     */
    public int value() {
        return left().value() + right().value();
    }

    public String toString() {
        return " +";
    }

    // satisfy abstract methods from BaseAST
    public void initialize(int t, String txt) {
    }
    public void initialize(AST t) {
    }
    public void initialize(Token tok) {
    }
}

The parser is pretty straightforward except that you have to add the options to tell ANTLR what node types you want to create for which token matched on the input stream.   The tokens section lists the operators with element option AST appended to their definitions.  This tells ANTLR to build PLUSNode objects for any PLUS tokens seen on the input stream, for example.  For demonstration purposes, INT is not included in the tokens section--the specific token reference is suffixed with the element option to specify that nodes created from that INT should be of type INTNode (of course, the effect is the same as there is only that one reference to INT).

class CalcParser extends Parser;
options {
    buildAST = true; // uses CommonAST by default
}

// define a bunch of specific AST nodes to build.
// can override at actual reference of tokens in
// grammar below.
tokens {
    PLUS<AST=PLUSNode>;
    STAR<AST=MULTNode>;
}

expr:   mexpr (PLUS^ mexpr)* SEMI!
    ;

mexpr
    :   atom (STAR^ atom)*
    ;

// Demonstrate token reference option
atom:   INT<AST=INTNode>
    ;

Invoking the parser is done as usual.  Computing the value of the resulting AST is accomplished by simply calling method value() on the root.

import java.io.*;
import antlr.CommonAST;
import antlr.collections.AST;

class Main {
    public static void main(String[] args) {
        try {
            CalcLexer lexer =
                new CalcLexer(
                  new DataInputStream(System.in)
                );
            CalcParser parser =
                new CalcParser(lexer);
            // Parse the input expression
            parser.expr();
            CalcAST t = (CalcAST)parser.getAST();

            System.out.println(t.toStringTree());

            // Compute value and return
            int r = t.value();
            System.out.println("value is "+r);
        } catch(Exception e) {
            System.err.println("exception: "+e);
            e.printStackTrace();
        }
    }
}

For completeness, here is the lexer:

class CalcLexer extends Lexer;

WS  :   (' '
    |   '\t'
    |   '\n'
    |   '\r')
        { $setType(Token.SKIP); }
    ;

LPAREN: '(' ;

RPAREN: ')' ;

STAR:   '*' ;

PLUS:   '+' ;

SEMI:   ';' ;

protected
DIGIT
    :   '0'..'9' ;

INT :   (DIGIT)+ ;

Describing Heterogeneous Trees With Grammars

So what's the difference between this approach and default homogeneous tree construction?  The big difference is that you need a tree grammar to describe the expression tree and compute resulting values.  But, that's a good thing as it's "executable documentation" and negates the need to handcode the tree parser (the value() methods).  If you used homogeneous trees, here is all you would need beyond the parser/lexer to evaluate the expressions:  [This code comes from the examples/java/calc directory.]

class CalcTreeWalker extends TreeParser;

expr returns [float r]
{
    float a,b;
    r=0;
}
    :   #(PLUS a=expr b=expr)   {r = a+b;}
    |   #(STAR a=expr b=expr)   {r = a*b;}
    |   i:INT
        {r = (float)
         Integer.parseInt(i.getText());}
    ;

Because Terence wants you to use tree grammars even when constructing heterogeneous ASTs (to avoid handcoding methods that implement a depth-first-search), implement the following methods in your various heterogeneous AST node class definitions:

    /** Get the token text for this node */
    public String getText();
    /** Get the token type for this node */
    public int getType();

That is how you can use heterogeneous trees with a tree grammar.  Note that your token types must match the PLUS and STAR token types imported from your parser.  I.e., make sure PLUSNode.getType() returns CalcParserTokenTypes.PLUS.   The token types are generated by ANTLR in interface files that look like:

public interface CalcParserTokenTypes {
	...
        int PLUS = 4;
        int STAR = 5;
	...
}
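
So, for the heterogeneous nodes in this example to be walkable by the tree grammar above, PLUSNode just needs to report the matching constants.  A minimal sketch:

// Added to PLUSNode: identify this node to a generated tree parser.
public int getType()    { return CalcParserTokenTypes.PLUS; }
public String getText() { return "+"; }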

AST (XML) Serialization

[Oliver Zeigermann olli@zeigermann.de provided the initial implementation of this serialization.  His XTAL XML translation code is worth checking out; particularly for reading XML-serialized ASTs back in.]

For a variety of reasons, you may want to store an AST or pass it to another program or computer.  Class antlr.BaseAST is Serializable using the Java code generator, which means you can write ASTs to the disk using the standard Java stuff.  You can also write the ASTs out in XML form using the following methods from BaseAST:

  • public void xmlSerialize(Writer out)
  • public void xmlSerializeNode(Writer out)
  • public void xmlSerializeRootOpen(Writer out)
  • public void xmlSerializeRootClose(Writer out)

All methods throw IOException.

You can override xmlSerializeNode and so on to change the way nodes are written out.  By default the serialization uses the class type name as the tag name and has attributes text and type to store the text and token type of the node.
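
For example, a BaseAST subclass could emit each node as a single self-closing tag.  This is a rough sketch against the signature listed above; the tag layout is made up:

// Custom per-node serialization in a BaseAST subclass (sketch).
public void xmlSerializeNode(Writer out) throws IOException {
    out.write("<node type=\"" + getType() +
              "\" text=\"" + getText() + "\"/>");
}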

The output from running the simple heterogeneous tree example, examples/java/heteroAST, yields:

 (  + (  +  3 (  *  4  5 ) )  21 )
<PLUS><PLUS><int>3</int><MULT>
<int>4</int><int>5</int>
</MULT></PLUS><int>21</int></PLUS>
value is 44

The LISP-form of the tree shows the structure and contents.  The various heterogeneous nodes override the open and close tags and change the way leaf nodes are serialized to use <int>value</int> instead of tag attributes of a single node.

Here is the code that generates the XML:

Writer w = new OutputStreamWriter(System.out);
t.xmlSerialize(w);
w.write("\n");
w.flush();

AST enumerations

The AST findAll and findAllPartial methods return enumerations of tree nodes that you can walk.  Interface

antlr.collections.ASTEnumeration

and

class antlr.collections.impl.ASTEnumerator

implement this functionality.  Here is an example:

// Print out all instances of
// a-subtree-of-interest
// found within tree 't'.
ASTEnumeration nodes;  // "enum" is a reserved word in newer Java releases
nodes = t.findAll(a-subtree-of-interest);
while ( nodes.hasMoreNodes() ) {
  System.out.println(
    nodes.nextNode().toStringList()
  );
}

A few examples


sum :term ( PLUS^ term)*
    ; 

The "^" suffix on the PLUS tells ANTLR to create an additional node and place it as the root of whatever subtree has been constructed up until that point for rule sum. The subtrees returned by the term references are collected as children of the addition nodes.  If the subrule is not matched, the associated nodes would not be added to the tree. The rule returns either the tree matched for the first term reference or a PLUS-rooted tree.

The grammar annotations should be viewed as operators, not static specifications. In the above example, each iteration of the (...)* will create a new PLUS root, with the previous tree on the left, and the tree from the new term on the right, thus preserving the usual associativity for "+".

Look at the following rule that turns off default tree construction.

decl!:
    modifiers type ID SEMI
    { #decl = #([DECL], ID, ([TYPE] type),
                ([MOD] modifiers) ); }
    ;

In this example, a declaration is matched. The resulting AST has an "imaginary" DECL node at the root, with three children. The first child is the ID of the declaration. The second child is a subtree with an imaginary TYPE node at the root and the AST from the type rule as its child. The third child is a subtree with an imaginary MOD at the root and the results of the modifiers rule as its child.

Labeled subrules

[THIS WILL NOT BE IMPLEMENTED AS LABELED SUBRULES...We'll do something else eventually.]

In 2.00 ANTLR, each rule has exactly one tree associated with it. Subrules simply add elements to the tree for the enclosing rule, which is normally what you want. For example, expression trees are easily built via:


expr: ID ( PLUS^ ID )*
    ;
    

However, many times you want the elements of a subrule to produce a tree that is independent of the rule's tree. Recall that exponents must be computed before coefficients are multiplied in for exponent terms. The following grammar matches the correct syntax.


// match exponent terms such as "3*x^4"
eterm
    :   expr MULT ID EXPONENT expr
    ;
    

However, to produce the correct AST, you would normally split the ID EXPONENT expr portion into another rule like this:


eterm:
    expr MULT^ exp
    ;

exp:
	ID EXPONENT^ expr
    ;
    

In this manner, each operator would be the root of the appropriate subrule. For input 3*x^4, the tree would look like:


#(MULT 3 #(EXPONENT ID 4))
    

However, if you attempted to keep this grammar in the same rule:


eterm
    :   expr MULT^ (ID EXPONENT^ expr)
    ;
    

both "^" root operators would modify the same tree yielding


#(EXPONENT #(MULT 3 ID) 4)
    

This tree has the operators as roots, but they are associated with the wrong operands.

Using a labeled subrule allows the original rule to generate the correct tree.


eterm
    :   expr MULT^ e:(ID EXPONENT^ expr)
    ;
    

In this case, for the same input 3*x^4, the labeled subrule would build up its own subtree and make it the operand of the MULT tree of the eterm rule. The presence of the label alters the AST code generation for the elements within the subrule, making it operate more like a normal rule. Annotations of "^" make the node created for that token reference the root of the tree for the e subrule.

Labeled subrules have a result AST that can be accessed just like the result AST for a rule. For example, we could rewrite the above decl example using labeled subrules (note the use of ! at the start of the subrules to suppress automatic construction for the subrule):


decl!:
    m:(! modifiers { #m = #([MOD] modifiers); } )
    t:(! type { #t = #([TYPE] type); } )
    ID
    SEMI
    { #decl = #( [DECL] ID t m ); }
    ;
    

What about subrules that are closure loops? The same rules apply to a closure subrule--there is a single tree for that loop that is built up according to the AST operators annotating the elements of that loop. For example, consider the following rule.


term:   T^ i:(OP^ expr)+
    ;
    

For input T OP A OP B OP C, the following tree structure would be created:


#(T #(OP #(OP #(OP A) B) C) )
    

which can be drawn graphically as


T
|
OP
|
OP--C
|
OP--B
|
A
    

The first important thing to note is that each iteration of the loop in the subrule operates on the same tree. The resulting tree, after all iterations of the loop, is associated with the subrule label. The result tree for the above labeled subrule is:


#(OP #(OP #(OP A) B) C)
    

The second thing to note is that, because T is matched first and there is a root operator after it in the rule, T would be at the bottom of the tree if it were not for the label on the subrule.

Loops will generally be used to build up lists of subtrees. For example, if you want a list of polynomial assignments to produce a sibling list of ASSIGN subtrees, you would normally have to split the following rule into two rules.


interp
    :   ( ID ASSIGN poly ";" )+
    ;
    

Normally, the following would be required


interp
    :   ( assign )+
    ;
assign
    :   ID ASSIGN^ poly ";"!
    ;
    

Labeling a subrule allows you to write the above example more easily as:


interp
    :   ( r:(ID ASSIGN^ poly ";") )+
    ;
    

Each recognition of a subrule results in a tree and if the subrule is nested in a loop, all trees are returned as a list of trees (i.e., the roots of the subtrees are siblings). If the labeled subrule is suffixed with a "!", then the tree(s) created by the subrule are not linked into the tree for the enclosing rule or subrule.

Labeled subrules within labeled subrules result in trees that are linked into the surrounding subrule's tree. For example, the following rule results in a tree of the form X #( A #(B C) D) Y.


a   :   X r:( A^ s:(B^ C) D) Y
    ;
    

Labeled subrules within nonlabeled subrules result in trees that are linked into the surrounding rule's tree. For example, the following rule results in a tree of the form #(A X #(B C) D Y).


a   :   X ( A^ s:(B^ C) D) Y
    ;    

Reference nodes

Not implemented. A node that does nothing but refer to another node in the tree. Nice for embedding the same tree in multiple lists.

Required AST functionality and form

The data structure representing your trees can have any form or type name as long as they implement the AST interface:

package antlr.collections;

/** Minimal AST node interface used by ANTLR
 *  AST generation and tree-walker.
 */
public interface AST {
    /** Get the token type for this node */
    public int getType();

    /** Set the token type for this node */
    public void setType(int ttype);

    /** Get the token text for this node */
    public String getText();

    /** Set the token text for this node */
    public void setText(String text);

    /** Get the first child of this node;
     *  null if no children */
    public AST getFirstChild();

    /** Set the first child of a node */
    public void setFirstChild(AST c);

    /** Get the next sibling in line after this
     * one
     */
    public AST getNextSibling();

    /** Set the next sibling after this one */
    public void setNextSibling(AST n);

    /** Add a (rightmost) child to this node */
    public void addChild(AST node);
    /** Are two nodes exactly equal? */
    public boolean equals(AST t);
    /** Are two lists of nodes/subtrees exactly
     *  equal in structure and content? */
    public boolean equalsList(AST t);
    /** Are two lists of nodes/subtrees
     *  partially equal? In other words, 'this'
     *  can be bigger than 't'
     */
    public boolean equalsListPartial(AST t);
    /** Are two nodes/subtrees exactly equal? */
    public boolean equalsTree(AST t);
    /** Are two nodes/subtrees exactly partially
     *  equal? In other words, 'this' can be
     *  bigger than 't'.
     */
    public boolean equalsTreePartial(AST t);
    /** Return an enumeration of all exact tree
     * matches for tree within 'this'.
     */
    public ASTEnumeration findAll(AST tree);
    /** Return an enumeration of all partial
     *  tree matches for tree within 'this'.
     */
    public ASTEnumeration findAllPartial(
        AST subtree);
    /** Init a node with token type and text */
    public void initialize(int t, String txt);
    /** Init a node using content from 't' */
    public void initialize(AST t);
    /** Init a node using content from 't' */
    public void initialize(Token t);
    /** Convert node to printable form */
    public String toString();
    /** Treat 'this' as list (i.e.,
     *  consider 'this'
     *  siblings) and convert to printable
     *  form
     */
    public String toStringList();
    /** Treat 'this' as tree root
     *  (i.e., don't consider
     *  'this' siblings) and convert
     *   to printable form */
    public String toStringTree();
}

This scheme does not preclude the use of heterogeneous trees versus homogeneous trees. However, you will need to write extra code to create heterogeneous trees (via a subclass of ASTFactory) or by specifying the node types at the token reference sites or in the tokens section, whereas the homogeneous trees are free.

Version: $Id: //depot/code/org.antlr/release/antlr-2.7.7/doc/trees.html#2 $

ANTLR Specification: Meta Language

ANTLR Meta-Language

ANTLR accepts three types of grammar specifications -- parsers, lexers, and tree-parsers (also called tree-walkers). Because ANTLR uses LL(k) analysis for all three grammar variants, the grammar specifications are similar, and the generated lexers and parsers behave similarly. The generated recognizers are human-readable and you can consult the output to clear up many of your questions about ANTLR's behavior.

Meta-Language Vocabulary

Whitespace. Spaces, tabs, and newlines are separators in that they can separate ANTLR vocabulary symbols such as identifiers, but are otherwise ignored. For example, "FirstName LastName" appears to ANTLR as a sequence of two token references, not as token reference, space, token reference.

Comments. ANTLR accepts C-style block comments and C++-style line comments. Java-style documenting comments are allowed on grammar classes and rules, which are passed to the generated output if requested. For example,

/**This grammar recognizes simple expressions
 * @author Terence Parr
 */
class ExprParser;

/**Match a factor */
factor : ... ;

Characters. Character literals are specified just like in Java. They may contain octal-escape characters (e.g., '\377'), Unicode characters (e.g., '\uFF00'), and the usual special character escapes recognized by Java ('\b', '\r', '\t', '\n', '\f', '\'', '\\'). In lexer rules, single quotes represent a character to be matched on the input character stream. Single-quoted characters are not supported in parser rules.

End of file. The EOF token is automatically generated for use in parser rules:

rule : (statement)+ EOF;

You can test for EOF_CHAR in actions of lexer rules:

// make sure nothing but newline or
// EOF is past the #endif
ENDIF
{
  boolean eol=false;
}
     :   "#endif"
         ( ('\n' | '\r') {eol=true;} )?
         {
           if (!eol) {
             if (LA(1)==EOF_CHAR) {error("EOF");}
             else {error("Invalid chars");}
           }
         }
     ;

While you can test for end-of-file as a character, it is not really a character--it is a condition.  You should instead override CharScanner.uponEOF() in your lexer grammar:

/** This method is called by YourLexer.nextToken()
 *  when the lexer has
 * hit EOF condition. EOF is NOT a character.
 * This method is not called if EOF is reached
 * during syntactic predicate evaluation or during
 * evaluation of normal lexical rules, which
 * presumably would be an IOException. This
 * traps the "normal" EOF * condition.
 *
 * uponEOF() is called after the complete evaluation
 * of the previous token and only if your parser asks
 * for another token beyond that last non-EOF token.
 *
 * You might want to throw token or char stream
 * exceptions like: "Heh, premature eof" or a retry
 * stream exception ("I found the end of this file,
 * go back to referencing file").
 */
public void uponEOF()
  throws TokenStreamException, CharStreamException
{
}

The end-of-file situation is a bit nutty (since version 2.7.1) because Terence used -1 as a char not an int (-1 is '\uFFFF'...oops).

Strings. String literals are sequences of characters enclosed in double quotes. The characters in the string may be represented using the same escapes (octal, Unicode, etc.) that are valid in character literals. Currently, ANTLR does not actually allow Unicode characters within string literals (you have to use the escape). This is because the antlr.g file sets the charVocabulary option to ascii.

In lexer rules, strings are interpreted as sequences of characters to be matched on the input character stream (e.g., "for" is equivalent to 'f' 'o' 'r').

In parser rules, strings represent tokens, and each unique string is assigned a token type. However, ANTLR does not create lexer rules to match the strings. Instead, ANTLR enters the strings into a literals table in the associated lexer. ANTLR will generate code to test the text of each token against the literals table, and change the token type when a match is encountered before handing the token off to the parser. You may also perform the test manually -- the automatic code-generation is controllable by a lexer option.

You may want to use the token type value of a string literal in your actions, for example in the synchronization part of an error-handler. For string literals that consist of alphabetic characters only, the string literal value will be a constant with a name like LITERAL_xxx, where xxx is the name of the token. For example, the literal "return" will have an associated value of LITERAL_return.   You may also assign a specific label to a literal using the tokens section.
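
For example, a hand-written resynchronization action in the parser might skip ahead until it sees the keyword again.  This is a sketch only; LITERAL_return and SEMI stand for whatever names your grammar actually produces:

// Skip tokens until "return", ';', or end of file (token names assumed).
while ( LA(1) != LITERAL_return && LA(1) != SEMI && LA(1) != Token.EOF_TYPE ) {
    consume();
}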

Token references. Identifiers beginning with an uppercase letter are token references. The subsequent characters may be any letter, digit, or underscore. A token reference in a parser rule results in matching the specified token. A token reference in a lexer rule results in a call to the lexer rule for matching the characters of the token. In other words, token references in the lexer are treated as rule references.

Token definitions. Token definitions in a lexer have the same syntax as parser rule definitions, but refer to tokens, not parser rules. For example,

class MyParser extends Parser;
idList : ( ID )+;   // parser rule definition

class MyLexer extends Lexer;
ID : ( 'a'..'z' )+ ;   // token definition    

Rule references. Identifiers beginning with a lowercase letter are references to ANTLR parser rules. The subsequent characters may be any letter, digit, or underscore. Lexical rules may not reference parser rules.

Actions. Character sequences enclosed in (possibly nested) curly braces are semantic actions. Curly braces within string and character literals are not action delimiters.

Arguments Actions. Character sequences in (possibly nested) square brackets are rule argument actions. Square braces within string and character literals are not action delimiters. The arguments within [] are specified using the syntax of the generated language, and should be separated by commas.

codeBlock
[int scope, String name] // input arguments
returns [int x]          // return values
: ... ;

// pass 2 args, get return
testcblock
{int y;}
	:	y=cblock[1,"John"]
	;

Many people would prefer that we use normal parentheses for arguments, but parentheses are best used as grammatical grouping symbols for EBNF.

Symbols. The following table summarizes punctuation and keywords in ANTLR.

Symbol Description
(...) subrule
(...)* closure subrule zero-or-more
(...)+ positive closure subrule one-or-more
(...)? optional zero-or-one
{...} semantic action
[...] rule arguments
{...}? semantic predicate
(...)=> syntactic predicate
| alternative operator
.. range operator
~ not operator
. wildcard
= assignment operator
: label operator, rule start
; rule end
<...> element option
class grammar class
extends specifies grammar base class
returns specifies return type of rule
options options section
tokens tokens section
header header section
tokens token definition section

Header Section

A header section contains source code that must be placed before any ANTLR-generated code in the output parser. This is mainly useful for C++ output due to its requirement that elements be declared before being referenced. In Java, this can be used to specify a package for the resulting parser, and any imported classes. A header section looks like:

header {
  source code in the language generated by ANTLR;
}  

The header section is the first section in a grammar file. Depending on the selected target language, additional kinds of header sections may be available; see the respective addenda.
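
For example, a header section for a Java-target grammar might look like this (the package and import names are hypothetical):

header {
    package com.example.calc;   // place all generated classes in this package
    import java.util.Vector;    // visible to every generated lexer/parser/tree-parser
}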

Parser Class Definitions

All parser rules must be associated with a parser class. A grammar (.g) file may contain only one parser class definition (along with lexer and tree-parser definitions). A parser class specification precedes the options and rule definitions of the parser. A parser specification in a grammar file often looks like:

{ optional class code preamble }
class YourParserClass extends Parser;
options
tokens
{ optional action for instance vars/methods }
parser rules...    

When generating code in an object-oriented language, parser classes result in classes in the output, and rules become member methods of the class. In C, classes would result in structs, and some name-mangling would be used to make the resulting rule functions globally unique.

The optional class preamble is some arbitrary text enclosed in {}. The preamble, if it exists, will be output to the generated class file immediately before the definition of the class.

Enclosing curly braces are not used to delimit the class because it is hard to associate the trailing right curly brace at the bottom of a file with the left curly brace at the top of the file. Instead, a parser class is assumed to continue until the next class statement.

You may specify a parser superclass that is used as the superclass of the generated parser. The superclass must be fully-qualified and in double-quotes; it must itself be a subclass of antlr.LLkParser. For example,

class TinyCParser extends Parser("antlr.debug.ParseTreeDebugParser");

Lexical Analyzer Class Definitions

A parser class results in parser objects that know how to apply the associated grammatical structure to an input stream of tokens. To perform lexical analysis, you need to specify a lexer class that describes how to break up the input character stream into a stream of tokens. The syntax is similar to that of a parser class:


{ optional class code preamble }
class YourLexerClass extends Lexer;
options
tokens
{ optional action for instance vars/methods }
lexer rules...

Lexical rules contained within a lexer class become member methods in the generated class. Each grammar (.g) file may contain only one lexer class. The parser and lexer classes may appear in any order.

The optional class preamble is some arbitrary text enclosed in {}. The preamble, if it exists, will be output to the generated class file immediately before the definition of the class.

You may specify a lexer superclass that is used as the superclass for the generated lexer. The superclass must be fully-qualified and in double-quotes; it must itself be a subclass of antlr.CharScanner.
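
For example (the superclass name here is purely illustrative; any class derived from antlr.CharScanner will do):

class TinyCLexer extends Lexer("com.example.MyCharScannerBase");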

Tree-parser Class Definitions

A tree-parser is like a parser, except that it processes a two-dimensional tree of AST nodes instead of a one-dimensional stream of tokens. Tree parsers are specified identically to parsers, except that rule definitions may contain a special form to indicate descent into the tree. Again, only one tree parser may be specified per grammar (.g) file.

{ optional class code preamble }
class YourTreeParserClass extends TreeParser;
options
tokens
{ optional action for instance vars/methods }
tree parser rules...

You may specify a tree parser superclass that is used as the superclass for the generated tree parser. The superclass must be fully-qualified and in double-quotes; it must itself be a subclass of antlr.TreeParser.

Options Section

Rather than have the programmer specify a bunch of command-line arguments to the parser generator, an options section within the grammar itself serves this purpose. This solution is preferable because it associates the required options with the grammar rather than with the ANTLR invocation. The section is preceded by the options keyword and contains a series of option/value assignments. An options section may be specified on a per-file, per-grammar, per-rule, or per-subrule basis.

You may also specify an option on an element, such as a token reference.

Tokens Section

If you need to define an "imaginary" token, one that has no corresponding real input symbol, use the tokens section to define them.  Imaginary tokens are used often for tree nodes that mark or group a subtree resulting from real input.  For example, you may decide to have an EXPR node be the root of every expression subtree and DECL for declaration subtrees for easy reference during tree walking.  Because there is no corresponding input symbol for EXPR, you cannot reference it in the grammar to implicitly define it.  Use the following to define those imaginary tokens.

tokens {
    EXPR;
    DECL;
}

The formal syntax is:

tokenSpec : "tokens" LCURLY
            (tokenItem SEMI)+
            RCURLY
          ;

tokenItem : TOKEN ASSIGN STRING (tokensSpecOptions)?
          | TOKEN  (tokensSpecOptions)?
          | STRING (tokensSpecOptions)?
          ;
tokensSpecOptions
          : "<"
              id ASSIGN optionValue
              ( SEMI id ASSIGN optionValue )*
            ">"
          ;

You can also define literals in this section and, most importantly, assign to them a valid label as in the following example.

tokens {
    KEYWORD_VOID="void";
    EXPR;
    DECL;
    INT="int";
}

Strings defined in this way are treated just as if you had referenced them in the parser.

If a grammar imports a vocabulary containing a token, say T, then you may attach a literal to that token type simply by adding T="a literal" to the tokens section of the grammar.  Similarly, if the imported vocabulary defines a literal, say "_int32", without a label, you may attach a label via INT32="_int32" in the tokens section.
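
For example, a sketch of such a tokens section (T and "_int32" stand for a token and a literal assumed to come from the imported vocabulary):

tokens {
    T = "begin";        // attach the literal "begin" to the imported token type T
    INT32 = "_int32";   // attach the label INT32 to the imported literal "_int32"
}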

You may define options on the tokens defined in the tokens section.  The only option available so far is AST=class-type-to-instantiate.

// Define a bunch of specific AST nodes to build.
// Can override at actual reference of tokens in
// grammar.
tokens {
    PLUS<AST=PLUSNode>;
    STAR<AST=MULTNode>;
}

Grammar Inheritance

Object-oriented programming languages such as C++ and Java allow you to define a new object as it differs from an existing object, which provides a number of benefits. "Programming by difference" saves development/testing time, and future changes to the base or superclass are automatically propagated to the derived or subclass. ANTLR supports grammar inheritance as a mechanism for creating a new grammar class based on a base class. Both the grammatical structure and the actions associated with the grammar may be altered independently.

Rule Definitions

Because ANTLR considers lexical analysis to be parsing on a character stream, both lexer and parser rules may be discussed simultaneously. When speaking generically about rules, we will use the term atom to mean an element from the input stream (be they characters or tokens).

The structure of an input stream of atoms is specified by a set of mutually-referential rules. Each rule has a name, optionally a set of arguments, optionally a "throws" clause, optionally an init-action, optionally a return value, and an alternative or alternatives. Each alternative contains a series of elements that specify what to match and where.

The basic form of an ANTLR rule is:

rulename
    :   alternative_1
    |   alternative_2
   ...
    |   alternative_n
    ;    

If parameters are required for the rule, use the following form:

rulename[formal parameters] : ... ;

If you want to return a value from the rule, use the returns keyword:

rulename returns [type id] : ... ;    

where type is a type specifier of the generated language, and id is a valid identifier of the generated language. In Java, a single type identifier would suffice most of the time, but returning an array of strings, for example, would require brackets:

ids returns [String[] s]: ( ID {...} )* ;    

Also, when generating C++, the return type could be complex such as:

ids returns [char *[] s]: ... ;    

The id of the returns statement is passed to the output code. An action may assign directly to this id to set the return value. Do not use a return statement in an action.
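
For example, a minimal sketch of a rule that builds its return value in actions (ID is an assumed token):

countIDs returns [int n]
{ n = 0; }                   // init-action establishes the default return value
    :   ( ID { n++; } )*     // assign to n directly; no return statement
    ;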

To specify that your parser rule (or tree parser rule) can throw a non-ANTLR-specific exception, use a throws clause after the rule name.  For example, here is a simple parser specification with a rule that throws MyException:

class P extends Parser;

a throws MyException
  : A
  ;

ANTLR generates the following for rule a:

    public final void a()
        throws RecognitionException,
               TokenStreamException,
               MyException
    {
        try {
            match(A);
        }
        catch (RecognitionException ex) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_0);
        }
    }

Lexer rules may not specify exceptions.

Init-actions are specified before the colon. Init-actions differ from normal actions because they are always executed regardless of guess mode. In addition, they are suitable for local variable definitions.

rule
{
    init-action
}
    :   ...
    ;    

Lexer rules. Rules defined within a lexer grammar must have a name beginning with an uppercase letter. These rules implicitly match characters on the input stream instead of tokens on the token stream. Referenced grammar elements include token references (implicit lexer rule references), characters, and strings. Lexer rules are processed in the exact same manner as parser rules and, hence, may specify arguments and return values; further, lexer rules can also have local variables and use recursion. See more about lexical analysis with ANTLR.
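
For example, recursion in a lexer rule is handy for nested structures; here is a sketch (it assumes '{' and '}' are in the character vocabulary):

protected
CURLY_BLOCK
    :   '{' ( CURLY_BLOCK | ~('{'|'}') )* '}'   // recursion matches nested braces
    ;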

Parser rules. Parser rules apply structure to a stream of tokens whereas lexer rules apply structure to a stream of characters. Parser rules, therefore, must not reference character literals. Double-quoted strings in parser rules are considered token references and force ANTLR to squirrel away the string literal into a table that can be checked by actions in the associated lexer.

All parser rules must begin with lowercase letters.

Tree-parser rules. In a tree-parser, an additional special syntax is allowed to specify the match of a two-dimensional structure. Whereas a parser rule may look like:

rule : A B C;    

which means "match A B and C sequentially", a tree-parser rule may also use the syntax:

rule : #(A B C);  

which means "match a node of type A, and then descend into its list of children and match B and C". This notation can be nested arbitrarily, using #(...) anywhere an EBNF construct could be used, for example:

rule : #(A B #(C D (E)*) );      
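
For example, a tree-parser rule that walks simple expression trees might look like this (PLUS, STAR, and INT are assumed token types):

expr
    :   #(PLUS expr expr)
    |   #(STAR expr expr)
    |   INT
    ;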

Atomic Production elements

Character literal. A character literal can only be referred to within a lexer rule. The single character is matched on the character input stream. There is no need to escape regular-expression meta symbols because regular expressions are not used to match lexical atoms. For example, '{' need not be escaped because you are specifying the literal character to match. Meta symbols are used outside of character and string literals to specify lexical structure.

All characters that you reference are implicitly added to the overall character vocabulary (see option charVocabulary). The vocabulary comes into play when you reference the wildcard character, '.', or ~c ("every character but c").

You do not have to treat Unicode character literals specially. Just reference them as you would any other character literal. For example, here is a rule called LETTER that matches characters considered Unicode letters:


protected
LETTER
    :   '\u0024' |
        '\u0041'..'\u005a' |
        '\u005f' |
        '\u0061'..'\u007a' |
        '\u00c0'..'\u00d6' |
        '\u00d8'..'\u00f6' |
        '\u00f8'..'\u00ff' |
        '\u0100'..'\u1fff' |
        '\u3040'..'\u318f' |
        '\u3300'..'\u337f' |
        '\u3400'..'\u3d2d' |
        '\u4e00'..'\u9fff' |
        '\uf900'..'\ufaff'
    ;
You can reference this rule from another rule:

ID  :   (LETTER)+
    ;
ANTLR will generate code that tests the input characters against a bit set created in the lexer object.

String literal. Referring to a string literal within a parser rule defines a token type for the string literal, and causes the string literal to be placed in a hash table of the associated lexer. The associated lexer will have an automated check against every matched token to see if it matches a literal. If so, the token type for that token is set to the token type for that literal definition imported from the parser. You may turn off the automatic checking and do it yourself in a convenient rule like ID. References to string literals within the parser may be suffixed with an element option; see token references below.

Referring to a string within a lexer rule matches the indicated sequence of characters and is a shorthand notation. For example, consider the following lexer rule definition:

BEGIN : "begin" ;

This rule can be rewritten in a functionally equivalent manner:

BEGIN : 'b' 'e' 'g' 'i' 'n' ;    

There is no need to escape regular-expression meta symbols because regular expressions are not used to match characters in the lexer.

Token reference. Referencing a token in a parser rule implies that you want to recognize a token with the specified token type. This does not actually call the associated lexer rule--the lexical analysis phase delivers a stream of tokens to the parser.

A token reference within a lexer rule implies a method call to that rule, and carries the same analysis semantics as a rule reference within a parser. In this situation, you may specify rule arguments and return values. See the next section on rule references.

You may also specify an option on a token reference.  Currently, you can only specify the AST node type to create from the token.  For example, the following rule instructs ANTLR to build INTNode objects from the INT reference:

i : INT<AST=INTNode> ;

The syntax of an element option is

<option=value; option=value; ...>

Wildcard. The "." wildcard within a parser rule matches any single token; within a lexer rule it matches any single character. For example, this matches any single token between the B and C:

r : A B . C;

Simple Production elements

Rule reference. Referencing a rule implies a method call to that rule at that point in the parse. You may pass parameters and obtain return values. For example, formal and actual parameters are specified within square brackets:

funcdef
    :   type ID "(" args ")" block[1]
    ;
block[int scope]
    :   "begin" ... {/*use arg scope/*} "end"
    ;

Return values that are stored into variables use a simple assignment notation:

set
{ Vector ids=null; }  // init-action
    :  "(" ids=idList ")"
    ;
idList returns [Vector strs]
{ strs = new Vector(); }   // init-action
    :  id:ID
       { strs.addElement(id.getText()); }
       (
          "," id2:ID
          { strs.addElement(id2.getText()); }
       )*
    ;    

Semantic action. Actions are blocks of source code (expressed in the target language) enclosed in curly braces. The code is executed after the preceding production element has been recognized and before the recognition of the following element. Actions are typically used to generate output, construct trees, or modify a symbol table. An action's position dictates when it is recognized relative to the surrounding grammar elements.

If the action is the first element of a production, it is executed before any other element in that production, but only if that production is predicted by the lookahead.

The first action of an EBNF subrule may be followed by ':'. Doing so designates the action as an init-action and associates it with the subrule as a whole, instead of any production. It is executed immediately upon entering the subrule -- before lookahead prediction for the alternates of the subrule -- and is executed even while guessing (testing syntactic predicates). For example:

(   {init-action}:
    {action of 1st production} production_1
|   {action of 2nd production} production_2
)?    

The init-action would be executed regardless of what (if anything) matched in the optional subrule.

The init-actions are placed within the loops generated for subrules (...)+ and (...)*.

Production Element Operators

Element complement. The "~" not unary operator must be applied to an atomic element such as a token identifier. For some token atom T, ~T matches any token other than T except end-of-file. Within lexer rules, ~'a' matches any character other than character 'a'. The sequence ~. ("not anything") is meaningless and not allowed.

The vocabulary space is very important for this operator. In parsers, the complete list of token types is known to ANTLR and, hence, ANTLR simply clones that set and clears the indicated element. For characters, you must specify the character vocabulary if you want to use the complement operator. Note that for large vocabularies like Unicode character blocks, complementing a character means creating a set with 2^16 elements in the worst case (about 8 kilobytes). The character vocabulary is the union of characters specified in the charVocabulary option and any characters referenced in the lexer rules. Here is a sample use of the character vocabulary option:

class L extends Lexer;
options { charVocabulary = '\3'..'\377'; } // LATIN

DIGIT : '0'..'9';
SL_COMMENT : "//" (~'\n')* '\n'; 

Set complement. The not operator can also be used to construct a token set or character set by complementing another set. This is most useful when you want to match tokens or characters until a certain delimiter set is encountered. Rather than invent a special syntax for such sets, ANTLR allows the placement of ~ in front of a subrule containing only simple elements and no actions. In this specific case, ANTLR will not generate a subrule, and will instead create a set-match. The simple elements may be token references, token ranges, character literals, or character ranges. For example:

class P extends Parser;
r : T1 (~(T1|T2|T3))* (T1|T2|T3);

class L extends Lexer;
SL_COMMENT : "//" (~('\n'|'\r'))* ('\n'|'\r);

STRING : '"' (ESC | ~('\\'|'"'))* '"';
protected ESC : '\\' ('n' | 'r');

Range operator. The range binary operator implies a range of atoms may be matched. The expression 'c1'..'c2' in a lexer matches characters inclusively in that range. The expression T..U in a parser matches any token whose token type is inclusively in that range, which is of dubious value unless the token types are generated externally.
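
For example, character ranges in lexer rules:

DIGIT    : '0'..'9' ;
HEXDIGIT : '0'..'9' | 'a'..'f' | 'A'..'F' ;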

AST root operator. When generating abstract syntax trees (ASTs), token references suffixed with the "^" root operator force AST nodes to be created and added as the root of the current tree. This symbol is only effective when the buildAST option is set. More information about ASTs is also available.

AST exclude operator. When generating abstract syntax trees, token references suffixed with the "!" exclude operator are not included in the AST constructed for that rule. Rule references can also be suffixed with the exclude operator, which implies that, while the tree for the referenced rule is constructed, it is not linked into the tree for the referencing rule. This symbol is only effective when the buildAST option is set. More information about ASTs is also available.
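
For example, assuming the buildAST option is set and ASSIGN and SEMI are token types (the names are illustrative):

assign
    :   ID ASSIGN^ expr SEMI!   // ASSIGN becomes the subtree root; SEMI is omitted from the AST
    ;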

Token Classes

By using a range operator, a not operator, or a subrule with purely atomic elements, you implicitly define an "anonymous" token or character class--a set that is very efficient in time and space. For example, you can define a lexer rule such as:

OPS : (PLUS | MINUS | MULT | DIV) ;

or

WS  : (' '|'\n'|'\t') ;

These describe sets of tokens and characters respectively that are easily optimized to simple, single, bit-sets rather than series of token and character comparisons.

Predicates

Semantic predicate. Semantic predicates are conditions that must be met at parse-time before parsing can continue past them. The functionality of semantic predicates is explained in more detail later. The syntax of a semantic predicate is a semantic action suffixed by a question operator:

{ expression }?

The expression must not have side-effects and must evaluate to true or false (boolean in Java or bool in C++). Since semantic predicates can be executed while guessing, they should not rely upon the results of actions or rule parameters.

Syntactic predicate. Syntactic predicates specify the lookahead language needed to predict an alternative. Syntactic predicates are explained in more detail later. The syntax of a syntactic predicate is a subrule with a => operator suffix:


( lookahead-language ) => production

Where the lookahead-language can be any valid ANTLR construct including references to other rules. Actions are not executed, however, during the evaluation of a syntactic predicate.

Element Labels

Any atomic or rule reference production element can be labeled with an identifier (case not significant). In the case of a labeled atomic element, the identifier is used within a semantic action to access the associated Token object or character. For example,

assign
    :   v:ID "=" expr ";"
        { System.out.println(
            "assign to "+v.getText()); }
    ;

No "$" operator is needed to reference the label from within an action as was the case with PCCTS 1.xx.

Inside actions, a labeled token reference can be accessed as label to access the Token object, or as #label to access the AST node generated for the token. The AST node constructed for a rule reference may likewise be accessed from within actions as #label.
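
For example, a sketch assuming the buildAST option is set and DECL is an imaginary token defined in the tokens section:

decl!
    :   t:ID
        { #decl = #( #[DECL,"decl"], #t ); }   // #t is the AST node built for the ID token
    ;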

Labels on token references can also be used in association with parser exception handlers to specify what happens when that token cannot be matched.

Labels on rule references are used for parser exception handling so that any exceptions generated while executing the labeled rule can be caught.
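
For example, a sketch of a labeled rule reference with its own exception handler (the handler syntax is described in the error-handling documentation):

stat
    :   ID "=" e:expr ";"
    ;
    exception[e]
        catch [RecognitionException ex] {
            reportError(ex);   // handle errors thrown while matching the expression
        }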

EBNF Rule Elements

ANTLR supports extended BNF notation using the following four subrule forms:

( P1 | P2 | ... | Pn )
( P1 | P2 | ... | Pn )?
( P1 | P2 | ... | Pn )*
( P1 | P2 | ... | Pn )+

Interpretation Of Semantic Actions

Semantic actions are copied to the appropriate position in the output parser verbatim with the exception of AST action translation.

None of the $-variable notation from PCCTS 1.xx carries forward into ANTLR.

Semantic Predicates

A semantic predicate specifies a condition that must be met (at run-time) before parsing may proceed. We differentiate between two types of semantic predicates: (i) validating predicates that throw exceptions if their conditions are not met while parsing a production (like assertions) and (ii) disambiguating predicates that are hoisted into the prediction expression for the associated production.

Semantic predicates are syntactically semantic actions suffixed with a question mark operator:

{ semantic-predicate-expression }?

The expression may use any symbol provided by the programmer or generated by ANTLR that is visible at the point in the output the expression appears.

The position of a predicate within a production determines which type of predicate it is. For example, consider the following validating predicate (which may appear at any non-left-edge position) that ensures an identifier is semantically a type name:

decl: "var" ID ":" t:ID
      { isTypeName(t.getText()) }?
    ;    

Validating predicates generate parser exceptions when they fail. The thrown exception is of type SemanticException. You can catch this and other parser exceptions in an exception handler.

Disambiguating predicates are always the first element in a production because they cannot be hoisted over actions, token references, or rule references. For example, the first production of the following rule has a disambiguating predicate that would be hoisted into the prediction expression for the first alternative:


stat:   // declaration "type varName;"
        {isTypeName(LT(1))}? ID ID ";"
    |   ID "=" expr ";"            // assignment
    ;

If we restrict this grammar to LL(1), it is syntactically nondeterministic because of the common left-prefix: ID. However, the semantic predicate correctly provides additional information that disambiguates the parsing decision. The parsing logic would be:

if ( LA(1)==ID && isTypeName(LT(1)) ) {
    match production one
}
else if ( LA(1)==ID ) {
    match production two
}
else error    

Formally, in PCCTS 1.xx, semantic predicates represented the semantic context of a production. As such, the semantic AND syntactic context (lookahead) could be hoisted into other rules. In ANTLR, predicates are not hoisted outside of their enclosing rule. Consequently, rules such as:

type : {isType(t)}? ID ;

are meaningless. On the other hand, this "semantic context" feature caused considerable confusion to many PCCTS 1.xx folks.

Syntactic Predicates

There are occasionally parsing decisions that cannot be rendered deterministic with finite lookahead. For example:

a   :   ( A )+ B
    |   ( A )+ C
    ;

The common left-prefix renders these two productions nondeterministic in the LL(k) sense for any value of k. Clearly, these two productions can be left-factored into:

a   :   ( A )+ (B|C)
    ;

without changing the recognized language. However, when actions are embedded in grammars, left-factoring is not always possible. Further, left-factoring and other grammatical manipulations do not result in natural (readable) grammars.

The solution is simply to use arbitrary lookahead in the few cases where finite LL(k) for k>1 is insufficient. ANTLR allows you to specify a lookahead language with possibly infinite strings using the following syntax:

( prediction block ) => production

For example, consider the following rule that distinguishes between sets (comma-separated lists of words) and parallel assignments (one list assigned to another):

stat:   ( list "=" )=> list "=" list
    |   list
    ;

If a list followed by an assignment operator is found on the input stream, the first production is predicted. If not, the second alternative production is attempted.

Syntactic predicates are a form of selective backtracking and, therefore, actions are turned off while evaluating a syntactic predicate so that actions do not have to be undone.

Syntactic predicates are implemented using exceptions in the target language if they exist. When generating C code, longjmp would have to be used.

We could have chosen to simply use arbitrary lookahead for any non-LL(k) decision found in a grammar. However, making the arbitrary lookahead explicit in the grammar is useful because you don't have to guess what the parser will be doing. Most importantly, there are language constructs that are ambiguous for which there exists no deterministic grammar! For example, the infamous if-then-else construct has no LL(k) grammar for any k. The following grammar is ambiguous and, hence, nondeterministic:


stat:   "if" expr "then" stat ( "else" stat )?
    |   ...
    ;

Given a choice between two productions in a nondeterministic decision, we simply choose the first one. This works out well in most situations. Forcing this decision to use arbitrary lookahead would simply slow the parse down.

Fixed depth lookahead and syntactic predicates

ANTLR cannot be sure what lookahead can follow a syntactic predicate (the only logical possibility is whatever follows the alternative predicted by the predicate, but erroneous input and so on complicate this); hence, ANTLR assumes anything can follow.  This situation is similar to the computation of lexical lookahead when it hits the end of a token rule definition.

Consider a predicate with a (...)* whose implicit exit branch forces a computation attempt on what follows the loop, which is the end of the syntactic predicate in this case.

class parse extends Parser;
a	:	(A (P)*) => A (P)*
	|	A
	;

The lookahead is artificially set to "any token" for the exit branch.   Normally, the P and the "any token" would conflict, but ANTLR knows that what you mean is to match a bunch of P tokens if they are present--no warning is generated.

If more than one path can lead to the end of the predicate in any one decision, ANTLR will generate a warning.  The following rule results in two warnings.

class parse extends Parser;
a	:	(A (P|)*) => A (P)*
	|	A
	;

The empty alternative can indirectly be the start of the loop and, hence, conflicts with the P.  Further, ANTLR detects the problem that two paths reach end of predicate.  The resulting parser will compile but never terminate the (P|)* loop.

The situation is complicated by k>1 lookahead.  When the nth lookahead depth reaches the end of the predicate, it records the fact and then code generation ignores the lookahead for that depth.

class parse extends Parser;
options {
	k=2;
}
a	:	(A (P B|P )*) => A (P)*
	|	A
	;

ANTLR generates a decision of the following form inside the (..)* of the predicate:

if ((LA(1)==P) && (LA(2)==B)) {
    match(P);
    match(B);
}
else if ((LA(1)==P) && (true)) {
    match(P);
}
else {
    break _loop4;
}

This computation works in all grammar types.

ANTLR Meta-Language Grammar

See antlr/antlr.g for the grammar that describes ANTLR input grammar syntax in ANTLR meta-language itself.

Version: $Id: //depot/code/org.antlr/release/antlr-2.7.7/doc/metalang.html#2 $

Notes for using the ANTLR C# Code Generator

C# Code Generator for ANTLR 2.x

Since the release of ANTLR 2.7.3, it has been possible to generate your Lexers, Parsers and TreeParsers in the ECMA-standard C# language developed by Microsoft. This feature extends the benefits of ANTLR's predicated-LL(k) parsing technology to applications and components running on the Microsoft .NET platform and the Mono and dotGNU open-source C#/CLI platforms.

To be able to build and use the C# language Lexers, Parsers and TreeParsers, you will need to link to the ANTLR C# runtime library. The C# runtime model is based on the existing runtime models for Java and C++ and is thus immediately familiar. The C# runtime and the Java runtime in particular are very similar, although there are a number of subtle (and not so subtle) differences. Some of these result from differences in the respective runtime environments.

ANTLR C# support was contributed (and is maintained) by Kunle Odutola, Micheal Jordan and Anthony Oguntimehin.

Building the ANTLR C# Runtime

The ANTLR C# runtime source and build files are located in the lib/csharp subdirectory of the ANTLR distribution. This sub-directory is known as the ANTLR C# runtime directory. The first step in building the ANTLR C# runtime library is to ensure that ANTLR has been properly installed and built. This process is described in the ANTLR Installation Guide that comes with the distribution. Once ANTLR has been properly built, the ANTLR C# runtime can be built using either of two distinct methods:

  • Using the Microsoft Visual Studio .NET development tool.

    A Visual Studio.NET solution file named antlr.net-runtime-2.7.<X>.sln is provided in the ANTLR C# runtime directory. This allows you to build the ANTLR C# runtime library and test it with a semi-complex grammar. The solution file references three Visual Studio .NET project files:

    • lib/csharp/src/antlr.runtime-2.7.<X>.csproj - for the ANTLR C# runtime library itself (where X is a version number),
    • lib/csharp/ASTFrame/antlr.astframe.csproj - for the ANTLR C# ASTFrame library (used for displaying ASTs) and,
    • examples/csharp/java/JavaParser.csproj - for the Java grammar project located within the ANTLR C# examples directory tree.

  • Using the freely available NAnt build tool.

    A build file named antlr.runtime.build is located in the ANTLR C# runtime directory. To build the ANTLR C# runtime, run

    nant build
    from a command shell in the ANTLR C# runtime directory. You can also run
    nant release
    nant docs
    to build a release version and documentation in lib/csharp/release.

All the example grammars located in the ANTLR C# examples directory - examples\csharp are also supplied with a NAnt build file. Once the ANTLR C# library has been built, you can test it by running

nant
from a command shell in any of the example directories.

Specifying Code Generation

You can instruct ANTLR to generate your Lexers, Parsers and TreeParsers using the C# code generator by adding the following entry to the global options section at the beginning of your grammar file.

options {
    language  =  "CSharp";
}
After that things are pretty much the same as in the default java code generation mode. See the examples in examples/csharp for some illustrations.
  • TIP: If you are new to NAnt, ANTLR or the .NET platform, you might want to build your ANTLR projects with something similar to the NAnt build files used for the C# examples. The build file for the Java example in particular also shows one way to automatically copy and reference both the antlr.runtime.dll and antlr.astframe.dll assemblies during your build.

C#-Specific ANTLR Options

  • header - specify additional using directives

    You can instruct the ANTLR C# code generator to include additional using directives in your generated Lexer/Parser/TreeParser by listing the directives within the header section which must be the first section at the beginning of your ANTLR grammar file. Please note that using directives are the only source code elements that can currently be safely included in the header section for C# code generation.

    header
    {
       using SymbolTable =  kunle.parser.SymbolTable;
       using kunle.compiler;
    }
    

  • namespace - specify an enclosing C# Namespace

    You can instruct the ANTLR C# code generator to place your Lexer/Parser/TreeParser in a specific C# namespace by adding a namespace option either to the global options section at the beginning of your ANTLR grammar file or to the grammar options section for individual Lexers/Parsers/TreeParsers.

    options {
       namespace  =  "kunle.smalltalk.parser";
    }
    

A Template C# ANTLR Grammar File

header 
{
    // gets inserted in the C# source file before any
    // generated namespace declarations
    // hence -- can only be using directives
}

options {
    language  = "CSharp";
    namespace = "something";          // encapsulate code in this namespace
    classHeaderPrefix = "protected"; // use to specify access level for generated class
}

{
   // global code stuff that will be included in the source file just before the 'MyParser' class below
   ...
}
class MyParser extends Parser;
options {
   exportVocab=My;
}
{
   // additional methods and members for the generated 'MyParser' class
   ...
}

... generated RULES go here ...

{
   // global code stuff that will be included in the source file just before the 'MyLexer' class below
   ...
}
class MyLexer extends Lexer;
options {
   exportVocab=My;
}
{
   // additional methods and members for the generated 'MyLexer' class
   ...
}

... generated RULES go here ...

{
   // global code stuff that will be included in the source file just before the 'MyTreeParser' class below
   ...
}
class MyTreeParser extends TreeParser;
options {
   exportVocab=My;
}
{
   // additional methods and members for the generated 'MyTreeParser' class
   ...
}

... generated RULES go here ...

ANTLR Specification: Token Streams

Token Streams

Traditionally, a lexer and parser are tightly coupled objects; that is, one does not imagine anything sitting between the parser and the lexer, modifying the stream of tokens.   However, language recognition and translation can benefit greatly from treating the connection between lexer and parser as a token stream.  This idea is analogous to Java I/O streams, where you can pipeline lots of stream objects to produce highly-processed data streams.

Introduction

ANTLR identifies a stream of Token objects as any object that satisfies the TokenStream interface (prior to 2.6, this interface was called Tokenizer); i.e., any object that implements the following method.

Token nextToken();

Graphically, a normal stream of tokens from a lexer (producer) to a parser (consumer) might look like the following at some point during the parse.

[Figure: lexer.to.parser.tokens.gif -- a stream of tokens flowing from the lexer (producer) to the parser (consumer)]

The most common token stream is a lexer, but once you imagine a physical stream between the lexer and parser, you start imagining interesting things that you can do.  For example, you can:

  • filter a stream of tokens to strip out unwanted tokens
  • insert imaginary tokens to help the parser recognize certain nasty structures
  • split a single stream into multiple streams, sending certain tokens of interest down the various streams
  • multiplex multiple token streams onto one stream, thus, "simulating" the lexer states of tools like PCCTS, lex, and so on.

The beauty of the token stream concept is that parsers and lexers are not affected--they are merely consumers and producers of streams.  Stream objects are filters that produce, process, combine, or separate token streams for use by consumers.   Existing lexers and parsers may be combined in new and interesting ways without modification.

This document formalizes the notion of a token stream and describes in detail some very useful stream filters.

Pass-Through Token Stream

A token stream is any object satisfying the following interface.

public interface TokenStream {
  public Token nextToken()
    throws java.io.IOException;
}

For example, a "no-op" or pass-through filter stream looks like:

import antlr.*;
import java.io.IOException;

class TokenStreamPassThrough
    implements TokenStream {
  protected TokenStream input;

  /** Stream to read tokens from */
  public TokenStreamPassThrough(TokenStream in) {
    input = in;
  }

  /** This makes us a stream */
  public Token nextToken() throws IOException {
    return input.nextToken(); // "short circuit"
  }
}

You would use this simple stream by having it pull tokens from the lexer and then have the parser pull tokens from it as in the following main() program.

public static void main(String[] args) {
  MyLexer lexer =
    new MyLexer(new DataInputStream(System.in));
  TokenStreamPassThrough filter =
    new TokenStreamPassThrough(lexer);
  MyParser parser = new MyParser(filter);
  parser.startRule();
}

Token Stream Filtering

Most of the time, you want the lexer to discard whitespace and comments, however, what if you also want to reuse the lexer in situations where the parser must see the comments?  You can design a single lexer to cover many situations by having the lexer emit comments and whitespace along with the normal tokens.  Then, when you want to discard whitespace, put a filter between the lexer and the parser to kill whitespace tokens.

ANTLR provides TokenStreamBasicFilter for such situations.  You can instruct it to discard any token type or types without having to modify the lexer.  Here is an example usage of TokenStreamBasicFilter that filters out comments and whitespace.

public static void main(String[] args) {
  MyLexer lexer =
    new MyLexer(new DataInputStream(System.in));
  TokenStreamBasicFilter filter =
    new TokenStreamBasicFilter(lexer);
  filter.discard(MyParser.WS);
  filter.discard(MyParser.COMMENT);
  MyParser parser = new MyParser(filter);
  parser.startRule();
}

Note that it is more efficient to have the lexer immediately discard lexical structures you do not want because you do not have to construct a Token object.  On the other hand, filtering the stream leads to more flexible lexers.

Token Stream Splitting

Sometimes you want a translator to ignore but not discard portions of the input during the recognition phase.   For example, you want to ignore comments vis-a-vis parsing, but you need the comments for translation.   The solution is to send the comments to the parser on a hidden token stream--one that the parser is not "listening" to.  During recognition, actions can then examine the hidden stream or streams, collecting the comments and so on.  Stream-splitting filters are like prisms that split white light into rainbows.

The following diagram illustrates a situation in which a single stream of tokens is split into three.

[Figure: stream.splitter.gif -- a single stream of tokens split into three streams]

You would have the parser pull tokens from the topmost stream.

There are many possible capabilities and implementations of a stream splitter.   For example, you could have a "Y-splitter" that actually duplicated a stream of tokens like a cable-TV Y-connector.  If the filter were thread-safe and buffered, you could have multiple parsers pulling tokens from the filter at the same time.

This section describes a stream filter supplied with ANTLR called TokenStreamHiddenTokenFilter that behaves like a coin sorter, sending pennies to one bin, dimes to another, etc...  This filter splits the input stream into two streams, a main stream with the majority of the tokens and a hidden stream that is buffered so that you can ask it questions later about its contents.   Because of the implementation, however, you cannot attach a parser to the hidden stream.  The filter actually weaves the hidden tokens among the main tokens as you will see below.

Example

Consider the following simple grammar that reads in integer variable declarations.

decls: (decl)+
     ;
decl : begin:INT ID end:SEMI
     ; 

Now assume input:

int n; // list length
/** doc */
int f;

Imagine that whitespace is ignored by the lexer and that you have instructed the filter to split comments onto the hidden stream.  Now if the parser is pulling tokens from the main stream, it will see only "INT ID SEMI INT ID SEMI" even though the comments are hanging around on the hidden stream.  So the parser effectively ignores the comments, but your actions can query the filter for tokens on the hidden stream.

The first time through rule decl, the begin token reference has no hidden tokens before or after, but

filter.getHiddenAfter(end)

returns a reference to token

// list length

which in turn provides access to

/** doc */

The second time through decl

filter.getHiddenBefore(begin)

refers to the

/** doc */

comment.
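
For example, an action in decl could query the filter directly (a sketch; it assumes the parser can see the filter object, for instance via a field set up by the driver, and it casts the label because the filter traffics in CommonHiddenStreamToken objects):

decl : begin:INT ID end:SEMI
       {
         antlr.CommonHiddenStreamToken t =
             filter.getHiddenAfter((antlr.CommonHiddenStreamToken)end);
         if ( t != null ) {
             System.out.println("trailing comment: " + t.getText());
         }
       }
     ;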

Filter Implementation

The following diagram illustrates how the Token objects are physically weaved together to simulate two different streams.

[Figure: hidden.stream.gif -- hidden tokens woven among the main-stream tokens via links]

 

As the tokens are consumed, the TokenStreamHiddenTokenFilter object hooks the hidden tokens to the main tokens via a linked list.  There is only one physical TokenStream of tokens emanating from this filter, and the interwoven pointers maintain sequence information.

Because of the extra pointers required to link the tokens together, you must use a special token object called CommonHiddenStreamToken (the normal object is called CommonToken).   Recall that you can instruct a lexer to build tokens of a particular class with

lexer.setTokenObjectClass("classname");

Technically, this exact filter functionality could be implemented without requiring a special token object, but this filter implementation is extremely efficient and it is easy to tell the lexer what kind of tokens to create.  Further, this implementation makes it very easy to automatically have tree nodes built that preserve the hidden stream information.

This filter defeats ANTLR's normal lazy token consumption.  After recognizing every main-stream token, the TokenStreamHiddenTokenFilter must grab the next Token to see if it is a hidden token. Consequently, this filter is not very workable for interactive (e.g., command-line) applications.

How To Use This Filter

To use TokenStreamHiddenTokenFilter, all you have to do is:

  • Create the lexer and tell it to create token objects augmented with links to hidden tokens.
MyLexer lexer = new MyLexer(some-input-stream);
lexer.setTokenObjectClass(
  "antlr.CommonHiddenStreamToken"
);
  • Create a TokenStreamHiddenTokenFilter object that pulls tokens from the lexer.
TokenStreamHiddenTokenFilter filter =
  new TokenStreamHiddenTokenFilter(lexer);
  • Tell the TokenStreamHiddenTokenFilter which tokens to hide, and which to discard.  For example,
filter.discard(MyParser.WS);
filter.hide(MyParser.SL_COMMENT);
  • Create a parser that pulls tokens from the TokenStreamHiddenTokenFilter rather than the lexer.
MyParser parser = new MyParser(filter);
try {
  parser.startRule(); // parse as usual
}
catch (Exception e) {
  System.err.println(e.getMessage());
}

See the ANTLR fieldguide entry on preserving whitespace for a complete example.

Tree Construction

Ultimately, hidden stream tokens are needed during the translation phase, which normally means while tree walking.  How do we pass the hidden stream info to the translator without mucking up the tree grammar?  Easy: use AST nodes that save the hidden stream tokens.  ANTLR defines CommonASTWithHiddenTokens for you that hooks the hidden stream tokens onto the tree nodes automatically; methods are available to access the hidden tokens associated with a tree node.  All you have to do is tell the parser to create nodes of this node type rather than the default CommonAST.

parser.setASTNodeClass("antlr.CommonASTWithHiddenTokens");

Tree nodes are created as functions of Token objects.  The initialize() method of the tree node is called with a Token object when the ASTFactory creates the tree node.  Tree nodes created from tokens with hidden tokens before or after will have the same hidden tokens.  You do not have to use this node definition, but it works for many translation tasks:

package antlr;

/** A CommonAST whose initialization copies
 *  hidden token information from the Token
 *  used to create a node.
 */
public class CommonASTWithHiddenTokens
  extends CommonAST {
  // references to hidden tokens
  protected CommonHiddenStreamToken hiddenBefore, hiddenAfter;

  public CommonHiddenStreamToken getHiddenAfter() {
    return hiddenAfter;
  }
  public CommonHiddenStreamToken getHiddenBefore() {
    return hiddenBefore;
  }
  public void initialize(Token tok) {
    CommonHiddenStreamToken t =
      (CommonHiddenStreamToken)tok;
    super.initialize(t);
    hiddenBefore = t.getHiddenBefore();
    hiddenAfter  = t.getHiddenAfter();
  }
}

Notice that this node definition assumes that you are using CommonHiddenStreamToken objects.  A runtime ClassCastException occurs if you do not have the lexer create CommonHiddenStreamToken objects.

Garbage Collection Issues

By partitioning up the input stream and preventing hidden stream tokens from referring to main stream tokens, GC is allowed to work on the Token stream. In the integer declaration example above, when there are no more references to the first SEMI token and the second INT token, the comment tokens are candidates for garbage collection.  If all tokens were linked together, a single reference to any token would prevent GC of any tokens.  This is not the case in ANTLR's implementation.

Notes

This filter works great for preserving whitespace and comments during translation, but is not always the best solution for handling comments in situations where the output is very dissimilar to the input.  For example, there may be 3 comments interspersed within an input statement that you want to combine at the head of the output statement during translation.  Rather than having to ask each parsed token for the comments surrounding it, it would be better to have a real, physically-separate stream that buffered the comments and a means of associating groups of parsed tokens with groups of comment stream tokens.  You probably want to support questions like "give me all of the tokens on the comment stream that originally appeared between this beginning parsed token and this ending parsed token."

This filter implements the exact same functionality as JavaCC's special tokens.  Sriram Sankar (father of JavaCC) had a great idea with the special tokens and, at the 1997 Dr. T's Traveling Parsing Revival and Beer Tasting Festival, the revival attendees extended the idea to the more general token stream concept.  Now, the JavaCC special token functionality is just another ANTLR stream filter with the bonus that you do not have to modify the lexer to specify which tokens are special.

Token Stream Multiplexing (aka "Lexer states")

Now, consider the opposite problem where you want to combine multiple streams rather than splitting a single stream.  When your input contains sections or slices that are radically diverse such as Java and JavaDoc comments, you will find that it is hard to make a single lexer recognize all slices of the input.  This is primarily because merging the token definitions of the various slices results in an ambiguous lexical language or allows invalid tokens.  For example, "final" may be a keyword in one section, but an identifier in another.  Also, "@author" is a valid javadoc tag within a comment, but is invalid in the surrounding Java code.

Most people solve this problem by having the lexer sit in one of multiple states (for example, "reading Java stuff" vs "reading JavaDoc stuff").  The lexer starts out in Java mode and then, upon "/**", switches to JavaDoc mode; "*/" forces the lexer to switch back to Java mode.

Multiple Lexers

Having a single lexer with multiple states works, but having multiple lexers that are multiplexed onto the same token stream solves the same problem better because the separate lexers are easier to reuse (no cutting and pasting into a new lexer--just tell the stream multiplexor to switch to it).  For example, the JavaDoc lexer could be reused for any language problem that had JavaDoc comments.

ANTLR provides a predefined token stream called TokenStreamSelector that lets you switch between multiple lexers.  Actions in the various lexers control how the selector switches input streams.  Consider the following Java fragment.

/** Test.
 *  @author Terence
 */
int n;

Given two lexers, JavaLexer and JavaDocLexer, the sequence of actions by the two lexers might look like this:

JavaLexer: match JAVADOC_OPEN, switch to JavaDocLexer
JavaDocLexer: match AUTHOR
JavaDocLexer: match ID
JavaDocLexer: match JAVADOC_CLOSE, switch back to JavaLexer
JavaLexer: match INT
JavaLexer: match ID
JavaLexer: match SEMI

In the Java lexer grammar, you will need a rule to perform the switch to the JavaDoc lexer (recording on the stack of streams the "return lexer"):

JAVADOC_OPEN
    :    "/**" {selector.push("doclexer");}
    ;

Similarly, you will need a rule in the JavaDoc lexer to switch back:

JAVADOC_CLOSE
    :    "*/" {selector.pop();}
    ;

The selector has a stack of streams so the JavaDoc lexer does not need to know who invoked it.

Graphically, the selector combines the two lexer streams into a single stream presented to the parser.

[Figure: stream.selector.gif -- the selector multiplexing two lexer streams onto the single stream seen by the parser]

The selector can maintain a list of streams for you so that you can switch to another input stream by name, or you can tell it to switch to an actual stream object.

public class TokenStreamSelector implements TokenStream {
  public TokenStreamSelector() {...}
  public void addInputStream(TokenStream stream,
    String key) {...}
  public void pop() {...}
  public void push(TokenStream stream) {...}
  public void push(String sname) {...}
  /** Set the stream without pushing old stream */
  public void select(TokenStream stream) {...}
  public void select(String sname)
    throws IllegalArgumentException {...}
}

Using the selector is easy:

  • Create a selector.
TokenStreamSelector selector =
  new TokenStreamSelector();
  • Name the streams (you don't have to name them--you can use stream object references instead to avoid the hashtable lookup on each switch).
selector.addInputStream(mainLexer, "main");
selector.addInputStream(doclexer, "doclexer");
  • Select which lexer reads from the char stream first.
// start with main java lexer
selector.select("main");
  • Attach your parser to the selector instead of one of the lexers.
JavaParser parser = new JavaParser(selector);

Lexers Sharing Same Character Stream

Before moving on to how the parser uses the selector, note that the two lexers have to read characters from the same input stream.  Prior to ANTLR 2.6.0, each lexer had its own line number variable, input char stream variable and so on.  In order to share the same input state, ANTLR 2.6.0 factors the portion of a lexer dealing with the character input into an object, LexerSharedInputState, that can be shared among n lexers (single-threaded).  To get multiple lexers to share state, you create the first lexer, ask for its input state object, and then use that when constructing any further lexers that need to share that input state:

// create Java lexer
JavaLexer mainLexer = new JavaLexer(input);
// create javadoc lexer; attach to shared
// input state of java lexer
JavaDocLexer doclexer =
  new JavaDocLexer(mainLexer.getInputState());
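
Putting the pieces together, a driver might look like the following sketch (it assumes the lexer grammars can reach the selector used by their push/pop actions, for example through a static field such as Main.selector):

import antlr.*;
import java.io.DataInputStream;

public class Main {
  // shared selector; the lexer actions are assumed to reference it
  public static TokenStreamSelector selector = new TokenStreamSelector();

  public static void main(String[] args) throws Exception {
    JavaLexer mainLexer = new JavaLexer(new DataInputStream(System.in));
    JavaDocLexer doclexer =
      new JavaDocLexer(mainLexer.getInputState()); // share character input state
    selector.addInputStream(mainLexer, "main");
    selector.addInputStream(doclexer, "doclexer");
    selector.select("main");            // the Java lexer reads characters first
    JavaParser parser = new JavaParser(selector);
    parser.input();                     // assumed start rule (see the grammar below)
  }
}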

Parsing Multiplexed Token Streams

Just as a single lexer may have trouble producing a single stream of tokens from diverse input slices or sections, a single parser may have trouble handling the multiplexed token stream.  Again, a token that is a keyword in one lexer's vocabulary may be an identifier in another lexer's vocabulary.  Factoring the parser into separate subparsers for each input section makes sense to handle the separate vocabularies as well as for promoting grammar reuse.

The following parser grammar uses the main lexer token vocabulary (specified with the importVocab option) and upon JAVADOC_OPEN it creates and invokes a JavaDoc parser to handle the subsequent stream of tokens from within the comment.

class JavaParser extends Parser;
options {
    importVocab=Java;
}

input
    :   ( (javadoc)? INT ID SEMI )+
    ;

javadoc
    :   JAVADOC_OPEN
        {
        // create a parser to handle the javadoc comment
        JavaDocParser jdocparser =
          new JavaDocParser(getInputState());
        jdocparser.content(); // go parse the comment
        }
        JAVADOC_CLOSE
    ;

You will note that ANTLR parsers from 2.6.0 also share token input stream state.   When creating the "subparser", JavaParser tells it to pull tokens from the same input state object.

The JavaDoc parser matches a bunch of tags:

class JavaDocParser extends Parser;
options {
    importVocab=JavaDoc;
}

content
    :   (   PARAM // includes ID as part of PARAM
        |   EXCEPTION
        |   AUTHOR
        )*
    ;

When the subparser rule content finishes, control is naturally returned to the invoking method, javadoc, in the Java parser.

The Effect of Lookahead Upon Multiplexed Token Streams

What would happen if the parser needed to look two tokens ahead at the start of the JavaDoc comment?  In other words, from the perspective of the main parser, what is the token following JAVADOC_OPEN?   Token JAVADOC_CLOSE, naturally!  The main parser treats any JavaDoc comment, no matter how complicated, as a single entity; it does not see into the token stream of the comment nor should it--the subparser handles that stream.

What is the token following the content rule in the subparser?  "End of file".  The analysis of the subparser cannot determine what random method will call it from your code.  This is not an issue because there is normally a single token that signifies the termination of the subparser.  Even if EOF gets pulled into the analysis somehow, EOF will not be present on the token stream.

Multiple Lexers Versus Calling Another Lexer Rule

Multiple lexer states are also often used to handle very complicated single   tokens such as strings with embedded escape characters where input "\t" should not be allowed outside of a string.  Typically, upon the initial quote, the lexer switches to a "string state" and then switches back to the "normal state" after having matched the guts of the string.

So-called "modal" programming, where your code does something different depending on a mode, is often a bad practice.  In the situation of complex tokens, it is better to explicitly specify the complicated token with more rules.  Here is the golden rule of when to and when not to use multiplexed token streams:

Complicated single tokens should be matched by calling another (protected) lexer rule whereas streams of tokens from diverse slices or sections should be handled by different lexers multiplexed onto the same stream that feeds the parser.

For example, the definition of a string in a lexer should simply call another rule to handle the nastiness of escape characters:

STRING_LITERAL
    :    '"' (ESC|~('"'|'\\'))* '"'
    ;

protected // not a token; only invoked by another rule.
ESC
    :    '\\'
        (    'n'
        |    'r'
        |    't'
        |    'b'
        |    'f'
        |    '"'
        |    '\''
        |    '\\'
        |    ('u')+
             HEX_DIGIT HEX_DIGIT HEX_DIGIT HEX_DIGIT 
        ...
       )
    ;

TokenStreamRewriteEngine: Easy Syntax-Directed Translation

There are many common situations where you want to tweak or augment a program or data file. ANTLR 2.7.3 introduced (Java/C# versions only) a very simple but powerful TokenStream targeted at the class of problems where:
  1. the output language and the input language are similar
  2. the relative order of language elements does not change
See the Syntax Directed TokenStream Rewriting article on the antlr website.

The Future

The ANTLR 2.6 release provides the basic structure for using token streams--future versions will be more sophisticated once we have experience using them.

The current "hidden token" stream filter clearly solves the "ignore but preserve whitespace" problem really well, but it does not handle comments too well in most situations.  For example, in real translation problems you want to collect comments at various single tree nodes (like DECL or METHOD) for interpretation rather than leaving them strewn throughout the tree.  You really need a stream splitter that buffers up the comments on a separate stream so you can say "give me all comments   consumed during the recognition of this rule" or "give me all comments found between these two real tokens." That is almost certainly something you need for translation of comments.

Token streams will lead to fascinating possibilities.  Most folks are not used to thinking about token streams so it is hard to imagine what else they could be good for.  Let your mind go wild.  What about embedded languages where you see slices (aspects) of the input such as Java and SQL (each portion of the input could be sliced off and put through on a different stream)?  What about parsing Java .class files with and without debugging information?  If you have a parser for .class files without debug info and you want to handle .class files with debug info, leave the parser alone and augment the lexer to see the new debug structures.  Have a filter split the debug tokens off onto a different stream and the same parser will work for both types of .class files.

Later, I would like to add "perspectives", which are really just another way to look at filters.  Imagine a raw stream of tokens emanating from a lexer--the root perspective.  I can build up a tree of perspectives very easily from there.  For example, given a Java program with embedded SQL, you might want multiple perspectives on the input stream for parsing or translation reasons:

stream.perspectives.gif (2679 bytes)

You could attach a parser to the SQL stream or the Java stream minus comments, with actions querying the comment stream.

In the future, I would also like to add the ability of a parser to generate a stream of tokens (or text) as output just like it can build trees now.  In this manner, multipass parsing becomes a very natural and simple problem because parsers become stream producers also.  The output of one parser can be the input to another.

Version: $Id: //depot/code/org.antlr/release/antlr-2.7.7/doc/streams.html#2 $

antlr-2.7.7/doc/j-guru-blue.jpg

Notes for using the ANTLR Python Code Generator

Python Code Generator for ANTLR 2.7.7

As of ANTLR 2.7.5, you can generate your Lexers, Parsers and TreeParsers in Python. This feature extends the benefits of ANTLR's predicated-LL(k) parsing technology to the Python language and platform.

To be able to build and use the Python language Lexers, Parsers and TreeParsers, you will need to have the ANTLR Python runtime library installed in your Python path. The Python runtime model is based on the existing runtime model for Java and is thus immediately familiar. The Python runtime and the Java runtime are very similar although there are a number of subtle (and not so subtle) differences. Some of these result from differences in the respective runtime environments.

ANTLR Python support was contributed (and is to be maintained) by Wolfgang Haefelinger and Marq Kole.

Building the ANTLR Python Runtime

The ANTLR Python runtime source and build files are completely integrated in the ANTLR build process. The ANTLR runtime support module for Python is located in the lib/python subdirectory of the ANTLR distribution. Installation of the Python runtime support is enabled automatically if Python can be found on your system by the configure script.

With Python support enabled, the current distribution will look for a python executable of version 2.2 or higher. If it finds such a beast, it will generate and install the ANTLR Python runtime as part of the overall ANTLR build and installation process.

If the python distribution you are using is at an unusual location, perhaps because you are using a local installation instead of a system-wide one, you can provide the location of that python executable using the --with-python=<path> option for the configure script, for instance:

./configure --with-python=$HOME/bin/python2.3

Also, if the python executable is at a regular location but has a name that differs from "python", you can specify the correct name through the --with-python=<path> option, as shown above, or through the environment variable $PYTHON:

PYTHON=python2.3
export PYTHON
./configure

All the example grammars for the ANTLR Python runtime are built when ANTLR itself is built. They can be run in one go by running make test in the same directory where you ran the configure script in the ANTLR distribution. So after you've run configure you can do:

# Build ANTLR and all examples
make
# Run them
make test
# Install everything
make install

Note that make install will not add the ANTLR Python runtime (i.e. antlr.py) to your Python installation but rather install antlr.py in ${prefix}/lib. To be able to use antlr.py you would need to adjust Python's sys.path.
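
For example, here is a minimal sketch of such a sys.path adjustment; it assumes the default prefix /usr/local, so that antlr.py was installed as /usr/local/lib/antlr.py (adjust the path to your installation):

import sys
sys.path.insert(0, "/usr/local/lib")   # directory that contains antlr.py

import antlr                           # the ANTLR Python runtime
print antlr.__file__                   # shows which antlr.py was picked up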

However, a script is provided that lets you easily add antlr.py as a module to your Python installation. After installation, just run

${prefix}/sbin/pyantlr.sh install

Note that you usually need to be superuser in order to succeed. Also note that you can run this command again at any later time, for example after adding a second Python installation. Just make sure that python is in your $PATH when running pyantlr.sh.

Note further that you can also install the ANTLR Python runtime immediately after having called ./configure:

scripts/pyantlr.sh install

Specifying Code Generation

You can instruct ANTLR to generate your Lexers, Parsers and TreeParsers using the Python code generator by adding the following entry to the global options section at the beginning of your grammar file.

options {
    language="Python";
}

After that, things are pretty much the same as in the default Java code generation mode. See the examples in examples/python for some illustrations.

One particular issue that is worth mentioning is the handling of comments in ANTLR Python. Java, C++, and C# all use the same lexical structures to define comments: // for single-line comments, and /* ... */ for block comments. Unfortunately, Python does not handle comments this way. It only knows about single-line comments, and these start off with a # symbol.

Normally, all comments outside of actions are actually comments in the ANTLR input language. These comments, both block comments and single-line comments, are translated into Python single-line comments.

Secondly, all comments inside actions should be comments in the target language, Python in this case. Unfortunately, if the actions contain ANTLR action symbols, such as $getText, the code generator seems to choke on Python comments, as the # sign is also used in tree construction. The solution is to use Java/C++-style comments in all actions; these will be translated into Python comments by ANTLR as it checks these actions for the presence of predefined action symbols such as $getText.

So, as a general issue: all comments in an ANTLR grammar for the Python target should be in Java/C++ style, not in Python style.

Python-Specific ANTLR Sections

  • header - specify additional import directives

    You can instruct the ANTLR Python code generator to import additional Python packages in your generated Lexer/Parser/TreeParser by adding code to the header section which must be the first section at the beginning of your ANTLR grammar file, apart from any other header sections.

    header {
       import os, sys
    }
    
  • header "__init__" - specify additional code in the __init__ method

    You can instruct the ANTLR Python code generator to include additional Python code in your generated Lexer/Parser/TreeParser by adding code to the "__init__" header section, which must be the first section at the beginning of your ANTLR grammar file, apart from any other header sections. The code in the header is appended to the end of the __init__ method.

    header "__init__" {
       self.message = "This is the default message"
    }
    

    If your grammar file contains both a Lexer and a Parser (or any other combination of definitions), the code in the __init__ header will be reproduced in the __init__ methods of all of these definitions without change. If you really want to update only one of the definitions, for instance, the __init__ method of the Lexer class you are creating, use

    header "<LexerGrammar>.__init__" {
       self.message = "This is the default message"
    }
    

    where <LexerGrammar> is the name of the Lexer grammar. The same construction also works with the Parsers and TreeParsers, of course.

    If both a generic __init__ header and a grammar-specific header are present, the grammar-specific one overrides the generic one.

  • header "__main__" - specify additional code after the class definition

    You can instruct the ANTLR Python code generator to add additional Python code at the end of your generated Lexer/Parser/TreeParser, i.e. after the class definition itself, by adding code to the __main__ header section, which must be the first section at the beginning of your ANTLR grammar file, apart from any other header sections.

    header "__main__" {
        print "You cannot execute this file!"
    }
    

    If your grammar file contains both a Lexer and a Parser (or any other combination of definitions), the code in the __main__ header will be reproduced at the end of all of the generated class definitions. If you really want to add code after only one of the definitions, for instance, after the Lexer class, use

    header "<LexerGrammar>.__main__" {
        print "You cannot execute this file!"
    }
    

    where <LexerGrammar> is the name of the Lexer grammar. The same construction also works with the Parsers and TreeParsers, of course.

    If both a generic __main__ header and a grammar-specific header are present, the grammar-specific one overrides the generic one. If no __main__ headers are present and the grammar is for a Lexer, automated test code for that lexer is automatically added at the end of the generated module. This can be prevented by providing an empty __main__ header. In the latter case it is good practice to provide a comment explaining why an empty header is present.

    header "<LexerGrammar>.__main__" {
        // Empty main header to prevent automatic test code from being added
        // to the generated lexer module.
    }
    

    This automated test code can be executed by running Python with the generated lexer file (<LexerGrammar>.py, where <LexerGrammar> is the name of the Lexer grammar) and providing some test input on stdin:

    python <LexerGrammar>.py < test.in
    

Python-Specific ANTLR Options

  • className - change the default name of the generated class

    options {
        className="Scanner";
    }
    

    If you are using the className option in conjunction with the Python-specific header options, there will be no collisions. The className option changes the class name, while the grammar-specific headers require the use of the grammar name, which becomes the module name after code generation.

    header "ParrotSketch.__init__" {
        self.state = JohnCleese.select("dead", "pushing up daisies", \
                                       "no longer", "in Parrot Heaven")
        print "This parrot is", self.state
    }
    
    class ParrotSketch extends Lexer;
    
    options {
        className="Scanner";
    }
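
    With a grammar like the one above, the generated module is still named after the grammar (ParrotSketch.py), while className only renames the class inside it. A small usage sketch under that assumption:

    import ParrotSketch                # module is named after the grammar
    lexer = ParrotSketch.Scanner()     # class was renamed by the className option
    for token in lexer:
        print token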
    

A Template Python ANTLR Grammar File

As the handling of modules (packages in Java speak) in Python differs from that in Java, the current approach in ANTLR of naming both the file and the class it contains after the grammar is kind of awkward. Instead, a different approach is chosen that better reflects the handling of modules in Python. The name of the generated Python file is still derived from the name of the grammar, but the name of the class is fixed to the particular kind of grammar. A lexer grammar will be used to generate a class Lexer; a parser grammar will be used to generate a class Parser; and a treeparser grammar will be used to generate a class Walker.

header {
    // gets inserted in the Python source file before any generated
    // declarations
    ...
}
header "__init__" {
    // gets inserted in the __init__ method of each of the generated Python
    // classes
    ...
}
header "MyParser.__init__" {
    // gets inserted in the __init__ method of the generated Python class
    // for the MyParser grammar
    ...
}
header "__main__" {
    // gets inserted at the end of each of the generated Python files in an
    // indented section preceded by the conditional:
    // if __name__ == "__main__":
    ...
}
header "MyLexer.__main__" {
    // gets inserted at the end of the generated Python file for the MyLexer
    // grammar in an indented section preceded by the conditional:
    // if __name__ == "__main__":
    // and preventing the insertion of automatic test code in the same place.
    ...
}
options {
    language  = "Python";
}
{
    // global code stuff that will be included in the 'MyParser.py' source
    // file just before the 'Parser' class below
    ...
}
class MyParser extends Parser;
options {
   exportVocab=My;
}
{
   // additional methods and members for the generated 'Parser' class
   ...
}
... generated RULES go here ...
{
   // global code stuff that will be included in the 'MyLexer' source file
   // just before the 'Lexer' class below
   ...
}
class MyLexer extends Lexer;
options {
   exportVocab=My;
}
{
   // additional methods and members for the generated 'Lexer' class
   ...
}
... generated RULES go here ...
{
   // global code stuff that will be included in the 'MyTreeParser' source
   // file just before the 'Walker' class below
   ...
}
class MyTreeParser extends TreeParser;
options {
   exportVocab=My;
}
{
   // additional methods and members for the generated 'Walker' class
   ...
}
... generated RULES go here ...

Version number in parentheses shows the tool version used to develop and test. It may work with older versions as well. Python 2.2 or better is required as some recent Python features (like super() for example) are being used.

More notes on using ANTLR Python

  • The API of the generated lexers, parsers, and treeparsers is supposed to be similar to that of the Java ones. However, calling a lexer is somewhat simplified:

    ### class "calcLexer extends Lexer" will generate python
    ### module "calcLexer" with class "Lexer". 
    import calcLexer
    ### read from stdin ..
    L = calcLexer.Lexer() 
    ### read from file "test.in" ..
    L = calcLexer.Lexer("test.in")
    ### open a file and read from it ..
    f = file("test.in", "r")
    L = calcLexer.Lexer(f)
    ### this works of course as well
    import sys
    L = calcLexer.Lexer(sys.stdin)
    ### use a shared input state
    L1 = calcLexer.Lexer(...)
    state = L1.inputState
    L2 = calcLexer.Lexer(state)
    
  • The loop for retrieving tokens one by one from the lexer can be written as:

    lexer = calcLexer.Lexer()          ### create a lexer for calculator
    for token in lexer:
        ## do something with token
        print token
    
    or even:
    for token in calcLexer.Lexer():    ### create a lexer for calculator
        ## do something with token
        print token
    

    As an iterator is available for all TokenStreams, you can apply the same technique with a TokenStreamSelector.
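
    For instance, here is a sketch of such a loop over a selector; the generated modules calcLexer and docLexer are assumed, and the runtime's TokenStreamSelector is used in the same way as in the Java examples of the token stream documentation:

    import sys
    import antlr
    import calcLexer, docLexer               # assumed generated lexer modules

    main = calcLexer.Lexer(sys.stdin)
    doc  = docLexer.Lexer(main.inputState)   # share one character input state

    selector = antlr.TokenStreamSelector()
    selector.addInputStream(main, "main")
    selector.addInputStream(doc, "doc")
    selector.select("main")

    for token in selector:                   # the selector is a TokenStream, too
        print token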

  • However, writing this particular lexer loop is rarely necessary as it is generated by default in each generated lexer. Just run:

    python calcLexer.py < calc.in
    
    to test the generated lexer.
  • Symbolic token numbers, the table of literals, bitsets and bitset data functions are generated at file (module) scope instead of class scope. For example:

    import calcLexer      # import calc lexer module
      
    calcLexer.EOF_TYPE    # prints 1
    calcLexer.literals    # { ';': 11, 'end': 12, 'begin': 10 }
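
    A short sketch of how these module-level names can be used without instantiating the lexer (the calculator lexer and its literals table from the example above are assumed):

    import calcLexer

    print calcLexer.EOF_TYPE                 # symbolic token type at module scope
    for word, ttype in calcLexer.literals.items():
        print "literal", word, "has token type", ttype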
    
  • Comments in actions should be in Java/C++ format, i.e. // and /* ... */ are valid comments. However, make sure that you put a comment before or after a statement, but not within one. For example, this will not work:

    x = /* one */ 1
    

    The reason is that Python only supports single-line comments. Such a Python comment skips everything till end-of-line. Therefore in the translation of the comment a newline will be introduced on reaching */. The code above would result in the following Python code in the generated file:

    x = # one
    1
    

    which is probably not what you want.
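
    Placing the comment on a line of its own, or after the complete statement, translates cleanly:

    /* one */
    x = 1      // a trailing comment after the statement is also fine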

  • The Lexer actions $newline, $nl and $skip have been introduced as language independent shortcuts for calling self.newline() ($newline, $nl) and _ttype = SKIP ($skip).
  • In Python, arguments to function and method calls do not have a declared type. Also, functions and methods do not have to declare a return type. If you want to pass a value to a rule in your grammar, you can do so simply by providing the name of a variable.

    ident [symtable]
        :   ( 'a'..'z' | '0'..'9' )+
        ;
    

    Similarly, if you want a rule to return a value, you do not have to provide a type either. It is possible to provide a default value.

    sign returns [isPos = False]
        :    '-' { /* default value is OK */ }
        |    '+' { isPos = True }
        ;
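
    Here is a usage sketch from plain Python code; it assumes rules like the two above were generated into a parser module myParser (module and rule names are only illustrative, and the Parser class name follows the naming scheme described earlier):

    import sys
    import myLexer, myParser          # assumed generated modules

    parser = myParser.Parser(myLexer.Lexer(sys.stdin))

    symtable = {}
    parser.ident(symtable)            # rule argument: any Python object will do
    isPos = parser.sign()             # rule return value, defaults to False
    print isPos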
    
  • The __init__ method of the generated Lexer, Parser, or TreeParser has the following heading:

    def __init__(self, *args, **kwargs):
        ...
    

    So if you need to pass special arguments to your generated class, you can use **kwargs to check for a particular keyword argument, irrespective of any non-keyword arguments that you provide. So if you have a TokenStreamSelector that you want to access locally, you can pass it to the Lexer in the following call:

    MySpecialLexer.Lexer(sys.stdin, selector=TokenStreamSelector())
    

    while in the __init__ header of this particular grammar you can specify the handling of the selector keyword argument in the following way:

    header "MyParser.__init__" {
        self.selector = None
        if kwargs.has_key("selector"):
            self.selector = kwargs["selector"]
            assert(isinstance(self.selector, TokenStreamSelector))
    
    }
    
  • Because of limitations in the lexer of the ANTLR compiler generator itself, you cannot use single quoted strings of more than one character in your Python code.
    So if you use a Python string like 'wink, wink, nudge, nudge' in one of your actions, ANTLR will give a parse error when you try to compile this grammar. Instead you should use double quotes: "wink, wink, nudge, nudge".

  • Unicode is supported, but it's easy to run into errors if your terminal (output device) is not able to handle Unicode chars.
    Here are some rules when using Unicode input:

    1. You need to wrap your input stream with a stream reader that translates bytes into Unicode chars. This usually requires knowledge of your input's encoding. Assuming, for example, that your input is 'latin1', you would do this:
      ### replace  stdin  with  a  wrapper that spits out
      ### unicode chars.       
      sys.stdin = codecs.lookup('latin1')[-2](sys.stdin)
      
      Here reading from stdin gets wrapped.
    2. When printing tokens etc. containing Unicode chars, it appears to be best to translate explicitly to a unicode string before printing. Consider:
      for token in unicode_l.Lexer() :
          print unicode(token)   ## explicit cast
      
      The need for this explicit cast appears to be due to a bug in Python found during development (discussion still in progress).
antlr-2.7.7/scripts/0000755000175000017500000000000010522211616014270 5ustar twernertwernerantlr-2.7.7/scripts/Rules.make0000644000175000017500000002400610522211616016223 0ustar twernertwerner############################################################################### # $Id:$ ############################################################################### # # Rules.make : # General make rules, supporting # -compilation of .cpp to .o, .cpp to .s and .s to .o # -combining of multiple .o files into one single .o file # -creation of .a archives from multiple .o files # -recursive subdirectories # -recursive (dist)clean # -creation of binary executables # -doxygen generated docs # .EXPORT_ALL_VARIABLES: # # Depending on the definition of these variables in submakefiles different # targets get build. # NOTE: These should not be exported so we unexport them right here. # unexport SUBDIRS # specifying all subdirs from $(TOPDIR) unexport SUB_DIRS # specifying all subdirs to make unexport ALL_SUB_DIRS # specifying all subdirs to process (dep/clean) unexport O_TARGET # name of combined .o file to generate unexport O_OBJS # name of .o files to combine into $(O_TARGET) unexport L_TARGET # name of .a archive to generate unexport L_OBJS # name of .o files to place in $(L_TARGET) unexport B_NAME # name of binary to generate unexport B_OBJS # name of .o files to combine into $(B_NAME) unexport GCJ_B_NAME # Rules for gcj (native java binaries) unexport GCJ_B_OBJS unexport DOXY_TARGET # name of a doxygen config file to build docs from unexport DOXY_GENDIR # name of dir doxygen generates docs into unexport JAR_TARGETS # jar files.. unexport JAR_DEST # jar files.. unexport G_FILES # antlr .g files unexport G_TAG_FILES # empty file to record compiling .g files unexport G_TARGETS # per-Makefile list of ANTLR generated files unexport C_TARGETS # name of additional targets to "clean" unexport DC_TARGETS # name of additional targets to "distclean" # # Implicit rules # %.s: %.cpp $(CXX) $(CXXFLAGS) $(EXTRA_CXXFLAGS) -S $< -o $@ # # These rules support buiding the .o files into a obj_dir different than the # current dir. This keeps the source dirs nice and clean.. # FIXME: VPATH? # ifdef obj_dir OBJDIR_DEP := $(obj_dir)objects else OBJDIR_DEP := obj_dir := endif $(obj_dir)%.o: %.cpp $(OBJDIR_DEP) $(CXX) -c -o $@ $< $(CXXFLAGS) $(SHARED_FLAGS) $(obj_dir)%.o: %.c $(CC) $(CFLAGS) $(SHARED_FLAGS) -c -o $@ $< $(obj_dir)%.o: %.s $(AS) $(ASFLAGS) -o $@ $< # # Automatic dependency generation rules # $(obj_dir)%.d: %.c $(OBJDIR_DEP) @echo "Building dependencies for $<" @$(CC) -MM -MG $(CFLAGS) $< | $(SED) -e 's,\($*\)\.o[ :]*,$(obj_dir)\1.o $@ : ,g' > $@ $(obj_dir)%.d: %.cpp $(OBJDIR_DEP) @echo "Building dependencies for $<" @$(CXX) -MM -MG $(CXXFLAGS) $< | $(SED) -e 's,\($*\)\.o[ :]*,$(obj_dir)\1.o $@ : ,g' > $@ $(obj_dir)%.d: %.g $(OBJDIR_DEP) ; @echo "Building dependencies for $<" @echo "$($(addsuffix _FILES,$(subst .,_,$<))): $(obj_dir).$*.g ;" > $@ # # How to make .x.g files from .g files. (ANTLR) # A .x.g file is an empty target to record running ANTLR on x.g # Customize flags per file 'x.g' by setting x_FLAGS # The chmod is dirty but it makes life a lot easier with perforce # $(obj_dir).%.g: %.g $(OBJDIR_DEP); @ -$(CHMOD) -f u+w $($(addsuffix _FILES, $(subst .,_,$^))) 2> /dev/null $(ANTLR) $(ANTLR_FLAGS) $($*_FLAGS) $^ @ $(TOUCH) $@ ifdef G_FILES # make list of the sentinel files of the .g files G_TARGETS := $(addprefix .,$(G_FILES)) endif # # How to build class files # # Take along existing CLASSPATH definition. 
Necessary for jikes. ifdef JAVAC_CLASSPATH ifneq ($(JAVAC_CLASSPATH),) javac_paths= -classpath $(call fix_path,$(TOPDIR)$(PATH_SEPARATOR)$(JAVAC_CLASSPATH)) else javac_paths = -classpath $(call fix_path,$(TOPDIR)) endif endif # # For native binary java # %.o: %.java ifdef VERBOSE $(GCJ) -c -o $@ $< -classpath $(TOPDIR) $(GCJFLAGS) $(EXTRA_GCJFLAGS) else @ echo "Building native binary for $<" @ $(GCJ) -c -o $@ $< -classpath $(TOPDIR) $(GCJFLAGS) $(EXTRA_GCJFLAGS) endif # # For class files # %.class: %.java ifdef VERBOSE $(JAVAC) $(JAVAC_FLAGS) $(javac_paths) $(obj_dir_arg) $< else @ echo "Building java bytecode for $<" @ $(JAVAC) $(JAVAC_FLAGS) $(javac_paths) $(obj_dir_arg) $< endif unexport obj_dir_arg javac_paths # # Note: this stuff is just too annoying to use.... Now do it in the # submakefile itself # # How to build a Java jar file. # # Note: The jar contents are taken from the rule dependency # list; thus the user must explicitly define dependencies per # jar target. E.g. # x.jar : $(x_jar_FILES) # Make performs variable expansion before implicit rule search, # hence, the desired implicit rule dependency # $($(subst .,_,$%)_jar_FILES) # is an undefined variable, resulting in an empty dependency # list. # #%.jar: # @ echo "===========================================" # @ echo "Making $(JAR_DEST)/$(@)..." # @ rm -f $@ # @ (cd $(JAR_DEST) $(JAR) cf $(JAR_DEST)/$@ $(subst $$,\$$,$^) # @ echo "===========================================" # # Get things started: # 1. Build ANTLR generated files, subdirectories. # 2. Remaining TARGETS # first_rule: sub_dirs $(MAKE) all_targets unexport TARGETS # # Build default targets # all_targets: $(G_TARGETS) $(O_TARGET) $(L_TARGET) $(GCJ_B_NAME) $(B_NAME) $(JAR_TARGETS) $(GCJ_B_NAME) # # Compile a set of .o files into one .o file # ifdef O_TARGET $(O_TARGET): $(O_OBJS) -$(RM) -f $@ ifneq "$(strip $(O_OBJS))" "" $(LD) $(EXTRA_LDFLAGS) -r -o $@ $(O_OBJS) else $(AR) rus $@ endif endif # # Compile a set of .o files into one .a file # ifdef L_TARGET $(L_TARGET): $(L_OBJS) -$(RM) -f $@ $(AR) $(EXTRA_ARFLAGS) $(ARFLAGS) $@ $(L_OBJS) $(RANLIB) $@ endif # # This takes care of creating obj_dirs's # ifdef obj_dir $(OBJDIR_DEP): ; $(MKDIR) -p $(obj_dir) $(TOUCH) $(OBJDIR_DEP) endif ifeq ($(ANTLR_WIN32),"yes") ifdef DLL_TARGET $(DLL_TARGET): $(DLL_OBJS) -$(RM) -f $@ $(CXX) -o $@ -Wl,-mdll $(L_OBJS) endif endif ifdef DOXY_TARGET gen_doc: $(DOXY_TARGET) ; ifdef DOXY_GENDIR ifneq ($(DOXY_GENDIR),) -$(RM) -f $(DOXY_GENDIR)/* endif endif ifneq ($(DOXYGEN),"") $(DOXYGEN) -f $(DOXY_TARGET) else echo "Doxygen not installed skipping $(DOXY_TARGET)" endif endif # Rule to make subdirs # .PHONY: $(SUB_DIRS) sub_dirs sub_dirs: $(SUB_DIRS) ifdef SUB_DIRS ifneq ($(strip $(SUB_DIRS)),) $(SUB_DIRS): @echo ">>>>>>>>>>>>>>>>>>>> Entering $@ ..." 
$(MAKE) -C $@ @echo "<<<<<<<<<<<<<<<<<<<< Leaving $@" endif endif # # Rule to make binaries # ifdef B_NAME $(B_NAME): $(B_OBJS) -$(RM) -f $@ $(CXX) -o $@ $(EXTRA_LDFLAGS) $(B_OBJS) $(EXTRA_LIBS) endif ifdef GCJ_B_NAME $(GCJ_B_NAME): $(GCJ_B_OBJS) @ -$(RM) -f $@ $(GCJ) $(GCJFLAGS) $(EXTRA_GCJFLAGS) -o $@ --main=antlr.Tool $(GCJ_B_OBJS) endif # # Include dependency files if they exist (and we're not cleaning) # ifneq (clean,$(findstring clean,$(MAKECMDGOALS))) -include .depend ifneq ($(SOURCE),) -include $(addprefix $(obj_dir),$(SOURCE:.cpp=.d)) endif ifneq ($(SOURCES),) -include $(addprefix $(obj_dir),$(SOURCES:.cpp=.d)) endif ifneq ($(CXXSOURCE),) -include $(addprefix $(obj_dir),$(CXXSOURCE:.cpp=.d)) endif ifneq ($(GCJSOURCES),) -include $(GCJSOURCES:.java=.d) endif ifneq ($(CSOURCE),) -include $(addprefix $(obj_dir),$(CSOURCE:.c=.d)) endif ifneq ($(G_FILES),) -include $(addprefix $(obj_dir),$(G_FILES:.g=.d)) endif endif # # Rule to bootstrap from external ANTLR jar. # .PHONY: bootstrap_g bootstrap_g: ANTLR := $(ANTLR_BOOTSTRAP) bootstrap_g: ANTLR_FLAGS := $(ANTLR_BOOTSTRAP_FLAGS) bootstrap_g: $(G_TAG_FILES) ifdef ALL_SUB_DIRS @set -e; for i in $(ALL_SUB_DIRS); do $(MAKE) -C $$i bootstrap_g; done endif # # Rule to clean ANTLR generated files (corresponding # to bootstrap_g targets). # .PHONY: clean_g clean_g: ifdef ALL_SUB_DIRS @set -e; for i in $(ALL_SUB_DIRS); do $(MAKE) -C $$i clean_g; done endif -$(RM) -f $(G_TAG_FILES) $(G_TARGETS) # # Rule to remove all objects, cores, etc.; leaving # ANTLR generated and config files. # .PHONY: mostlyclean mostlyclean: ifdef obj_dir ifneq ($(obj_dir),) -$(RM) -f $(obj_dir)/* endif endif ifdef DOXY_GENDIR ifneq ($(DOXY_GENDIR),) -$(RM) -f $(DOXY_GENDIR)/* endif endif ifdef ALL_SUB_DIRS set -e; for i in $(ALL_SUB_DIRS); do $(MAKE) -C $$i mostlyclean; done endif -$(RM) -f *.o *.class *.a *.so core *.s $(B_NAME) $(C_TARGETS) $(JAR_TARGETS) # # Rule to remove all objects, cores, ANTLR generated, etc.; # leaving configure generated files. # .PHONY: clean clean: mostlyclean clean_g ifdef obj_dir # make sure to do nothing if obj_dir empty... ifneq ($(obj_dir),) @-test -d $(obj_dir) && $(RM) -f $(obj_dir)/* $(obj_dir)/.*.g @-test -d $(obj_dir) && $(RMDIR) $(obj_dir) endif endif ifdef ALL_SUB_DIRS set -e; for i in $(ALL_SUB_DIRS); do $(MAKE) -C $$i clean; done endif # # Rule to remove all objects, cores, ANTLR generated, # configure generated, etc.; leaving files requiring # additional programs to generate (e.g., autoconf). # # FIXME: can not be called more than once successively because # FIXME: it removes files unconditionally included by subdirectory # FIXME: Makefiles (e.g., Config.make). 
# .PHONY: distclean distclean: clean ifdef ALL_SUB_DIRS set -e; for i in $(ALL_SUB_DIRS); do $(MAKE) -C $$i distclean; done endif -$(RM) -f .depend $(DC_TARGETS) # # Install rule # ifndef OVERRULE_INSTALL .PHONY: install install: ifdef B_NAME @echo "Installing $(B_NAME) into $(bindir)" @test -d $(DESTDIR)$(bindir) || $(MKDIR) -p $(DESTDIR)$(bindir) @$(INSTALL) -m 755 $(B_NAME) $(DESTDIR)$(bindir) endif ifdef L_TARGET @echo "Installing $(L_TARGET) into $(libdir)" @test -d $(DESTDIR)$(libdir) || $(MKDIR) -p $(DESTDIR)$(libdir) @$(INSTALL) -m 644 $(L_TARGET) $(DESTDIR)$(libdir) endif ifdef JAR_TARGETS @test -d $(DESTDIR)$(datadir)/$(versioneddir) || $(MKDIR) -p $(DESTDIR)$(datadir)/$(versioneddir) @for i in $(JAR_TARGETS); do \ echo "Installing $i into $(datadir)/$(versioneddir)" \ $(INSTALL) -m 644 $$i $(DESTDIR)$(datadir)/$(versioneddir) ;\ done endif ifdef INSTALL_TARGETS @test -d $(DESTDIR)$(INSTALL_DIR) || $(MKDIR) -p $(DESTDIR)$(INSTALL_DIR) @for i in $(INSTALL_TARGETS); do \ echo "Installing $$i into $(INSTALL_DIR)" ; \ $(INSTALL) -m $(INSTALL_MODE) $$i $(DESTDIR)$(INSTALL_DIR) > /dev/null ;\ done endif ifdef ALL_SUB_DIRS @set -e; for i in $(ALL_SUB_DIRS); do $(MAKE) -C $$i install; done endif endif antlr-2.7.7/scripts/lib.sh.in0000755000175000017500000001545310522211616016012 0ustar twernertwerner#!/bin/sh ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger. ## ## ## ## Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This script shall wrap/hide how we are going to build a library ## within the ANTLR (www.antlr.org) project. test -z "${1}" && exit 0 if test -z "${AR}" ; then AR="@AR@" ar="@ar@" else ar="`basename $AR`" ar="`echo $ar|sed 's,\..*$,,'`" fi test -z "${DEBUG}" && { DEBUG="@DEBUG@" } RANLIB="@RANLIB@" LIBNAME="@ANTLR_LIB@" ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## Prepate input arguments ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## case "@build_os@" in cygwin) LIBNAME="`cygpath -m ${LIBNAME}`" ARGV="`cygpath -m ${*}`" ;; *) ARGV="${*}" ;; esac L="${ARGV}" ; ARGV="" for x in $L ; do if test -f "${x}" ; then ARGV="$ARGV ${x}" fi done unset L if test -z "${ARGV}" ; then cat < c:/). This does not ## work when using arguments like "-out:/c". Such a path ## remains untranslated causing underlying tool to choke. case @build_os@ in mingw*) d=`dirname "${LIBNAME}"` b=`basename "${LIBNAME}"` d=`cd "$d" && pwd -W` LIBNAME="$d/$b" ;; esac arflags="-nologo -verbose -out:${LIBNAME}" cmd_pfx="rm -f ${LIBNAME}" ## no runlib required unset RANLIB ;; bcc32) ## I'm shamelessly override what has been configured -- ## nothing more than a hack. AR='tlib' ## to make the general command work b=`basename "${LIBNAME}"` cmd_pfx="rm -f ${b} ${LIBNAME}" cmd_sfx="cp $b .${b} ; rm ${b}; cp .${b} ${LIBNAME}" arflags="/P128 ${b}" ## no runlib required unset RANLIB ## Borland's interface for tlib (making a static library) ## is most stupid ever seen. For example, it does not ## accept "-" in file names, not is it able to handle ## forward slashes in pathnames. Even Microsoft can do ## this.. 
L="${ARGV}" ; ARGV="" for x in $L ; do ARGV="$ARGV +`basename ${x}`" done unset L ;; CC) AR="@CXX@" arflags="-xar -o ${LIBNAME}" cmd_pfx="rm -f ${LIBNAME}" ;; *) arflags="rus" ARGV="${LIBNAME} ${ARGV}" cmd_pfx="rm -f ${LIBNAME}" ;; esac ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## **NO CHANGE NECESSARY BELOW THIS LINE - EXPERTS ONLY** ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## test -z "${verbose}" && { verbose=@VERBOSE@ } ## If specific flags have been configured then they overrule ## our precomputed flags. Still a user can override by using ## environment variable $ARFLAGS - see below. test -n "@ARFLAGS@" && { set x @ARFLAGS@ ; shift case $1 in +) shift ARFLAGS="${arflags} $*" ;; -) shift arflags="$* ${arflags}" ;; =) shift arflags="$*" ;; *) if test -z "$1" ; then arflags="${arflags}" else arflags="$*" fi ;; esac } ## Regardless what has been configured, a user should always ## be able to override without the need to reconfigure or ## change this file. Therefore we check variable $ARFLAGS. ## In almost all cases the precomputed flags are just ok but ## some additional flags are needed. To support this in an ## easy way, we check for the very first value. If this val- ## ue is ## '+' -> append content of ARFLAGS to precomputed flags ## '-' -> prepend content -*- ## '=' -> do not use precomputed flags ## If none of these characters are given, the behaviour will ## be the same as if "=" would have been given. set x ${ARFLAGS} ; shift case $1 in +) shift ARFLAGS="${arflags} $*" ;; -) shift ARFLAGS="$* ${arflags}" ;; =) shift ARFLAGS="$*" ;; *) if test -z "$1" ; then ARFLAGS="${arflags}" else ARFLAGS="$*" fi ;; esac ## Any special treatment goes here .. case "${ar}" in ar) ;; *) ;; esac ##%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%## ## No c u s t o m i z a t i o n below this line ## ##%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%## ## Reset positional args set x ${ARGV} ; shift ## This is how we would run 'ranlib' .. test -n "${RANLIB}" && { ranlib_cmd="${RANLIB} ${LIBNAME}" } ## This extra copy is a hack for Borland's TLIB which does ## not accept '-' in filenames. cmd="${AR} ${ARFLAGS} ${ARGV}" ## If there's something to be done .. test -n "${cmd}" && { test -n "${cmd_pfx}" && { test $verbose -gt 0 && { echo $cmd_pfx } eval ${cmd_pfx} || exit 1 } ## be verbose of required case "${verbose}" in 0|no|nein|non) echo "*** creating ${LIBNAME}" ;; *) echo $cmd ;; esac ## remove library - just in case. test -n "${LIBNAME}" -a -f "${LIBNAME}" && { rm -f ${LIBNAME} } ## eventually .. $cmd || { rc=$? cat <> E R R O R << ============================================================ $cmd ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } test -n "${cmd_sfx}" && { test $verbose -gt 0 && { echo $cmd_sfx } eval ${cmd_sfx} || exit 1 } ## and even later .. 
test -n "${RANLIB}" && { cmd="${RANLIB} ${LIBNAME}" test $verbose -gt 0 && { echo $cmd } $cmd || { exit 1 } } } exit 0antlr-2.7.7/scripts/java.sh.in0000644000175000017500000000316210522211616016154 0ustar twernertwerner#!/bin/sh test -z "${verbose}" && { verbose=@VERBOSE@ } abs_top_build_dir=@abs_top_builddir@ java_cmd="@JAVA@" antlr_jar="@ANTLR_JAR@" ARGV="$*" case @build_os@ in cygwin) test -n "${antlr_jar}" && { antlr_jar="`cygpath -m ${antlr_jar}`" } sep=";" ;; macos*) sep=";" ;; *) sep=":" ;; esac if test -d "${abs_top_build_dir}"; then if test -f "${antlr_jar}" ; then if test -z "${CLASSPATH}"; then ## needs fine tuning - depends on os (wh:tbd) CLASSPATH=".${sep}${antlr_jar}" export CLASSPATH else ## needs fine tuning - depends on os (wh:tbd) CLASSPATH="${sep}${antlr_jar}${sep}${CLASSPATH}" fi fi fi ## Translate all non option arguments case @build_os@ in cygwin) set x $ARGV ; shift ARGV= while test $# -gt 0 ; do case $1 in -*) ARGV="$ARGV $1" ;; *) ARGV="$ARGV `@CYGPATH_M@ $1`" ;; esac shift done ;; *) ;; esac ## go ahead .. cmd="${java_cmd} ${ARGV}" case "${verbose}" in 0) echo $cmd ;; *) echo $cmd ;; esac $cmd || { rc=$? cat <> E R R O R << ============================================================ CLASSPATH=$CLASSPATH $cmd ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } exit 0 antlr-2.7.7/scripts/cpp.sh.in0000755000175000017500000001276510522211616016031 0ustar twernertwerner#!/bin/sh ## This script shall wrap/hide how we are going to compile C++ ## source files within the ANTLR (www.antlr.org) project. test -z "${verbose}" && { verbose=@VERBOSE@ } ## check whether we have something to do .. test -z "$1" && exit 0 ## get arguments ARGV="$*" ## Command CXX is precomputed but user may override. if test -z "${CXX}" ; then CXX="@CXX@" cxx="@cxx@" else cxx="`basename $CXX`" cxx="`echo $cxx|sed 's,\..*$,,'`" fi ## use whitespace to separate dirs, don't use compiler specific ## options like '-I' etc. That will be added at runtime when we ## know what compiler is in use. CXXINCLUDE=". @abs_top_srcdir@/lib/cpp" test -z "${DEBUG}" && { DEBUG="@DEBUG@" } ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## Here we set flags for well know programs ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## ## Do not set variable CXXFLAGS here, just use it's sister ## variable 'cxxflags'. This allows the call to override ## this settings - see handling of CXXFLAGS below. case "${cxx}" in gcc) cxxflags="-MM" ;; *) for x in ${ARGV} ; do echo > $x.d done return 0 ;; esac case ${cxx} in bcc32|CC|aCC|xlC) CXX_OPT_INCLUDE="-I" ;; *) CXX_OPT_INCLUDE="-I " ;; esac ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## **NO CHANGE NECESSARY BELOW THIS LINE - EXPERTS ONLY** ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## If specific flags have been configured then they overrule ## our precomputed flags. Still a user can override by using ## environment variable $CXXFLAGS - see below. 
test -n "@CXXFLAGS@" && { set x @CXXFLAGS@ ; shift case $1 in +) shift CXXFLAGS="${cxxflags} $*" ;; -) shift cxxflags="$* ${cxxflags}" ;; =) shift cxxflags="$*" ;; *) if test -z "$1" ; then cxxflags="${cxxflags}" else cxxflags="$*" fi ;; esac } ## Regardless what has been configured, a user should always ## be able to override without the need to reconfigure or ## change this file. Therefore we check variable $CXXFLAGS. ## In almost all cases the precomputed flags are just ok but ## some additional flags are needed. To support this in an ## easy way, we check for the very first value. If this val- ## ue is ## '+' -> append content of CXXFLAGS to precomputed flags ## '-' -> prepend content -*- ## '=' -> do not use precomputed flags ## If none of these characters are given, the behaviour will ## be the same as if "=" would have been given. set x ${CXXFLAGS} ; shift case $1 in +) shift CXXFLAGS="${cxxflags} $*" ;; -) shift CXXFLAGS="$* ${cxxflags}" ;; =) shift CXXFLAGS="$*" ;; *) if test -z "$1" ; then CXXFLAGS="${cxxflags}" else CXXFLAGS="$*" fi ;; esac ##%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%## ## No c u s t o m i z a t i o n below this line ## ##%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%## ## translate args - requires that we are on cygwin. Other- ## wise we have to assume that args are already in proper ## format. case "@build_os@" in cygwin) case "@cxx@" in cl|bcc32) ARGV="`cygpath -m ${ARGV}`" ;; esac ;; esac ## we only add valid directories - note that CXXINCLUDE is ## supposed to contain directories and not optionized' ## arguments. set x ${CXXINCLUDE} ; shift Y="" ## filter non valid directories while test $# -gt 0 ; do y="$1" ; shift test -d "${y}" && { Y="${Y} ${y}" } done set x ${Y} ; shift ; Y="" ## translate directories on cygwin case "@build_os@" in cygwin) set x `cygpath -m ${*}` ; shift ;; esac ## prefix each arg with CXX_OPT_INCLUDE (for instance -I) while test $# -gt 0 ; do y="$1" ; shift Y="${Y} ${CXX_OPT_INCLUDE}${y}" done CXXINCLUDE="${Y}" ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### LAST CHANCE COMPILER TUNIG HERE ### ###================================================================### case "${cxx}" in cl) ;; bcc32) ;; gcc) ;; xlC) ;; CC) ;; aCC) ;; *) ;; esac case "@cxx@" in *) CXXFLAGS="${CXXFLAGS} -c" ;; esac ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ### all variables participating in calling must be set now.. ###--------------------------------------------------------- CXX_CMD="${CXX} ${CXXFLAGS} ${CXXINCLUDE}" test -z "${ARGV}" && exit 0 for x in ${ARGV} ; do cmd="$CXX_CMD $x" case "${verbose}" in 0|no|nein|non) echo "*** update deps for `basename $x`" ;; *) echo "*** making $x.d" echo $cmd ;; esac $cmd > $x.$$$$ || { rc=$? cat <> E R R O R << ============================================================ $cmd ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } ## on compiling ${dir}/x.cpp we get dep x.o : x.cpp ${dir}/y.h .. ## but we need to have x.o ${dir}/x.o ${dir}/x.d : ... 
dx=`echo $x|sed 's,\.[^.]*$,@OBJEXT@,g'` sed "s,\(.*\)\@OBJEXT@[ ]*:,\1@OBJEXT@ ${dx} ${x}.d : ,g" < $x.$$$$ > $x.d rm -f $x.$$$$ done exit 0 antlr-2.7.7/scripts/config.vars.in0000644000175000017500000000476510522211616017053 0ustar twernertwerner## --*- Makefile -*-- SUBDIRS := ## helper utilities .. INSTALL = @INSTALL@ MKDIR = @MKDIR@ RM = @RM@ -r -f RMF = @RMF@ TAR = @TAR@ TOUCH = @TOUCH@ CHMOD = @CHMOD@ SED = @SED@ GREP = @GREP@ CAT = @CAT@ CHMOD = @CHMOD@ CP = @CP@ ECHO = @ECHO@ # usual dribble exec_prefix = @exec_prefix@ prefix = @prefix@ program_transform_name = @program_transform_name@ bindir = @bindir@ sbindir = @sbindir@ libexecdir = @libexecdir@ datadir = @datadir@ sysconfdir = @sysconfdir@ sharedstatedir = @sharedstatedir@ localstatedir = @localstatedir@ libdir = @libdir@ includedir = @includedir@ oldincludedir = @oldincludedir@ infodir = @infodir@ mandir = @mandir@ build_alias = @build_alias@ host_alias = @host_alias@ target_alias = @target_alias@ build = @build@ build_cpu = @build_cpu@ build_vendor = @build_vendor@ build_os = @build_os@ host = @host@ host_cpu = @host_cpu@ host_vendor = @host_vendor@ host_os = @host_os@ just_make = @just_make@ # Version stuff... VERSION = @VERSION@ SUBVERSION = @SUBVERSION@ PATCHLEVEL = @PATCHLEVEL@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ versioneddir = @PACKAGE_NAME@-@PACKAGE_VERSION@ # navigation builddir = @abs_this_builddir@ buildtree = @abs_this_builddir@ sourcetree = @abs_top_srcdir@ ## Two abbrevs to shorten things. $(thisdir) shall be the current ## working directory as absolute name and $(_srcdir) is it's ## source dir companion. That means that $(thisdir)/Makefile has ## been generated by $(_srcdir)/Makefile.in. _srcdir = @abs_top_srcdir@/$(subdir) thisdir = @abs_this_builddir@/$(subdir) # variable 'srcdir' is deprecated - use sourcetree srcdir = @abs_top_srcdir@ # variable 'objdir' is deprecated - use buildtree objdir = @abs_this_builddir@ # other verbose = @VERBOSE@ ## SUBDIRS you want to exclude (separate them by using "|"). SUBDIRS_NOT := . CLR = @CLR@ ANTLR_JAR = @ANTLR_JAR@ ANTLR_LIB = @ANTLR_LIB@ ANTLR_NET = @ANTLR_NET@ ANTLR_PY = @ANTLR_PY@ ASTFRAME_NET = @ASTFRAME_NET@ antlr_jar = @antlr_jar@ antlr_lib = @antlr_lib@ antlr_net = @antlr_net@ antlr_py = @antlr_py@ astframe_net = @astframe_net@ antlr-2.7.7/scripts/c.sh.in0000755000175000017500000000203010522211616015451 0ustar twernertwerner#!/bin/sh ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger. ## ## ## ## Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This script shall wrap/hide how we are going to run a C/C++ ## preprocessor within the ANTLR (www.antlr.org) project. test -z "${verbose}" && { verbose=@VERBOSE@ } ## check whether we have something to do .. if test -z "$1" ; then exit 0 fi ARCHFLAGS= INCLUDE="-I @abs_top_srcdir@/lib/cpp" DEBUG= EXTRA_CFLAGS= C_CMD="@CC@ @CFLAGS@ ${ARCHFLAGS} ${INCLUDE} ${DEBUG} ${EXTRA_CFLAGS} -c" while test $# -gt 0 ; do x="$1" ; shift echo "compiling (C) $x .." c_cmd="$C_CMD $x" $c_cmd || { echo "" echo "error caught on .." echo ">>> $c_cmd" echo "" exit 1 } done antlr-2.7.7/scripts/jar.sh.in0000755000175000017500000001017010522211616016007 0ustar twernertwerner#!/bin/sh ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger. 
## ## ## ## Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This script shall wrap/hide how we are going to pack Java class ## files. We need to wrap this as SUN's jar does not understand ## UNIX filename notation on Cygwin. test -z "${verbose}" && { verbose=@VERBOSE@ } ## check whether we have something to do .. test -z "$1" && exit 0 case @build_os@ in cygwin) ARGV="`cygpath -m $*`" ;; *) ARGV="$*" ;; esac ## Command JAR is precomputed but user may override. if test -z "${JAR}" ; then JAR="@JAR@" jar="@jar@" else jar="`basename $JAR`" jar="`echo $jar|sed 's,\..*$,,'`" fi test -z "${DEBUG}" && { DEBUG="@DEBUG@" } ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## Here we set flags for well know programs ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## ## Do not set variable JARFLAGS here, just use it's sister ## variable 'jarflags'. This allows the call to override ## this settings - see handling of JARFLAGS below. case "${jar}" in jar) jarflags="cf" ;; *) jarflags="cf" ;; esac ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## **NO CHANGE NECESSARY BELOW THIS LINE - EXPERTS ONLY** ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## If specific flags have been configured then they overrule ## our precomputed flags. Still a user can override by using ## environment variable $JARFLAGS - see below. test -n "@JARFLAGS@" && { set x @JARFLAGS@ ; shift case $1 in +) shift JARFLAGS="${jarflags} $*" ;; -) shift jarflags="$* ${jarflags}" ;; =) shift jarflags="$*" ;; *) if test -z "$1" ; then jarflags="${jarflags}" else jarflags="$*" fi ;; esac } ## Regardless what has been configured, a user should always ## be able to override without the need to reconfigure or ## change this file. Therefore we check variable $JARFLAGS. ## In almost all cases the precomputed flags are just ok but ## some additional flags are needed. To support this in an ## easy way, we check for the very first value. If this val- ## ue is ## '+' -> append content of JARFLAGS to precomputed flags ## '-' -> prepend content -*- ## '=' -> do not use precomputed flags ## If none of these characters are given, the behaviour will ## be the same as if "=" would have been given. set x ${JARFLAGS} ; shift case $1 in +) shift JARFLAGS="${jarflags} $*" ;; -) shift JARFLAGS="$* ${jarflags}" ;; =) shift JARFLAGS="$*" ;; *) if test -z "$1" ; then JARFLAGS="${jarflags}" else JARFLAGS="$*" fi ;; esac ## Any special treatment goes here .. case "${jar}" in jar) ;; *) ;; esac ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This shall be the command to be excuted below ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## cmd="${JAR} ${JARFLAGS} ${ARGV}" ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## standard template to execute a command ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## case "${verbose}" in 0|no|nein|non) set x ${ARGV} echo "*** creating $2 .." ;; *) echo $cmd ;; esac $cmd || { rc=$? cat <> E R R O R << ============================================================ $cmd ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. 
The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } exit 0 antlr-2.7.7/scripts/cxx.sh.in0000755000175000017500000001421210522211616016036 0ustar twernertwerner#!/bin/sh ## This script shall wrap/hide how we are going to compile C++ ## source files within the ANTLR (www.antlr.org) project. test -z "${verbose}" && { verbose=@VERBOSE@ } ## check whether we have something to do .. test -z "$1" && exit 0 ## get arguments ARGV="$*" ## Command CXX is precomputed but user may override. if test -z "${CXX}" ; then CXX="@CXX@" cxx="@cxx@" else cxx="`basename $CXX`" cxx="`echo $cxx|sed 's,\..*$,,'`" fi ## use whitespace to separate dirs, don't use compiler specific ## options like '-I' etc. That will be added at runtime when we ## know what compiler is in use. ## 2.7.6: take CXXINCLUDE from environment into account. CXXINCLUDE=". ${CXXINCLUDE} @abs_top_srcdir@/lib/cpp" ## according to Kurt we need to set some additional included ## paths when using 'cxx' on Tru64. Here we go .. case $cxx in cxx) CXXINCLUDE="$CXXINCLUDE @abs_top_srcdir@/include" CXXINCLUDE="$CXXINCLUDE /usr/include/cxx" CXXINCLUDE="$CXXINCLUDE /usr/include" ;; esac test -z "${DEBUG}" && { DEBUG="@DEBUG@" } ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## Here we set flags for well know programs ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## ## Do not set variable CXXFLAGS here, just use it's sister ## variable 'cxxflags'. This allows the caller to override ## this settings - see handling of CXXFLAGS below. case "${cxx}" in cxx) cxxflags="-D__DECCXX -ieee -DDEC -O0 -arch host -trapuv -check_bounds -warnprotos -std1 -noansi_args -portable" #-I/spare/mccalke/antlr-2.7.5-new/include -I/usr/include/cxx -I/usr/include -L/usr/lib/cmplrs/cxx -L/usr/lib/cmplrs/cxx/V6.5-042" ;; gcc) cxxflags="-felide-constructors -pipe" case "${DEBUG}" in 0) cxxflags="-O2 -DNDEBUG ${cxxflags}" ;; 1) cxxflags="-g ${cxxflags} -W -Wall" ;; esac ;; cl) cxxflags="-nologo -GX -GR" ;; bcc32) cxxflags="-q -v -w-inl -w-aus -w-par -w-ccc" ;; CC) cxxflags="-g" ;; xlC) cxxflags="" ;; aCC) cxxflags="" ;; *) cxxflag="" ;; esac case ${cxx} in bcc32|CC|aCC|xlC|cxx) CXX_OPT_INCLUDE="-I" ;; *) CXX_OPT_INCLUDE="-I " ;; esac ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## **NO CHANGE NECESSARY BELOW THIS LINE - EXPERTS ONLY** ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## If specific flags have been configured then they overrule ## our precomputed flags. Still a user can override by using ## environment variable $CXXFLAGS - see below. test -n "@CXXFLAGS@" && { set x @CXXFLAGS@ ; shift case $1 in +) shift CXXFLAGS="${cxxflags} $*" ;; -) shift cxxflags="$* ${cxxflags}" ;; =) shift cxxflags="$*" ;; *) if test -z "$1" ; then cxxflags="${cxxflags}" else cxxflags="$*" fi ;; esac } ## Regardless what has been configured, a user should always ## be able to override without the need to reconfigure or ## change this file. Therefore we check variable $CXXFLAGS. ## In almost all cases the precomputed flags are just ok but ## some additional flags are needed. To support this in an ## easy way, we check for the very first value. If this val- ## ue is ## '+' -> append content of CXXFLAGS to precomputed flags ## '-' -> prepend content -*- ## '=' -> do not use precomputed flags ## If none of these characters are given, the behaviour will ## be the same as if "=" would have been given. 
set x ${CXXFLAGS} ; shift case $1 in +) shift CXXFLAGS="${cxxflags} $*" ;; -) shift CXXFLAGS="$* ${cxxflags}" ;; =) shift CXXFLAGS="$*" ;; *) if test -z "$1" ; then CXXFLAGS="${cxxflags}" else CXXFLAGS="$*" fi ;; esac ##%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%## ## No c u s t o m i z a t i o n below this line ## ##%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%## ## translate args - requires that we are on cygwin. Other- ## wise we have to assume that args are already in proper ## format. case "@build_os@" in cygwin) case "@cxx@" in cl|bcc32) ARGV="`cygpath -m ${ARGV}`" ;; esac ;; esac ## we only add valid directories - note that CXXINCLUDE is ## supposed to contain directories and not optionized' ## arguments. set x ${CXXINCLUDE} ; shift Y="" ## filter non valid directories while test $# -gt 0 ; do y="$1" ; shift test -d "${y}" && { Y="${Y} ${y}" } done set x ${Y} ; shift ; Y="" ## translate directories on cygwin case "@build_os@" in cygwin) set x `cygpath -m ${*}` ; shift ;; esac ## prefix each arg with CXX_OPT_INCLUDE (for instance -I) while test $# -gt 0 ; do y="$1" ; shift Y="${Y} ${CXX_OPT_INCLUDE}${y}" done CXXINCLUDE="${Y}" ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### LAST CHANCE COMPILER TUNIG HERE ### ###================================================================### case "${cxx}" in cl) ;; bcc32) ;; gcc) ;; xlC) ;; CC) ;; aCC) ;; cxx) ;; *) ;; esac case "@cxx@" in *) CXXFLAGS="${CXXFLAGS} -c" ;; esac ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ### all variables participating in calling must be set now.. ###--------------------------------------------------------- CXX_CMD="${CXX} ${CXXFLAGS} ${CXXINCLUDE}" test -z "${ARGV}" && exit 0 for x in ${ARGV} ; do cmd="$CXX_CMD $x" case "${verbose}" in 0|no|nein|non) echo "*** compiling $x" ;; *) echo $cmd ;; esac $cmd || { rc=$? cat <> E R R O R << ============================================================ $cmd ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } done exit 0 antlr-2.7.7/scripts/config.make.in0000644000175000017500000000656410522211616017014 0ustar twernertwerner##xxxxxxxxxxxxxxxxxxxxxx --*- Makefile -*-- xxxxxxxxxxxxxxxx>>> config.make ## By default, make will jump into any sub directory that contains a file ## named "Makefile". This is done in the order implied by "/bin/ls" which ## is in almost all cases correct (note that you should not design ## Makefiles which depend on a specific invocation order). You can override ## specific behaviour by using variable SUBDIRS. If given and not empty, ## "/bin/ls" is not used. Also, if you want to disable jumping subdirs you ## may use either ".", ".." as value for SUBDIRS. Note that SUBDIRS may ## contain any directory (except "." and "..). ## This is very much GNU specific, sigh. Variable SUBDIRS is used to tell ## make which subdirectory to jump. It's value is normally preset or just ## empty, in which case /bin/ls is used as discussed above. However, I also ## want that a user can say ## ## make SUBDIRS="d1 d2 .. dn" ## ## That means, ignore defaults and go ahead and make exactly this director- ## ies mentioned. Of course, this should only have an impact on Makefile ## being used by "make" but not for any makefils in d1 .. dn, right? 
For ## example, if di needs to make directories a,b and c, then they need to ## be made of course. So all burns down to the question how to prevent a ## variable from being passed to submakes. Below you can see the answer. If ## you believe that there's a simpler answer to the problem don't hesistate ## to try it out. If it works, send me an email: ora dot et dot labora at ## web dot de. But be warned - you need to try all variations. ## ## Here is in short what I found and how it works. Variables given on ## command line are saved in variable MAKEOVERRIDES. This variable is ## exported and passed down. On invocation of a submake file, make will ## have a look into MAKEOVERRIDES and unpack each variable found therein. ## Therefore I'm just going to remove every (?) occurance of SUBDIRS from ## this variable. MAKEOVERRIDES := $(patsubst SUBDIRS=%,,$(MAKEOVERRIDES)) ## The actuall rule on how to make a recursive target. all clean distclean test install force-target clean-target :: @dirs="$(SUBDIRS)" ; \ test -z "$${dirs}" && { \ dirs=`/bin/ls` ; \ } ; \ for d in . $${dirs} ; do \ case $${d} in \ . | .. ) ;; \ $(SUBDIRS_NOT) ) ;; \ *) \ if test -f "$${d}/Makefile" ; then \ echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" ; \ echo ">> @MAKE@ -C $(subdir)/$${d} $@ " ; \ echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" ; \ @MAKE@ -C "$$d" $@ || exit 1 ;\ fi ; \ ;; \ esac ; \ done ## For historical reasons only you can make local targets as "this-*" or ## "*-this" rules. The default is to do nothing. Although this targets ## exists, it is recommended to define further "all", "clean" etc. double ## colon rules. all :: this-all all-this clean :: this-clean clean-this distclean :: this-distclean distclean-this test :: this-test test-this install :: this-install install-this this-all :: this-clean :: this-distclean :: this-test :: this-install :: all-this :: clean-this :: distclean-this :: test-this :: install-this :: force-target :: clean-target all distclean :: clean distclean :: $(RM) Makefile ## xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx<< config.make antlr-2.7.7/scripts/config.deps.in0000644000175000017500000001247310522211616017026 0ustar twernertwerner## --*- Makefile -*-- ## Make listed targets even in case a file with same name exists. .PHONY: \ this \ all clean install test distclean \ this-all this-clean this-install this-test this-distclean \ all-this clean-this install-this test-this distclean-this \ $(eof) ## delete suffix rules - shortens output when using -d and is not ## used anyway (and should not be used). 
.SUFFIXES: Makefile :: @abs_top_srcdir@/$(subdir)/Makefile.in \ @abs_this_builddir@/scripts/config.deps \ @abs_this_builddir@/scripts/config.make \ @abs_this_builddir@/scripts/config.vars \ @abs_this_builddir@/config.status @echo "*** update $(subdir)/Makefile" @cd @abs_this_builddir@ && CONFIG_FILES="$(subdir)/Makefile" @SHELL@ ./config.status -q @abs_this_builddir@/scripts/config.deps : \ @abs_top_srcdir@/scripts/config.deps.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/config.deps @SHELL@ ./config.status -q @abs_this_builddir@/scripts/config.make : \ @abs_top_srcdir@/scripts/config.make.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/config.make @SHELL@ ./config.status -q @abs_this_builddir@/scripts/config.vars : \ @abs_top_srcdir@/scripts/config.vars.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/config.vars @SHELL@ ./config.status -q @abs_this_builddir@/config.status : \ @abs_top_srcdir@/configure @echo "*** reconfigure $@ - stay tuned .." @cd @abs_this_builddir@ && @SHELL@ ./config.status -q --recheck @echo "*** update all configured files .." @cd @abs_this_builddir@ && @SHELL@ ./config.status -q ### In a pure devel mode there should be also a dependency listed on how ### to make configure out of configure.in. This requires that usr has ### m4 and autoconf (proper version) installed. Appropriate checks are ### not done in configure. If so, then uncomment next lines: ### ### @rule_configure_configure_in@ ### @abs_this_builddir@/scripts/antlr.sh : \ @abs_top_srcdir@/scripts/antlr.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/antlr.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/cpp.sh : \ @abs_top_srcdir@/scripts/cpp.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/cpp.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/csc.sh : \ @abs_top_srcdir@/scripts/csc.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/csc.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/cxx.sh : \ @abs_top_srcdir@/scripts/cxx.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/cxx.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/jar.sh : \ @abs_top_srcdir@/scripts/jar.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/jar.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/javac.sh : \ @abs_top_srcdir@/scripts/javac.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/javac.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/java.sh : \ @abs_top_srcdir@/scripts/java.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/java.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/lib.sh : \ @abs_top_srcdir@/scripts/lib.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/lib.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/link.sh : \ @abs_top_srcdir@/scripts/link.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/link.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/pyinst.sh : \ @abs_top_srcdir@/scripts/pyinst.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/pyinst.sh @SHELL@ ./config.status -q @abs_this_builddir@/scripts/python.sh : \ @abs_top_srcdir@/scripts/python.sh.in @echo "*** update $@" @cd @abs_this_builddir@ && CONFIG_FILES=scripts/python.sh @SHELL@ ./config.status -q ## This rule shall ensure that ANTLR_NET is 
up-to-date. The rule is a ## double colon rule, ie. further rules with the same target may be ## added. For unknown reasons, double colon rules are always phony,ie. ## getting executed even in case target exists. We break the infinite ## loop, we only jump into subdir "lib/csharp/src" if we are not ## already in. It is very important that each Makefile[.in] sets the ## variable $(subdir) proper. @ANTLR_NET@ :: @ subdir="lib/csharp/antlr.runtime" ; \ case $(subdir) in \ $$subdir ) ;; \ * ) d="@abs_this_builddir@/$$subdir" ; \ test -f "$$d/Makefile" && { \ @MAKE@ -C "$$d" $@ ; \ } \ ;; \ esac @ASTFRAME_NET@ :: @ subdir="lib/csharp/antlr.astframe" ; \ case $(subdir) in \ $$subdir ) ;; \ * ) d="@abs_this_builddir@/$$subdir" ; \ test -f "$$d/Makefile" && { \ @MAKE@ -C "$$d" $@ ; \ } \ ;; \ esac @ANTLR_JAR@ :: @ subdir="antlr" ; \ case $(subdir) in \ $$subdir ) ;; \ * ) d="@abs_this_builddir@/$$subdir" ; \ test -f "$$d/Makefile" && { \ @MAKE@ -C "$$d" $@ ; \ } \ ;; \ esac @ANTLR_LIB@ :: @ subdir="lib/cpp/src" ; \ case $(subdir) in \ $$subdir ) ;; \ * ) d="@abs_this_builddir@/$$subdir" ; \ test -f "$$d/Makefile" && { \ @MAKE@ -C "$$d" $@ ; \ } \ ;; \ esac antlr-2.7.7/scripts/link.sh.in0000755000175000017500000001261510522211616016176 0ustar twernertwerner#!/bin/sh ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger. ## ## ## ## Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## test -z "$1" && exit 0 ## This script shall wrap/hide how we are going to link C++ ## object files within the ANTLR (www.antlr.org) project. CXX="@CXX@" CXXFLAGS="@LDFLAGS@" LIBNAME="@ANTLR_LIB@" TARGET="$1" ; shift ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## Prepate input arguments ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## case "@build_os@" in cygwin) ARGV="`cygpath -m ${*}`" test -n "${TARGET}" && { TARGET=`cygpath -m ${TARGET}` } test -n "${LIBNAME}" && { LIBNAME="`cygpath -m ${LIBNAME}`" } ;; *) ARGV="${*}" ;; esac # RK: Disabled it strips -l arguments #L="${ARGV}" ; ARGV="" #for x in $L ; do # if test -f "${x}" ; then # ARGV="$ARGV ${x}" # fi #done #unset L if test -z "${ARGV}" ; then cat < append content of LDFLAGS to precomputed flags ## '-' -> prepend content -*- ## '=' -> do not use precomputed flags ## If none of these characters are given, the behaviour will ## be the same as if "=" would have been given. set x ${LDFLAGS} ; shift case $1 in +) shift LDFLAGS="${ldflags} $*" ;; -) shift LDFLAGS="$* ${ldflags}" ;; =) shift LDFLAGS="$*" ;; *) if test -z "$1" ; then LDFLAGS="${ldflags}" else LDFLAGS="$*" fi ;; esac ## Any special treatment goes here .. case "${ld}" in ld) ;; *) ;; esac ##%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%## ## No c u s t o m i z a t i o n below this line ## ##%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%## test -z "${verbose}" && { verbose=@VERBOSE@ } ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This shall be the command to be excuted below ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## cmd="${LD} ${LDFLAGS} ${ARGV}" ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## standard template to execute a command ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## case "${verbose}" in 0|no|nein|non) echo "*** creating ${TARGET} .." ;; *) echo $cmd ;; esac $cmd || { rc=$? 
cat <> E R R O R << ============================================================ $cmd ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } exit 0 antlr-2.7.7/scripts/install-sh0000755000175000017500000001273610522211616016305 0ustar twernertwerner#!/bin/sh # # install - install a program, script, or datafile # This comes from X11R5 (mit/util/scripts/install.sh). # # Copyright 1991 by the Massachusetts Institute of Technology # # Permission to use, copy, modify, distribute, and sell this software and its # documentation for any purpose is hereby granted without fee, provided that # the above copyright notice appear in all copies and that both that # copyright notice and this permission notice appear in supporting # documentation, and that the name of M.I.T. not be used in advertising or # publicity pertaining to distribution of the software without specific, # written prior permission. M.I.T. makes no representations about the # suitability of this software for any purpose. It is provided "as is" # without express or implied warranty. # # Calling this script install-sh is preferred over install.sh, to prevent # `make' implicit rules from creating a file called install from it # when there is no Makefile. # # This script is compatible with the BSD install script, but was written # from scratch. It can only install one file at a time, a restriction # shared with many OS's install programs. # set DOITPROG to echo to test this script # Don't use :- since 4.3BSD and earlier shells don't like it. doit="${DOITPROG-}" # put in absolute paths if you don't have them in your path; or use env. vars. mvprog="${MVPROG-mv}" cpprog="${CPPROG-cp}" chmodprog="${CHMODPROG-chmod}" chownprog="${CHOWNPROG-chown}" chgrpprog="${CHGRPPROG-chgrp}" stripprog="${STRIPPROG-strip}" rmprog="${RMPROG-rm}" mkdirprog="${MKDIRPROG-mkdir}" transformbasename="" transform_arg="" instcmd="$mvprog" chmodcmd="$chmodprog 0755" chowncmd="" chgrpcmd="" stripcmd="" rmcmd="$rmprog -f" mvcmd="$mvprog" src="" dst="" dir_arg="" while [ x"$1" != x ]; do case $1 in -c) instcmd="$cpprog" shift continue;; -d) dir_arg=true shift continue;; -m) chmodcmd="$chmodprog $2" shift shift continue;; -o) chowncmd="$chownprog $2" shift shift continue;; -g) chgrpcmd="$chgrpprog $2" shift shift continue;; -s) stripcmd="$stripprog" shift continue;; -t=*) transformarg=`echo $1 | sed 's/-t=//'` shift continue;; -b=*) transformbasename=`echo $1 | sed 's/-b=//'` shift continue;; *) if [ x"$src" = x ] then src=$1 else # this colon is to work around a 386BSD /bin/sh bug : dst=$1 fi shift continue;; esac done if [ x"$src" = x ] then echo "install: no input file specified" exit 1 else true fi if [ x"$dir_arg" != x ]; then dst=$src src="" if [ -d $dst ]; then instcmd=: chmodcmd="" else instcmd=mkdir fi else # Waiting for this to be detected by the "$instcmd $src $dsttmp" command # might cause directories to be created, which would be especially bad # if $src (and thus $dsttmp) contains '*'. 
if [ -f $src -o -d $src ] then true else echo "install: $src does not exist" exit 1 fi if [ x"$dst" = x ] then echo "install: no destination specified" exit 1 else true fi # If destination is a directory, append the input filename; if your system # does not like double slashes in filenames, you may need to add some logic if [ -d $dst ] then dst="$dst"/`basename $src` else true fi fi ## this sed command emulates the dirname command dstdir=`echo $dst | sed -e 's,[^/]*$,,;s,/$,,;s,^$,.,'` # Make sure that the destination directory exists. # this part is taken from Noah Friedman's mkinstalldirs script # Skip lots of stat calls in the usual case. if [ ! -d "$dstdir" ]; then defaultIFS=' ' IFS="${IFS-${defaultIFS}}" oIFS="${IFS}" # Some sh's can't handle IFS=/ for some reason. IFS='%' set - `echo ${dstdir} | sed -e 's@/@%@g' -e 's@^%@/@'` IFS="${oIFS}" pathcomp='' while [ $# -ne 0 ] ; do pathcomp="${pathcomp}${1}" shift if [ ! -d "${pathcomp}" ] ; then $mkdirprog "${pathcomp}" else true fi pathcomp="${pathcomp}/" done fi if [ x"$dir_arg" != x ] then $doit $instcmd $dst && if [ x"$chowncmd" != x ]; then $doit $chowncmd $dst; else true ; fi && if [ x"$chgrpcmd" != x ]; then $doit $chgrpcmd $dst; else true ; fi && if [ x"$stripcmd" != x ]; then $doit $stripcmd $dst; else true ; fi && if [ x"$chmodcmd" != x ]; then $doit $chmodcmd $dst; else true ; fi else # If we're going to rename the final executable, determine the name now. if [ x"$transformarg" = x ] then dstfile=`basename $dst` else dstfile=`basename $dst $transformbasename | sed $transformarg`$transformbasename fi # don't allow the sed command to completely eliminate the filename if [ x"$dstfile" = x ] then dstfile=`basename $dst` else true fi # Make a temp file name in the proper directory. dsttmp=$dstdir/#inst.$$# # Move or copy the file name to the temp name $doit $instcmd $src $dsttmp && trap "rm -f ${dsttmp}" 0 && # and set any options; do chmod last to preserve setuid bits # If any of these fail, we abort the whole thing. If we want to # ignore errors from any of these, just make sure not to ignore # errors from the above "$doit $instcmd $src $dsttmp" command. if [ x"$chowncmd" != x ]; then $doit $chowncmd $dsttmp; else true;fi && if [ x"$chgrpcmd" != x ]; then $doit $chgrpcmd $dsttmp; else true;fi && if [ x"$stripcmd" != x ]; then $doit $stripcmd $dsttmp; else true;fi && if [ x"$chmodcmd" != x ]; then $doit $chmodcmd $dsttmp; else true;fi && # Now rename the file to the real destination. $doit $rmcmd -f $dstdir/$dstfile && $doit $mvcmd $dsttmp $dstdir/$dstfile fi && exit 0 antlr-2.7.7/scripts/javac.sh.in0000755000175000017500000001345110522211616016324 0ustar twernertwerner#!/bin/sh ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger. ## ## ## ## Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## test -z "${verbose}" && { verbose=@VERBOSE@ } ## This script will be called to compile a list of java files on ## all UNIX/Cygwin platforms. Whether we'll use SUN java, gcj or ## another Java compiler doesn't matter. ## precompute some variables required or useful to compile ## Java source files. ## srcdir shall contain absolute path to package directory. srcdir="@abs_top_srcdir@" ## objdir shall contain absolute path to this build directory. objdir="@abs_this_builddir@" ## bootclasspath shall contain jar or zip file required to ## boot Javac. 
An example where this variable is used is ## jikes. Note, this variable can be plain empty. bootclasspath="@BOOTCLASSPATH@" classpath="@ANTLR_JAR@" case @build_os@ in cygwin) sep=";" ;; macos*) sep=";" ;; *) sep=":" ;; esac ## When on cygwin we translage paths into mixed notation (DOS ## with forward slashes). case @build_os@ in cygwin) test -n "$1" && { ARGV="`cygpath -m $*`" } test -n "${srcdir}" && { srcdir="`cygpath -m ${srcdir}`" } test -n "${objdir}" && { objdir="`cygpath -m ${objdir}`" } test -n "${bootclasspath}" && { bootclasspath="`cygpath -m ${bootclasspath}`" } test -n "${classpath}" && { classpath="`cygpath -m ${classpath}`" } ;; *) ARGV="$*" ;; esac ## Command JAVAC is precomputed but user may override. if test -z "${JAVAC}" ; then JAVAC="@JAVAC@" javac="@javac@" else javac=`basename $JAVAC` javac=`echo $javac|sed 's,\..*$,,'` fi ## Take environment variable CLASSPATH into account if test -n "$CLASSPATH" ; then ifs_save=$IFS IFS=$sep for d in $CLASSPATH ; do case @build_os@ in cygwin) d=`@CYGPATH_M@ $d` ;; esac classpath="$classpath$sep$d" done IFS=$ifs_save fi ## Compute the flags for well known compilers. Note that a user ## may override this settings by providing JAVACFLAGS - see be- ## low. case "${javac}" in jikes) javacflags="-nowarn -d ." javacflags="${javacflags} -sourcepath ${srcdir}" javacflags="${javacflags} -bootclasspath ${bootclasspath}" javacflags="${javacflags} -classpath ${classpath}" ;; javac) javacflags="-d ." javacflags="${javacflags} -sourcepath ${srcdir}" javacflags="${javacflags} -classpath ${classpath}" ;; gcj) javacflags="-d ." javacflags="${javacflags} -I${srcdir} -C" javacflags="${javacflags} -classpath ${classpath}" ;; *) javacflags="-d ." javacflags="${javacflags} -sourcepath ${srcdir}" javacflags="${javacflags} -classpath ${classpath}" ;; esac ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## **NO CHANGE NECESSARY BELOW THIS LINE - EXPERTS ONLY** ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## If specific flags have been configured then they overrule ## our precomputed flags. Still a user can override by using ## environment variable $JAVACFLAGS - see below. test -n "@JAVACFLAGS@" && { set x @JAVACFLAGS@ ; shift case $1 in +) shift JAVACFLAGS="${javacflags} $*" ;; -) shift javacflags="$* ${javacflags}" ;; =) shift javacflags="$*" ;; *) if test -z "$1" ; then javacflags="${javacflags}" else javacflags="$*" fi ;; esac } ## Regardless what has been configured, a user should always ## be able to override without the need to reconfigure or ## change this file. Therefore we check variable $JAVACFLAGS. ## In almost all cases the precomputed flags are just ok but ## some additional flags are needed. To support this in an ## easy way, we check for the very first value. If this val- ## ue is ## '+' -> append content of JAVACFLAGS to precomputed flags ## '-' -> prepend content -*- ## '=' -> do not use precomputed flags ## If none of these characters are given, the behaviour will ## be the same as if "=" would have been given. set x ${JAVACFLAGS} ; shift case $1 in +) shift JAVACFLAGS="${javacflags} $*" ;; -) shift JAVACFLAGS="$* ${javacflags}" ;; =) shift JAVACFLAGS="$*" ;; *) if test -z "$1" ; then JAVACFLAGS="${javacflags}" else JAVACFLAGS="$*" fi ;; esac ## Any special treatment goes here .. 
case "${javac}" in jikes) ;; javac) ;; gcj) ;; *) ;; esac ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This shall be the command to be excuted below ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## cmd="${JAVAC} ${JAVACFLAGS} ${ARGV}" ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## standard template to execute a command ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## case "${verbose}" in 0|no|nein|non) set x `echo $ARGV | wc` echo "*** compiling $3 Java file(s)" ;; *) echo CLASSPATH=${CLASSPATH} echo $cmd ;; esac $cmd || { rc=$? cat <> E R R O R << ============================================================ $cmd ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } exit 0 antlr-2.7.7/scripts/config.guess0000755000175000017500000012555210522211616016622 0ustar twernertwerner#! /bin/sh # Attempt to guess a canonical system name. # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, # 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. timestamp='2006-01-02' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA # 02110-1301, USA. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # Originally written by Per Bothner . # Please send patches to . Submit a context # diff and a properly formatted ChangeLog entry. # # This script attempts to guess a canonical system name similar to # config.sub. If it succeeds, it prints the system name on stdout, and # exits with 0. Otherwise, it exits with 1. # # The plan is that this can be called by configure scripts if you # don't specify an explicit build system type. me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] Output the configuration name of the system \`$me' is run on. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." version="\ GNU config.guess ($timestamp) Originally written by Per Bothner. Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." 
# Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit ;; --version | -v ) echo "$version" ; exit ;; --help | --h* | -h ) echo "$usage"; exit ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" >&2 exit 1 ;; * ) break ;; esac done if test $# != 0; then echo "$me: too many arguments$help" >&2 exit 1 fi trap 'exit 1' 1 2 15 # CC_FOR_BUILD -- compiler used by this script. Note that the use of a # compiler to aid in system detection is discouraged as it requires # temporary files to be created and, as you can see below, it is a # headache to deal with in a portable fashion. # Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still # use `HOST_CC' if defined, but it is deprecated. # Portable tmp directory creation inspired by the Autoconf team. set_cc_for_build=' trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; : ${TMPDIR=/tmp} ; { tmp=`(umask 077 && mktemp -d -q "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; dummy=$tmp/dummy ; tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; case $CC_FOR_BUILD,$HOST_CC,$CC in ,,) echo "int x;" > $dummy.c ; for c in cc gcc c89 c99 ; do if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then CC_FOR_BUILD="$c"; break ; fi ; done ; if test x"$CC_FOR_BUILD" = x ; then CC_FOR_BUILD=no_compiler_found ; fi ;; ,,*) CC_FOR_BUILD=$CC ;; ,*,*) CC_FOR_BUILD=$HOST_CC ;; esac ; set_cc_for_build= ;' # This is needed to find uname on a Pyramid OSx when run in the BSD universe. # (ghazi@noc.rutgers.edu 1994-08-24) if (test -f /.attbin/uname) >/dev/null 2>&1 ; then PATH=$PATH:/.attbin ; export PATH fi UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown # Note: order is significant - the case branches are not exclusive. case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in *:NetBSD:*:*) # NetBSD (nbsd) targets should (where applicable) match one or # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*, # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently # switched to ELF, *-*-netbsd* would select the old # object file format. This provides both forward # compatibility and a consistent mechanism for selecting the # object file format. # # Note: NetBSD doesn't particularly care about the vendor # portion of the name. We always set it to "unknown". sysctl="sysctl -n hw.machine_arch" UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \ /usr/sbin/$sysctl 2>/dev/null || echo unknown)` case "${UNAME_MACHINE_ARCH}" in armeb) machine=armeb-unknown ;; arm*) machine=arm-unknown ;; sh3el) machine=shl-unknown ;; sh3eb) machine=sh-unknown ;; *) machine=${UNAME_MACHINE_ARCH}-unknown ;; esac # The Operating System including object format, if it has switched # to ELF recently, or will in the future. 
case "${UNAME_MACHINE_ARCH}" in arm*|i386|m68k|ns32k|sh3*|sparc|vax) eval $set_cc_for_build if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep __ELF__ >/dev/null then # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). # Return netbsd for either. FIX? os=netbsd else os=netbsdelf fi ;; *) os=netbsd ;; esac # The OS release # Debian GNU/NetBSD machines have a different userland, and # thus, need a distinct triplet. However, they do not need # kernel version information, so it can be replaced with a # suitable tag, in the style of linux-gnu. case "${UNAME_VERSION}" in Debian*) release='-gnu' ;; *) release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'` ;; esac # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: # contains redundant information, the shorter form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. echo "${machine}-${os}${release}" exit ;; *:OpenBSD:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} exit ;; *:ekkoBSD:*:*) echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} exit ;; *:SolidBSD:*:*) echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} exit ;; macppc:MirBSD:*:*) echo powerppc-unknown-mirbsd${UNAME_RELEASE} exit ;; *:MirBSD:*:*) echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} exit ;; alpha:OSF1:*:*) case $UNAME_RELEASE in *4.0) UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` ;; *5.*) UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` ;; esac # According to Compaq, /usr/sbin/psrinfo has been available on # OSF/1 and Tru64 systems produced since 1995. I hope that # covers most systems running today. This code pipes the CPU # types through head -n 1, so we only detect the type of CPU 0. ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` case "$ALPHA_CPU_TYPE" in "EV4 (21064)") UNAME_MACHINE="alpha" ;; "EV4.5 (21064)") UNAME_MACHINE="alpha" ;; "LCA4 (21066/21068)") UNAME_MACHINE="alpha" ;; "EV5 (21164)") UNAME_MACHINE="alphaev5" ;; "EV5.6 (21164A)") UNAME_MACHINE="alphaev56" ;; "EV5.6 (21164PC)") UNAME_MACHINE="alphapca56" ;; "EV5.7 (21164PC)") UNAME_MACHINE="alphapca57" ;; "EV6 (21264)") UNAME_MACHINE="alphaev6" ;; "EV6.7 (21264A)") UNAME_MACHINE="alphaev67" ;; "EV6.8CB (21264C)") UNAME_MACHINE="alphaev68" ;; "EV6.8AL (21264B)") UNAME_MACHINE="alphaev68" ;; "EV6.8CX (21264D)") UNAME_MACHINE="alphaev68" ;; "EV6.9A (21264/EV69A)") UNAME_MACHINE="alphaev69" ;; "EV7 (21364)") UNAME_MACHINE="alphaev7" ;; "EV7.9 (21364A)") UNAME_MACHINE="alphaev79" ;; esac # A Pn.n version is a patched version. # A Vn.n version is a released version. # A Tn.n version is a released field test version. # A Xn.n version is an unreleased experimental baselevel. # 1.2 uses "1.2" for uname -r. echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` exit ;; Alpha\ *:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # Should we change UNAME_MACHINE based on the output of uname instead # of the specific Alpha model? 
echo alpha-pc-interix exit ;; 21064:Windows_NT:50:3) echo alpha-dec-winnt3.5 exit ;; Amiga*:UNIX_System_V:4.0:*) echo m68k-unknown-sysv4 exit ;; *:[Aa]miga[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-amigaos exit ;; *:[Mm]orph[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-morphos exit ;; *:OS/390:*:*) echo i370-ibm-openedition exit ;; *:z/VM:*:*) echo s390-ibm-zvmoe exit ;; *:OS400:*:*) echo powerpc-ibm-os400 exit ;; arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) echo arm-acorn-riscix${UNAME_RELEASE} exit ;; arm:riscos:*:*|arm:RISCOS:*:*) echo arm-unknown-riscos exit ;; SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) echo hppa1.1-hitachi-hiuxmpp exit ;; Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. if test "`(/bin/universe) 2>/dev/null`" = att ; then echo pyramid-pyramid-sysv3 else echo pyramid-pyramid-bsd fi exit ;; NILE*:*:*:dcosx) echo pyramid-pyramid-svr4 exit ;; DRS?6000:unix:4.0:6*) echo sparc-icl-nx6 exit ;; DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) case `/usr/bin/uname -p` in sparc) echo sparc-icl-nx7; exit ;; esac ;; sun4H:SunOS:5.*:*) echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; i86pc:SunOS:5.*:*) echo i386-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:6*:*) # According to config.sub, this is the proper way to canonicalize # SunOS6. Hard to guess exactly what SunOS6 will be like, but # it's likely to be more like Solaris than SunOS4. echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:*:*) case "`/usr/bin/arch -k`" in Series*|S4*) UNAME_RELEASE=`uname -v` ;; esac # Japanese Language versions have a version number like `4.1.3-JL'. echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` exit ;; sun3*:SunOS:*:*) echo m68k-sun-sunos${UNAME_RELEASE} exit ;; sun*:*:4.2BSD:*) UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3 case "`/bin/arch`" in sun3) echo m68k-sun-sunos${UNAME_RELEASE} ;; sun4) echo sparc-sun-sunos${UNAME_RELEASE} ;; esac exit ;; aushp:SunOS:*:*) echo sparc-auspex-sunos${UNAME_RELEASE} exit ;; # The situation for MiNT is a little confusing. The machine name # can be virtually everything (everything which is not # "atarist" or "atariste" at least should have a processor # > m68000). The system name ranges from "MiNT" over "FreeMiNT" # to the lowercase version "mint" (or "freemint"). Finally # the system name "TOS" denotes a system which is actually not # MiNT. But MiNT is downward compatible to TOS, so this should # be no problem. 
atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) echo m68k-milan-mint${UNAME_RELEASE} exit ;; hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) echo m68k-hades-mint${UNAME_RELEASE} exit ;; *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) echo m68k-unknown-mint${UNAME_RELEASE} exit ;; m68k:machten:*:*) echo m68k-apple-machten${UNAME_RELEASE} exit ;; powerpc:machten:*:*) echo powerpc-apple-machten${UNAME_RELEASE} exit ;; RISC*:Mach:*:*) echo mips-dec-mach_bsd4.3 exit ;; RISC*:ULTRIX:*:*) echo mips-dec-ultrix${UNAME_RELEASE} exit ;; VAX*:ULTRIX*:*:*) echo vax-dec-ultrix${UNAME_RELEASE} exit ;; 2020:CLIX:*:* | 2430:CLIX:*:*) echo clipper-intergraph-clix${UNAME_RELEASE} exit ;; mips:*:*:UMIPS | mips:*:*:RISCos) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #ifdef __cplusplus #include /* for printf() prototype */ int main (int argc, char *argv[]) { #else int main (argc, argv) int argc; char *argv[]; { #endif #if defined (host_mips) && defined (MIPSEB) #if defined (SYSTYPE_SYSV) printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_SVR4) printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); #endif #endif exit (-1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && SYSTEM_NAME=`$dummy $dummyarg` && { echo "$SYSTEM_NAME"; exit; } echo mips-mips-riscos${UNAME_RELEASE} exit ;; Motorola:PowerMAX_OS:*:*) echo powerpc-motorola-powermax exit ;; Motorola:*:4.3:PL8-*) echo powerpc-harris-powermax exit ;; Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) echo powerpc-harris-powermax exit ;; Night_Hawk:Power_UNIX:*:*) echo powerpc-harris-powerunix exit ;; m88k:CX/UX:7*:*) echo m88k-harris-cxux7 exit ;; m88k:*:4*:R4*) echo m88k-motorola-sysv4 exit ;; m88k:*:3*:R3*) echo m88k-motorola-sysv3 exit ;; AViiON:dgux:*:*) # DG/UX returns AViiON for all architectures UNAME_PROCESSOR=`/usr/bin/uname -p` if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] then if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ [ ${TARGET_BINARY_INTERFACE}x = x ] then echo m88k-dg-dgux${UNAME_RELEASE} else echo m88k-dg-dguxbcs${UNAME_RELEASE} fi else echo i586-dg-dgux${UNAME_RELEASE} fi exit ;; M88*:DolphinOS:*:*) # DolphinOS (SVR3) echo m88k-dolphin-sysv3 exit ;; M88*:*:R3*:*) # Delta 88k system running SVR3 echo m88k-motorola-sysv3 exit ;; XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) echo m88k-tektronix-sysv3 exit ;; Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) echo m68k-tektronix-bsd exit ;; *:IRIX*:*:*) echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` exit ;; ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' i*86:AIX:*:*) echo i386-ibm-aix exit ;; ia64:AIX:*:*) if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} exit ;; *:AIX:2:3) if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include main() { if (!__power_pc()) exit(1); puts("powerpc-ibm-aix3.2.5"); exit(0); } EOF if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` then echo "$SYSTEM_NAME" else echo rs6000-ibm-aix3.2.5 fi elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then echo rs6000-ibm-aix3.2.4 else echo rs6000-ibm-aix3.2 fi exit ;; *:AIX:*:[45]) IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then IBM_ARCH=rs6000 else IBM_ARCH=powerpc fi if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${IBM_ARCH}-ibm-aix${IBM_REV} exit ;; *:AIX:*:*) echo rs6000-ibm-aix exit ;; ibmrt:4.4BSD:*|romp-ibm:BSD:*) echo romp-ibm-bsd4.4 exit ;; ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to exit ;; # report: romp-ibm BSD 4.3 *:BOSX:*:*) echo rs6000-bull-bosx exit ;; DPX/2?00:B.O.S.:*:*) echo m68k-bull-sysv3 exit ;; 9000/[34]??:4.3bsd:1.*:*) echo m68k-hp-bsd exit ;; hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) echo m68k-hp-bsd4.4 exit ;; 9000/[34678]??:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` case "${UNAME_MACHINE}" in 9000/31? ) HP_ARCH=m68000 ;; 9000/[34]?? ) HP_ARCH=m68k ;; 9000/[678][0-9][0-9]) if [ -x /usr/bin/getconf ]; then sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` case "${sc_cpu_version}" in 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 532) # CPU_PA_RISC2_0 case "${sc_kernel_bits}" in 32) HP_ARCH="hppa2.0n" ;; 64) HP_ARCH="hppa2.0w" ;; '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 esac ;; esac fi if [ "${HP_ARCH}" = "" ]; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #define _HPUX_SOURCE #include #include int main () { #if defined(_SC_KERNEL_BITS) long bits = sysconf(_SC_KERNEL_BITS); #endif long cpu = sysconf (_SC_CPU_VERSION); switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0"); break; case CPU_PA_RISC1_1: puts ("hppa1.1"); break; case CPU_PA_RISC2_0: #if defined(_SC_KERNEL_BITS) switch (bits) { case 64: puts ("hppa2.0w"); break; case 32: puts ("hppa2.0n"); break; default: puts ("hppa2.0"); break; } break; #else /* !defined(_SC_KERNEL_BITS) */ puts ("hppa2.0"); break; #endif default: puts ("hppa1.0"); break; } exit (0); } EOF (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` test -z "$HP_ARCH" && HP_ARCH=hppa fi ;; esac if [ ${HP_ARCH} = "hppa2.0w" ] then eval $set_cc_for_build # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler # generating 64-bit code. 
GNU and HP use different nomenclature: # # $ CC_FOR_BUILD=cc ./config.guess # => hppa2.0w-hp-hpux11.23 # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess # => hppa64-hp-hpux11.23 if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | grep __LP64__ >/dev/null then HP_ARCH="hppa2.0w" else HP_ARCH="hppa64" fi fi echo ${HP_ARCH}-hp-hpux${HPUX_REV} exit ;; ia64:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` echo ia64-hp-hpux${HPUX_REV} exit ;; 3050*:HI-UX:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include int main () { long cpu = sysconf (_SC_CPU_VERSION); /* The order matters, because CPU_IS_HP_MC68K erroneously returns true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct results, however. */ if (CPU_IS_PA_RISC (cpu)) { switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; default: puts ("hppa-hitachi-hiuxwe2"); break; } } else if (CPU_IS_HP_MC68K (cpu)) puts ("m68k-hitachi-hiuxwe2"); else puts ("unknown-hitachi-hiuxwe2"); exit (0); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && { echo "$SYSTEM_NAME"; exit; } echo unknown-hitachi-hiuxwe2 exit ;; 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) echo hppa1.1-hp-bsd exit ;; 9000/8??:4.3bsd:*:*) echo hppa1.0-hp-bsd exit ;; *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) echo hppa1.0-hp-mpeix exit ;; hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) echo hppa1.1-hp-osf exit ;; hp8??:OSF1:*:*) echo hppa1.0-hp-osf exit ;; i*86:OSF1:*:*) if [ -x /usr/sbin/sysversion ] ; then echo ${UNAME_MACHINE}-unknown-osf1mk else echo ${UNAME_MACHINE}-unknown-osf1 fi exit ;; parisc*:Lites*:*:*) echo hppa1.1-hp-lites exit ;; C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) echo c1-convex-bsd exit ;; C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit ;; C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) echo c34-convex-bsd exit ;; C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) echo c38-convex-bsd exit ;; C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) echo c4-convex-bsd exit ;; CRAY*Y-MP:*:*:*) echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*[A-Z]90:*:*:*) echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ -e 's/\.[^.]*$/.X/' exit ;; CRAY*TS:*:*:*) echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*T3E:*:*:*) echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*SV1:*:*:*) echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; *:UNICOS/mp:*:*) echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; 5000:UNIX_System_V:4.*:*) FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) 
echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} exit ;; sparc*:BSD/OS:*:*) echo sparc-unknown-bsdi${UNAME_RELEASE} exit ;; *:BSD/OS:*:*) echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} exit ;; *:FreeBSD:*:*) case ${UNAME_MACHINE} in pc98) echo i386-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; *) echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; esac exit ;; i*:CYGWIN*:*) echo ${UNAME_MACHINE}-pc-cygwin exit ;; i*:MINGW*:*) echo ${UNAME_MACHINE}-pc-mingw32 exit ;; i*:windows32*:*) # uname -m includes "-pc" on this system. echo ${UNAME_MACHINE}-mingw32 exit ;; i*:PW*:*) echo ${UNAME_MACHINE}-pc-pw32 exit ;; x86:Interix*:[345]*) echo i586-pc-interix${UNAME_RELEASE}|sed -e 's/\..*//' exit ;; [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) echo i${UNAME_MACHINE}-pc-mks exit ;; i*:Windows_NT*:* | Pentium*:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we # UNAME_MACHINE based on the output of uname instead of i386? echo i586-pc-interix exit ;; i*:UWIN*:*) echo ${UNAME_MACHINE}-pc-uwin exit ;; amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) echo x86_64-unknown-cygwin exit ;; p*:CYGWIN*:*) echo powerpcle-unknown-cygwin exit ;; prep*:SunOS:5.*:*) echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; *:GNU:*:*) # the GNU system echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` exit ;; *:GNU/*:*:*) # other systems with GNU libc and userland echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu exit ;; i*86:Minix:*:*) echo ${UNAME_MACHINE}-pc-minix exit ;; arm*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; cris:Linux:*:*) echo cris-axis-linux-gnu exit ;; crisv32:Linux:*:*) echo crisv32-axis-linux-gnu exit ;; frv:Linux:*:*) echo frv-unknown-linux-gnu exit ;; ia64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; m32r*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; m68*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; mips:Linux:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #undef CPU #undef mips #undef mipsel #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) CPU=mipsel #else #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) CPU=mips #else CPU= #endif #endif EOF eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n '/^CPU/{s: ::g;p;}'`" test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } ;; mips64:Linux:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #undef CPU #undef mips64 #undef mips64el #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) CPU=mips64el #else #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) CPU=mips64 #else CPU= #endif #endif EOF eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n '/^CPU/{s: ::g;p;}'`" test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } ;; or32:Linux:*:*) echo or32-unknown-linux-gnu exit ;; ppc:Linux:*:*) echo powerpc-unknown-linux-gnu exit ;; ppc64:Linux:*:*) echo powerpc64-unknown-linux-gnu exit ;; alpha:Linux:*:*) case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in EV5) UNAME_MACHINE=alphaev5 ;; EV56) UNAME_MACHINE=alphaev56 ;; PCA56) UNAME_MACHINE=alphapca56 ;; PCA57) UNAME_MACHINE=alphapca56 ;; 
EV6) UNAME_MACHINE=alphaev6 ;; EV67) UNAME_MACHINE=alphaev67 ;; EV68*) UNAME_MACHINE=alphaev68 ;; esac objdump --private-headers /bin/sh | grep ld.so.1 >/dev/null if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} exit ;; parisc:Linux:*:* | hppa:Linux:*:*) # Look for CPU level case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in PA7*) echo hppa1.1-unknown-linux-gnu ;; PA8*) echo hppa2.0-unknown-linux-gnu ;; *) echo hppa-unknown-linux-gnu ;; esac exit ;; parisc64:Linux:*:* | hppa64:Linux:*:*) echo hppa64-unknown-linux-gnu exit ;; s390:Linux:*:* | s390x:Linux:*:*) echo ${UNAME_MACHINE}-ibm-linux exit ;; sh64*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; sh*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; sparc:Linux:*:* | sparc64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; vax:Linux:*:*) echo ${UNAME_MACHINE}-dec-linux-gnu exit ;; x86_64:Linux:*:*) echo x86_64-unknown-linux-gnu exit ;; i*86:Linux:*:*) # The BFD linker knows what the default object file format is, so # first see if it will tell us. cd to the root directory to prevent # problems with other programs or directories called `ld' in the path. # Set LC_ALL=C to ensure ld outputs messages in English. ld_supported_targets=`cd /; LC_ALL=C ld --help 2>&1 \ | sed -ne '/supported targets:/!d s/[ ][ ]*/ /g s/.*supported targets: *// s/ .*// p'` case "$ld_supported_targets" in elf32-i386) TENTATIVE="${UNAME_MACHINE}-pc-linux-gnu" ;; a.out-i386-linux) echo "${UNAME_MACHINE}-pc-linux-gnuaout" exit ;; coff-i386) echo "${UNAME_MACHINE}-pc-linux-gnucoff" exit ;; "") # Either a pre-BFD a.out linker (linux-gnuoldld) or # one that does not give us useful --help. echo "${UNAME_MACHINE}-pc-linux-gnuoldld" exit ;; esac # Determine whether the default compiler is a.out or elf eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include #ifdef __ELF__ # ifdef __GLIBC__ # if __GLIBC__ >= 2 LIBC=gnu # else LIBC=gnulibc1 # endif # else LIBC=gnulibc1 # endif #else #if defined(__INTEL_COMPILER) || defined(__PGI) LIBC=gnu #else LIBC=gnuaout #endif #endif #ifdef __dietlibc__ LIBC=dietlibc #endif EOF eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n '/^LIBC/{s: ::g;p;}'`" test x"${LIBC}" != x && { echo "${UNAME_MACHINE}-pc-linux-${LIBC}" exit } test x"${TENTATIVE}" != x && { echo "${TENTATIVE}"; exit; } ;; i*86:DYNIX/ptx:4*:*) # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. # earlier versions are messed up and put the nodename in both # sysname and nodename. echo i386-sequent-sysv4 exit ;; i*86:UNIX_SV:4.2MP:2.*) # Unixware is an offshoot of SVR4, but it has its own version # number series starting with 2... # I am not positive that other SVR4 systems won't match this, # I just have to hope. -- rms. # Use sysv4.2uw... so that sysv4* matches it. echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} exit ;; i*86:OS/2:*:*) # If we were able to find `uname', then EMX Unix compatibility # is probably installed. 
echo ${UNAME_MACHINE}-pc-os2-emx exit ;; i*86:XTS-300:*:STOP) echo ${UNAME_MACHINE}-unknown-stop exit ;; i*86:atheos:*:*) echo ${UNAME_MACHINE}-unknown-atheos exit ;; i*86:syllable:*:*) echo ${UNAME_MACHINE}-pc-syllable exit ;; i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.0*:*) echo i386-unknown-lynxos${UNAME_RELEASE} exit ;; i*86:*DOS:*:*) echo ${UNAME_MACHINE}-pc-msdosdjgpp exit ;; i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} else echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} fi exit ;; i*86:*:5:[678]*) # UnixWare 7.x, OpenUNIX and OpenServer 6. case `/bin/uname -X | grep "^Machine"` in *486*) UNAME_MACHINE=i486 ;; *Pentium) UNAME_MACHINE=i586 ;; *Pent*|*Celeron) UNAME_MACHINE=i686 ;; esac echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} exit ;; i*86:*:3.2:*) if test -f /usr/options/cb.name; then UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ && UNAME_MACHINE=i586 (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ && UNAME_MACHINE=i686 (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ && UNAME_MACHINE=i686 echo ${UNAME_MACHINE}-pc-sco$UNAME_REL else echo ${UNAME_MACHINE}-pc-sysv32 fi exit ;; pc:*:*:*) # Left here for compatibility: # uname -m prints for DJGPP always 'pc', but it prints nothing about # the processor, so we play safe by assuming i386. echo i386-pc-msdosdjgpp exit ;; Intel:Mach:3*:*) echo i386-pc-mach3 exit ;; paragon:*:*:*) echo i860-intel-osf1 exit ;; i860:*:4.*:*) # i860-SVR4 if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 else # Add other i860-SVR4 vendors below as they are discovered. 
echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 fi exit ;; mini*:CTIX:SYS*5:*) # "miniframe" echo m68010-convergent-sysv exit ;; mc68k:UNIX:SYSTEM5:3.51m) echo m68k-convergent-sysv exit ;; M680?0:D-NIX:5.3:*) echo m68k-diab-dnix exit ;; M68*:*:R3V[5678]*:*) test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) OS_REL='' test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4; exit; } ;; m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) echo m68k-unknown-lynxos${UNAME_RELEASE} exit ;; mc68030:UNIX_System_V:4.*:*) echo m68k-atari-sysv4 exit ;; TSUNAMI:LynxOS:2.*:*) echo sparc-unknown-lynxos${UNAME_RELEASE} exit ;; rs6000:LynxOS:2.*:*) echo rs6000-unknown-lynxos${UNAME_RELEASE} exit ;; PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.0*:*) echo powerpc-unknown-lynxos${UNAME_RELEASE} exit ;; SM[BE]S:UNIX_SV:*:*) echo mips-dde-sysv${UNAME_RELEASE} exit ;; RM*:ReliantUNIX-*:*:*) echo mips-sni-sysv4 exit ;; RM*:SINIX-*:*:*) echo mips-sni-sysv4 exit ;; *:SINIX-*:*:*) if uname -p 2>/dev/null >/dev/null ; then UNAME_MACHINE=`(uname -p) 2>/dev/null` echo ${UNAME_MACHINE}-sni-sysv4 else echo ns32k-sni-sysv fi exit ;; PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort # says echo i586-unisys-sysv4 exit ;; *:UNIX_System_V:4*:FTX*) # From Gerald Hewes . # How about differentiating between stratus architectures? -djm echo hppa1.1-stratus-sysv4 exit ;; *:*:*:FTX*) # From seanf@swdc.stratus.com. echo i860-stratus-sysv4 exit ;; i*86:VOS:*:*) # From Paul.Green@stratus.com. echo ${UNAME_MACHINE}-stratus-vos exit ;; *:VOS:*:*) # From Paul.Green@stratus.com. echo hppa1.1-stratus-vos exit ;; mc68*:A/UX:*:*) echo m68k-apple-aux${UNAME_RELEASE} exit ;; news*:NEWS-OS:6*:*) echo mips-sony-newsos6 exit ;; R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) if [ -d /usr/nec ]; then echo mips-nec-sysv${UNAME_RELEASE} else echo mips-unknown-sysv${UNAME_RELEASE} fi exit ;; BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. echo powerpc-be-beos exit ;; BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. echo powerpc-apple-beos exit ;; BePC:BeOS:*:*) # BeOS running on Intel PC compatible. 
echo i586-pc-beos exit ;; SX-4:SUPER-UX:*:*) echo sx4-nec-superux${UNAME_RELEASE} exit ;; SX-5:SUPER-UX:*:*) echo sx5-nec-superux${UNAME_RELEASE} exit ;; SX-6:SUPER-UX:*:*) echo sx6-nec-superux${UNAME_RELEASE} exit ;; Power*:Rhapsody:*:*) echo powerpc-apple-rhapsody${UNAME_RELEASE} exit ;; *:Rhapsody:*:*) echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} exit ;; *:Darwin:*:*) UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown case $UNAME_PROCESSOR in unknown) UNAME_PROCESSOR=powerpc ;; esac echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} exit ;; *:procnto*:*:* | *:QNX:[0123456789]*:*) UNAME_PROCESSOR=`uname -p` if test "$UNAME_PROCESSOR" = "x86"; then UNAME_PROCESSOR=i386 UNAME_MACHINE=pc fi echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} exit ;; *:QNX:*:4*) echo i386-pc-qnx exit ;; NSE-?:NONSTOP_KERNEL:*:*) echo nse-tandem-nsk${UNAME_RELEASE} exit ;; NSR-?:NONSTOP_KERNEL:*:*) echo nsr-tandem-nsk${UNAME_RELEASE} exit ;; *:NonStop-UX:*:*) echo mips-compaq-nonstopux exit ;; BS2000:POSIX*:*:*) echo bs2000-siemens-sysv exit ;; DS/*:UNIX_System_V:*:*) echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} exit ;; *:Plan9:*:*) # "uname -m" is not consistent, so use $cputype instead. 386 # is converted to i386 for consistency with other x86 # operating systems. if test "$cputype" = "386"; then UNAME_MACHINE=i386 else UNAME_MACHINE="$cputype" fi echo ${UNAME_MACHINE}-unknown-plan9 exit ;; *:TOPS-10:*:*) echo pdp10-unknown-tops10 exit ;; *:TENEX:*:*) echo pdp10-unknown-tenex exit ;; KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) echo pdp10-dec-tops20 exit ;; XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) echo pdp10-xkl-tops20 exit ;; *:TOPS-20:*:*) echo pdp10-unknown-tops20 exit ;; *:ITS:*:*) echo pdp10-unknown-its exit ;; SEI:*:*:SEIUX) echo mips-sei-seiux${UNAME_RELEASE} exit ;; *:DragonFly:*:*) echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` exit ;; *:*VMS:*:*) UNAME_MACHINE=`(uname -p) 2>/dev/null` case "${UNAME_MACHINE}" in A*) echo alpha-dec-vms ; exit ;; I*) echo ia64-dec-vms ; exit ;; V*) echo vax-dec-vms ; exit ;; esac ;; *:XENIX:*:SysV) echo i386-pc-xenix exit ;; i*86:skyos:*:*) echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//' exit ;; i*86:rdos:*:*) echo ${UNAME_MACHINE}-pc-rdos exit ;; esac #echo '(No uname command or uname output not recognized.)' 1>&2 #echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2 eval $set_cc_for_build cat >$dummy.c < # include #endif main () { #if defined (sony) #if defined (MIPSEB) /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, I don't know.... 
*/ printf ("mips-sony-bsd\n"); exit (0); #else #include printf ("m68k-sony-newsos%s\n", #ifdef NEWSOS4 "4" #else "" #endif ); exit (0); #endif #endif #if defined (__arm) && defined (__acorn) && defined (__unix) printf ("arm-acorn-riscix\n"); exit (0); #endif #if defined (hp300) && !defined (hpux) printf ("m68k-hp-bsd\n"); exit (0); #endif #if defined (NeXT) #if !defined (__ARCHITECTURE__) #define __ARCHITECTURE__ "m68k" #endif int version; version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`; if (version < 4) printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); else printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); exit (0); #endif #if defined (MULTIMAX) || defined (n16) #if defined (UMAXV) printf ("ns32k-encore-sysv\n"); exit (0); #else #if defined (CMU) printf ("ns32k-encore-mach\n"); exit (0); #else printf ("ns32k-encore-bsd\n"); exit (0); #endif #endif #endif #if defined (__386BSD__) printf ("i386-pc-bsd\n"); exit (0); #endif #if defined (sequent) #if defined (i386) printf ("i386-sequent-dynix\n"); exit (0); #endif #if defined (ns32000) printf ("ns32k-sequent-dynix\n"); exit (0); #endif #endif #if defined (_SEQUENT_) struct utsname un; uname(&un); if (strncmp(un.version, "V2", 2) == 0) { printf ("i386-sequent-ptx2\n"); exit (0); } if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */ printf ("i386-sequent-ptx1\n"); exit (0); } printf ("i386-sequent-ptx\n"); exit (0); #endif #if defined (vax) # if !defined (ultrix) # include # if defined (BSD) # if BSD == 43 printf ("vax-dec-bsd4.3\n"); exit (0); # else # if BSD == 199006 printf ("vax-dec-bsd4.3reno\n"); exit (0); # else printf ("vax-dec-bsd\n"); exit (0); # endif # endif # else printf ("vax-dec-bsd\n"); exit (0); # endif # else printf ("vax-dec-ultrix\n"); exit (0); # endif #endif #if defined (alliant) && defined (i860) printf ("i860-alliant-bsd\n"); exit (0); #endif exit (1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` && { echo "$SYSTEM_NAME"; exit; } # Apollos put the system type in the environment. test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; } # Convex versions that predate uname can use getsysinfo(1) if [ -x /usr/convex/getsysinfo ] then case `getsysinfo -f cpu_type` in c1*) echo c1-convex-bsd exit ;; c2*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit ;; c34*) echo c34-convex-bsd exit ;; c38*) echo c38-convex-bsd exit ;; c4*) echo c4-convex-bsd exit ;; esac fi cat >&2 < in order to provide the needed information to handle your system. 
config.guess timestamp = $timestamp uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` /bin/uname -X = `(/bin/uname -X) 2>/dev/null` hostinfo = `(hostinfo) 2>/dev/null` /bin/universe = `(/bin/universe) 2>/dev/null` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` /bin/arch = `(/bin/arch) 2>/dev/null` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` UNAME_MACHINE = ${UNAME_MACHINE} UNAME_RELEASE = ${UNAME_RELEASE} UNAME_SYSTEM = ${UNAME_SYSTEM} UNAME_VERSION = ${UNAME_VERSION} EOF exit 1 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: antlr-2.7.7/scripts/antlr.sh.in0000755000175000017500000000616310522211616016362 0ustar twernertwerner#!/bin/sh ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger. ## ## ## ## Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## test -z "${verbose}" && { verbose=@VERBOSE@ } ## If there's nothing to be done we exit gracefully. test -z "$1" && exit 0 ## This is the top build directory. abs_top_build_dir="@abs_this_builddir@" ## If $ANTLR_JAR is given as environment variable we are going ## to use it, otherwise we are using configured value. The value ## given by $ANTLR_JAR must be a valid file or directory - this ## will be checked. If not, an error gets reported. antlr_jar= test -n "${ANTLR_JAR}" && { antlr_jar="${ANTLR_JAR}" test -f "${antlr_jar}" -o -d "${antlr_jar}" || { cat <&2 cat <> E R R O R << ============================================================ CLASSPATH=$CLASSPATH $cmd $arg ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. 
The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } done exit 0 antlr-2.7.7/scripts/pyinst.sh.in0000755000175000017500000000226410522211616016566 0ustar twernertwerner#!/usr/bin/env python import os,sys from distutils.core import setup ## This is where module 'antlr' resides in the installed package dstdir = '@prefix@/share/@PACKAGE_NAME@-@PACKAGE_VERSION@' ## This is where module 'antlr' resides in the source distribution' srcdir = '@abs_top_srcdir@/lib/python/antlr' moddir = None for d in [ dstdir, srcdir ] : moddir = d for f in [ '__init__.py','antlr.py' ] : if moddir : x = "%s/%s" % (moddir,f) if not os.path.exists(x) : sys.stderr.write('notice: "%s" does not exist - going to skip dir "%s")\n' % (x,moddir)) sys.stderr.flush() moddir = None if moddir: break if not moddir: sys.stderr.write('error: unable to find module "antlr".\n') sys.stderr.flush() sys.exit(1) else: sys.stderr.write('notice: module "antlr" found in "%s"\n' % (moddir)) setup(name="antlr", version="@PACKAGE_VERSION@", description="Python runtime support for ANTLR-generated parsers", author="Wolfgang Haefelinger / Marq Kole", author_email="ora.et.labora@web.de", url="http://www.antlr.org/", packages=['antlr'], package_dir={'antlr' : moddir } ) antlr-2.7.7/scripts/python.sh.in0000755000175000017500000000305010522211616016553 0ustar twernertwerner#!/bin/bash ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger. ## ## ## ## Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## test -z "${verbose}" && { verbose=@VERBOSE@ } cmd=@PYTHON@ PYTHONPATH=@abs_top_srcdir@/lib/python export PYTHONPATH ARGV="$*" ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This shall be the command to be excuted below ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## cmd="@PYTHON@ @PYTHONFLAGS@ ${ARGV}" ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## standard template to execute a command ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## case "${verbose}" in 0|no|nein|non) ;; *) echo PYTHONPATH=${PYTHONPATH} echo $cmd ;; esac $cmd || { rc=$? cat <> E R R O R << ============================================================ $cmd ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } exit 0 antlr-2.7.7/scripts/csc.sh.in0000755000175000017500000001427110522211616016011 0ustar twernertwerner#!/bin/sh ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger. ## ## ## ## Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## test -z "${verbose}" && { verbose=@VERBOSE@ } ## This script will be called to compile a list of C# files ## on all UNIX/Cygwin platforms. ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ## pre-set some variables required or useful to compile C# ## source files: ## srcdir shall contain absolute path to package directory. srcdir="@abs_top_srcdir@" ## objdir shall contain absolute path to this build directory. 
objdir="@abs_this_builddir@" ## variable javac contains the canonical java compiler name. ## At point of writing known names are csc. csc="@csc@" antlr_net="@ANTLR_NET@" ## When on cygwin we translage paths into DOS notation as ## csc appears not to understand mixed notation. case @build_os@ in cygwin) test -n "$1" && { ARGV="`cygpath -w $*`" } test -n "${srcdir}" && { srcdir="`cygpath -w ${srcdir}`" } test -n "${objdir}" && { objdir="`cygpath -w ${objdir}`" } test -n "${antlr_net}" && { antlr_net="`cygpath -w ${antlr_net}`" } ;; mingw*) cygpathw () { test -n "$1" && { d=`dirname "$1"` b=`basename "$1"` d=`cd $d && pwd -W` d=`echo "$d" | sed -e 's:/:\\\:g'` echo "$d\\$b" } } ARGV="" for x in $* ; do x=`cygpathw "$x"` ARGV="$ARGV $x" done test -n "${srcdir}" && { srcdir="`cygpathw ${srcdir}`" } test -n "${objdir}" && { objdir="`cygpathw ${objdir}`" } test -n "${antlr_net}" && { antlr_net="`cygpathw ${antlr_net}`" } ;; *) ARGV="$*" ;; esac ## The very first argument shall alway tell us what we are ## going to build. We support here either a DLL, or an EXE. set x ${ARGV} case "$2" in *.dll|*.DLL) TARGET="$2"; shift; shift ;; *.exe|*.EXE) TARGET="$2"; shift; shift ;; *.cs|*.CS) TARGET="main.exe" ; shift ;; *) cat < append content of CSHARPCFLAGS to precomputed flags ## '-' -> prepend content -*- ## '=' -> do not use precomputed flags ## If none of these characters are given, the behaviour will ## be the same as if "=" would have been given. set x ${CSHARPCFLAGS} ; shift case $1 in +) shift CSHARPCFLAGS="${csharpcflags} $*" ;; -) shift CSHARPCFLAGS="$* ${csharpcflags}" ;; =) shift CSHARPCFLAGS="$*" ;; *) if test -z "$1" ; then CSHARPCFLAGS="${csharpcflags}" else CSHARPCFLAGS="$*" fi ;; esac ## Any special treatment goes here .. case "${csharpc}" in csc) ;; *) ;; esac ## go ahead .. cmd="${CSHARPC} ${CSHARPCFLAGS}" ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## standard template to execute a command ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## case "${verbose}" in 0|no|nein|non) set x `echo $ARGV | wc` case $3 in 1) files="file" ;; *) files="files";; esac echo "*** compiling $3 C# ${files}" ;; *) echo $cmd ;; esac $cmd ${ARGV} || { rc=$? cat <> E R R O R << ============================================================ $cmd [$3 file(s) skipped] ============================================================ Got an error while trying to execute command above. Error messages (if any) must have shown before. The exit code was: exit($rc) xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx EOF exit $rc } exit 0 antlr-2.7.7/scripts/run-antlr.in0000755000175000017500000000252710522211616016553 0ustar twernertwerner#!/bin/sh ## helper utilities .. 
INSTALL="@INSTALL@" MKDIR="@MKDIR@" RM="@RM@" TAR="@TAR@" TOUCH="@TOUCH@" CHMOD="@CHMOD@" SED="@SED@" GREP="@GREP@" CAT="@CAT@" CHMOD="@CHMOD@" # usual dribble prefix="@prefix@" exec_prefix="@exec_prefix@" program_transform_name="@program_transform_name@" bindir="@bindir@" sbindir="@sbindir@" libexecdir="@libexecdir@" datadir="@datadir@" sysconfdir="@sysconfdir@" sharedstatedir="@sharedstatedir@" localstatedir="@localstatedir@" libdir="@libdir@" includedir="@includedir@" oldincludedir="@oldincludedir@" infodir="@infodir@" mandir="@mandir@" build_alias="@build_alias@" host_alias="@host_alias@" target_alias="@target_alias@" build="@build@" build_cpu="@build_cpu@" build_vendor="@build_vendor@" build_os="@build_os@" host="@host@" host_cpu="@host_cpu@" host_vendor="@host_vendor@" host_os="@host_os@" just_make="@just_make@" # Version stuff... VERSION="@VERSION@" SUBVERSION="@SUBVERSION@" PATCHLEVEL="@PATCHLEVEL@" PACKAGE_NAME="@PACKAGE_NAME@" PACKAGE_VERSION="@PACKAGE_VERSION@" versioneddir="@PACKAGE_NAME@-@PACKAGE_VERSION@" ANTLR_JAR="@libdir@/antlr.jar" case @build_os@ in cygwin|mingw|msys) ANTLR_JAR="`cygpath -m $ANTLR_JAR`" ;; *) ;; esac if test "x$CLASSPATH" = x; then CLASSPATH="${ANTLR_JAR}" else CLASSPATH="${ANTLR_JAR}@PATH_SEPARATOR@${CLASSPATH}" fi export CLASSPATH @JAVA@ antlr.Tool $* antlr-2.7.7/scripts/antlr.spec.in0000644000175000017500000001070110522211616016670 0ustar twernertwernerSummary: The ANTLR Parser Framework Name: antlr Version: @PACKAGE_VERSION@ Release: 1 License: Public Domain Group: Development/Tools Source: http://www.antlr.org/download/antlr-%{version}.tar.gz URL: http://www.antlr.org %description ANTLR, ANother Tool for Language Recognition, (formerly PCCTS) is a language tool that provides a framework for constructing recognizers, compilers, and translators using Java, C#, C++ or Python. ANTLR provides excellent support for tree construction, tree walking, and translation. 
%prep %setup -q %build %configure --disable-examples make %install rm -rf $RPM_BUILD_ROOT make install %files %defattr(-,root,root) /usr/bin/antlr /usr/bin/antlr-config /usr/sbin/pyantlr.sh /usr/include/antlr/ANTLRException.hpp /usr/include/antlr/ANTLRUtil.hpp /usr/include/antlr/ASTArray.hpp /usr/include/antlr/ASTFactory.hpp /usr/include/antlr/AST.hpp /usr/include/antlr/ASTNULLType.hpp /usr/include/antlr/ASTPair.hpp /usr/include/antlr/ASTRefCount.hpp /usr/include/antlr/BaseAST.hpp /usr/include/antlr/BitSet.hpp /usr/include/antlr/CharBuffer.hpp /usr/include/antlr/CharInputBuffer.hpp /usr/include/antlr/CharScanner.hpp /usr/include/antlr/CharStreamException.hpp /usr/include/antlr/CharStreamIOException.hpp /usr/include/antlr/CircularQueue.hpp /usr/include/antlr/CommonAST.hpp /usr/include/antlr/CommonASTWithHiddenTokens.hpp /usr/include/antlr/CommonHiddenStreamToken.hpp /usr/include/antlr/CommonToken.hpp /usr/include/antlr/config.hpp /usr/include/antlr/InputBuffer.hpp /usr/include/antlr/IOException.hpp /usr/include/antlr/LexerSharedInputState.hpp /usr/include/antlr/LLkParser.hpp /usr/include/antlr/MismatchedCharException.hpp /usr/include/antlr/MismatchedTokenException.hpp /usr/include/antlr/NoViableAltException.hpp /usr/include/antlr/NoViableAltForCharException.hpp /usr/include/antlr/Parser.hpp /usr/include/antlr/ParserSharedInputState.hpp /usr/include/antlr/RecognitionException.hpp /usr/include/antlr/RefCount.hpp /usr/include/antlr/SemanticException.hpp /usr/include/antlr/String.hpp /usr/include/antlr/TokenBuffer.hpp /usr/include/antlr/Token.hpp /usr/include/antlr/TokenStreamBasicFilter.hpp /usr/include/antlr/TokenStreamException.hpp /usr/include/antlr/TokenStreamHiddenTokenFilter.hpp /usr/include/antlr/TokenStream.hpp /usr/include/antlr/TokenStreamIOException.hpp /usr/include/antlr/TokenStreamRecognitionException.hpp /usr/include/antlr/TokenStreamRetryException.hpp /usr/include/antlr/TokenStreamRewriteEngine.hpp /usr/include/antlr/TokenStreamSelector.hpp /usr/include/antlr/TokenWithIndex.hpp /usr/include/antlr/TreeParser.hpp /usr/include/antlr/TreeParserSharedInputState.hpp /usr/lib/antlr.jar /usr/lib/libantlr.a /usr/lib/antlr.py /usr/share/@PACKAGE_VERSION@/antlr.jar /usr/share/@PACKAGE_VERSION@/__init__.py /usr/share/@PACKAGE_VERSION@/antlr.py /usr/share/@PACKAGE_VERSION@/antlr-mode.el /usr/share/@PACKAGE_VERSION@/antlr-jedit.xml /usr/share/doc/@PACKAGE_VERSION@/closure.gif /usr/share/doc/@PACKAGE_VERSION@/cpp-runtime.html /usr/share/doc/@PACKAGE_VERSION@/csharp-runtime.html /usr/share/doc/@PACKAGE_VERSION@/err.html /usr/share/doc/@PACKAGE_VERSION@/glossary.html /usr/share/doc/@PACKAGE_VERSION@/hidden.stream.gif /usr/share/doc/@PACKAGE_VERSION@/index.html /usr/share/doc/@PACKAGE_VERSION@/inheritance.html /usr/share/doc/@PACKAGE_VERSION@/j-guru-blue.jpg /usr/share/doc/@PACKAGE_VERSION@/jguru-logo.gif /usr/share/doc/@PACKAGE_VERSION@/lexer.html /usr/share/doc/@PACKAGE_VERSION@/lexer.to.parser.tokens.gif /usr/share/doc/@PACKAGE_VERSION@/logo.gif /usr/share/doc/@PACKAGE_VERSION@/metalang.html /usr/share/doc/@PACKAGE_VERSION@/optional.gif /usr/share/doc/@PACKAGE_VERSION@/options.html /usr/share/doc/@PACKAGE_VERSION@/posclosure.gif /usr/share/doc/@PACKAGE_VERSION@/python-runtime.html /usr/share/doc/@PACKAGE_VERSION@/runtime.html /usr/share/doc/@PACKAGE_VERSION@/sor.html /usr/share/doc/@PACKAGE_VERSION@/stream.perspectives.gif /usr/share/doc/@PACKAGE_VERSION@/stream.selector.gif /usr/share/doc/@PACKAGE_VERSION@/streams.html /usr/share/doc/@PACKAGE_VERSION@/stream.splitter.gif 
/usr/share/doc/@PACKAGE_VERSION@/subrule.gif /usr/share/doc/@PACKAGE_VERSION@/trees.html /usr/share/doc/@PACKAGE_VERSION@/vocab.html /usr/share/doc/@PACKAGE_VERSION@/LICENSE.txt /usr/share/doc/@PACKAGE_VERSION@/README.txt /usr/share/doc/@PACKAGE_VERSION@/INSTALL.txt %clean %changelog * Tue Jan 11 2005 Wolfgang Haefelinger Build RPM on Mandrake 1o * Thu Aug 21 2003 Ric Klaren - First stab at RPM for RH9 antlr-2.7.7/scripts/antlr-config.in0000755000175000017500000000233710522211616017213 0ustar twernertwerner#!/bin/sh # @configure_input@ prefix=@prefix@ exec_prefix=@exec_prefix@ datadir=@datadir@ libdir=@libdir@ includedir=@includedir@ usage() { cat <&2 fi includes="" libs="" while test $# -gt 0; do case "$1" in -*=*) optarg=`echo "$1" | sed 's/[-_a-zA-Z0-9]*=//'` ;; *) optarg= ;; esac case $1 in --prefix) echo_prefix=yes ;; --exec-prefix) echo_exec_prefix=yes ;; --version) echo @PACKAGE_VERSION@ exit 0 ;; --cflags|-cxxflags) includes="-I${includedir}" echo_cflags="yes" ;; --libs) case @cxx@ in cl|bcc32) libs="${libdir}/antlr.lib" ;; *) libs="${libdir}/libantlr.a" ;; esac echo_libs=yes ;; *) usage 1 1>&2 ;; esac shift done if test "$echo_prefix" = "yes"; then echo $prefix fi if test "$echo_exec_prefix" = "yes"; then echo $exec_prefix fi if test "$echo_cflags" = "yes"; then echo $includes fi if test "$echo_libs" = "yes"; then echo "${libs}" fi exit 0 antlr-2.7.7/scripts/config.sub0000755000175000017500000007706010522211616016265 0ustar twernertwerner#! /bin/sh # Configuration validation subroutine script. # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, # 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. timestamp='2006-01-02' # This file is (in principle) common to ALL GNU software. # The presence of a machine in this file suggests that SOME GNU software # can handle that machine. It does not imply ALL GNU software can. # # This file is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA # 02110-1301, USA. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # Please send patches to . Submit a context # diff and a properly formatted ChangeLog entry. # # Configuration subroutine to validate and canonicalize a configuration type. # Supply the specified configuration type as an argument. # If it is invalid, we print an error message on stderr and exit with code 1. # Otherwise, we print the canonical config type on stdout and succeed. # This file is supposed to be the same for all GNU packages # and recognize all the CPU types, system types and aliases # that are meaningful with *any* GNU software. # Each package is responsible for reporting which valid configurations # it does not support. 
The user should be able to distinguish # a failure to support a valid configuration from a meaningless # configuration. # The goal of this file is to map all the various variations of a given # machine specification into a single specification in the form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM # or in some cases, the newer four-part form: # CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM # It is wrong to echo any other type of specification. me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] CPU-MFR-OPSYS $0 [OPTION] ALIAS Canonicalize a configuration name. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." version="\ GNU config.sub ($timestamp) Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit ;; --version | -v ) echo "$version" ; exit ;; --help | --h* | -h ) echo "$usage"; exit ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" exit 1 ;; *local*) # First pass through any local machine types. echo $1 exit ;; * ) break ;; esac done case $# in 0) echo "$me: missing argument$help" >&2 exit 1;; 1) ;; *) echo "$me: too many arguments$help" >&2 exit 1;; esac # Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). # Here we must recognize all the valid KERNEL-OS combinations. maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` case $maybe_os in nto-qnx* | linux-gnu* | linux-dietlibc | linux-newlib* | linux-uclibc* | \ uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | \ storm-chaos* | os2-emx* | rtmk-nova*) os=-$maybe_os basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` ;; *) basic_machine=`echo $1 | sed 's/-[^-]*$//'` if [ $basic_machine != $1 ] then os=`echo $1 | sed 's/.*-/-/'` else os=; fi ;; esac ### Let's recognize common machines as not being operating systems so ### that things like config.sub decstation-3100 work. We also ### recognize some manufacturers as not being operating systems, so we ### can provide default operating systems below. case $os in -sun*os*) # Prevent following clause from handling this invalid input. 
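# Editor's illustration (not part of the original script): assuming a POSIX
# shell, typical canonicalizations performed by this script look like
#   ./config.sub decstation-3100  ->  mips-dec-ultrix4.2
#   ./config.sub sun4             ->  sparc-sun-sunos4.1.1
#   ./config.sub i686-linux       ->  i686-pc-linux-gnu
# i.e. an alias or partial configuration name is expanded to the full
# CPU-MANUFACTURER-OS (or CPU-MANUFACTURER-KERNEL-OS) form described above.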
;; -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ -apple | -axis | -knuth | -cray) os= basic_machine=$1 ;; -sim | -cisco | -oki | -wec | -winbond) os= basic_machine=$1 ;; -scout) ;; -wrs) os=-vxworks basic_machine=$1 ;; -chorusos*) os=-chorusos basic_machine=$1 ;; -chorusrdb) os=-chorusrdb basic_machine=$1 ;; -hiux*) os=-hiuxwe2 ;; -sco6) os=-sco5v6 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco5) os=-sco3.2v5 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco4) os=-sco3.2v4 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2.[4-9]*) os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2v[4-9]*) # Don't forget version if it is 3.2v4 or newer. basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco5v6*) # Don't forget version if it is 3.2v4 or newer. basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco*) os=-sco3.2v2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -udk*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -isc) os=-isc2.2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -clix*) basic_machine=clipper-intergraph ;; -isc*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -lynx*) os=-lynxos ;; -ptx*) basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` ;; -windowsnt*) os=`echo $os | sed -e 's/windowsnt/winnt/'` ;; -psos*) os=-psos ;; -mint | -mint[0-9]*) basic_machine=m68k-atari os=-mint ;; esac # Decode aliases for certain CPU-COMPANY combinations. case $basic_machine in # Recognize the basic CPU types without company name. # Some are omitted here because they have special meanings below. 
1750a | 580 \ | a29k \ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ | am33_2.0 \ | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr \ | bfin \ | c4x | clipper \ | d10v | d30v | dlx | dsp16xx \ | fr30 | frv \ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ | i370 | i860 | i960 | ia64 \ | ip2k | iq2000 \ | m32r | m32rle | m68000 | m68k | m88k | maxq | mb | microblaze | mcore \ | mips | mipsbe | mipseb | mipsel | mipsle \ | mips16 \ | mips64 | mips64el \ | mips64vr | mips64vrel \ | mips64orion | mips64orionel \ | mips64vr4100 | mips64vr4100el \ | mips64vr4300 | mips64vr4300el \ | mips64vr5000 | mips64vr5000el \ | mips64vr5900 | mips64vr5900el \ | mipsisa32 | mipsisa32el \ | mipsisa32r2 | mipsisa32r2el \ | mipsisa64 | mipsisa64el \ | mipsisa64r2 | mipsisa64r2el \ | mipsisa64sb1 | mipsisa64sb1el \ | mipsisa64sr71k | mipsisa64sr71kel \ | mipstx39 | mipstx39el \ | mn10200 | mn10300 \ | mt \ | msp430 \ | ns16k | ns32k \ | or32 \ | pdp10 | pdp11 | pj | pjl \ | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \ | pyramid \ | sh | sh[1234] | sh[24]a | sh[23]e | sh[34]eb | shbe | shle | sh[1234]le | sh3ele \ | sh64 | sh64le \ | sparc | sparc64 | sparc64b | sparc86x | sparclet | sparclite \ | sparcv8 | sparcv9 | sparcv9b \ | strongarm \ | tahoe | thumb | tic4x | tic80 | tron \ | v850 | v850e \ | we32k \ | x86 | xscale | xscalee[bl] | xstormy16 | xtensa \ | z8k) basic_machine=$basic_machine-unknown ;; m32c) basic_machine=$basic_machine-unknown ;; m6811 | m68hc11 | m6812 | m68hc12) # Motorola 68HC11/12. basic_machine=$basic_machine-unknown os=-none ;; m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) ;; ms1) basic_machine=mt-unknown ;; # We use `pc' rather than `unknown' # because (1) that's what they normally are, and # (2) the word "unknown" tends to confuse beginning users. i*86 | x86_64) basic_machine=$basic_machine-pc ;; # Object if more than one company name word. *-*-*) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; # Recognize the basic CPU types with company name. 
580-* \ | a29k-* \ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ | avr-* \ | bfin-* | bs2000-* \ | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \ | clipper-* | craynv-* | cydra-* \ | d10v-* | d30v-* | dlx-* \ | elxsi-* \ | f30[01]-* | f700-* | fr30-* | frv-* | fx80-* \ | h8300-* | h8500-* \ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ | i*86-* | i860-* | i960-* | ia64-* \ | ip2k-* | iq2000-* \ | m32r-* | m32rle-* \ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ | m88110-* | m88k-* | maxq-* | mcore-* \ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ | mips16-* \ | mips64-* | mips64el-* \ | mips64vr-* | mips64vrel-* \ | mips64orion-* | mips64orionel-* \ | mips64vr4100-* | mips64vr4100el-* \ | mips64vr4300-* | mips64vr4300el-* \ | mips64vr5000-* | mips64vr5000el-* \ | mips64vr5900-* | mips64vr5900el-* \ | mipsisa32-* | mipsisa32el-* \ | mipsisa32r2-* | mipsisa32r2el-* \ | mipsisa64-* | mipsisa64el-* \ | mipsisa64r2-* | mipsisa64r2el-* \ | mipsisa64sb1-* | mipsisa64sb1el-* \ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ | mipstx39-* | mipstx39el-* \ | mmix-* \ | mt-* \ | msp430-* \ | none-* | np1-* | ns16k-* | ns32k-* \ | orion-* \ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \ | pyramid-* \ | romp-* | rs6000-* \ | sh-* | sh[1234]-* | sh[24]a-* | sh[23]e-* | sh[34]eb-* | shbe-* \ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ | sparc-* | sparc64-* | sparc64b-* | sparc86x-* | sparclet-* \ | sparclite-* \ | sparcv8-* | sparcv9-* | sparcv9b-* | strongarm-* | sv1-* | sx?-* \ | tahoe-* | thumb-* \ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ | tron-* \ | v850-* | v850e-* | vax-* \ | we32k-* \ | x86-* | x86_64-* | xps100-* | xscale-* | xscalee[bl]-* \ | xstormy16-* | xtensa-* \ | ymp-* \ | z8k-*) ;; m32c-*) ;; # Recognize the various machine names and aliases which stand # for a CPU type and a company and sometimes even an OS. 
386bsd) basic_machine=i386-unknown os=-bsd ;; 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) basic_machine=m68000-att ;; 3b*) basic_machine=we32k-att ;; a29khif) basic_machine=a29k-amd os=-udi ;; abacus) basic_machine=abacus-unknown ;; adobe68k) basic_machine=m68010-adobe os=-scout ;; alliant | fx80) basic_machine=fx80-alliant ;; altos | altos3068) basic_machine=m68k-altos ;; am29k) basic_machine=a29k-none os=-bsd ;; amd64) basic_machine=x86_64-pc ;; amd64-*) basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; amdahl) basic_machine=580-amdahl os=-sysv ;; amiga | amiga-*) basic_machine=m68k-unknown ;; amigaos | amigados) basic_machine=m68k-unknown os=-amigaos ;; amigaunix | amix) basic_machine=m68k-unknown os=-sysv4 ;; apollo68) basic_machine=m68k-apollo os=-sysv ;; apollo68bsd) basic_machine=m68k-apollo os=-bsd ;; aux) basic_machine=m68k-apple os=-aux ;; balance) basic_machine=ns32k-sequent os=-dynix ;; c90) basic_machine=c90-cray os=-unicos ;; convex-c1) basic_machine=c1-convex os=-bsd ;; convex-c2) basic_machine=c2-convex os=-bsd ;; convex-c32) basic_machine=c32-convex os=-bsd ;; convex-c34) basic_machine=c34-convex os=-bsd ;; convex-c38) basic_machine=c38-convex os=-bsd ;; cray | j90) basic_machine=j90-cray os=-unicos ;; craynv) basic_machine=craynv-cray os=-unicosmp ;; cr16c) basic_machine=cr16c-unknown os=-elf ;; crds | unos) basic_machine=m68k-crds ;; crisv32 | crisv32-* | etraxfs*) basic_machine=crisv32-axis ;; cris | cris-* | etrax*) basic_machine=cris-axis ;; crx) basic_machine=crx-unknown os=-elf ;; da30 | da30-*) basic_machine=m68k-da30 ;; decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) basic_machine=mips-dec ;; decsystem10* | dec10*) basic_machine=pdp10-dec os=-tops10 ;; decsystem20* | dec20*) basic_machine=pdp10-dec os=-tops20 ;; delta | 3300 | motorola-3300 | motorola-delta \ | 3300-motorola | delta-motorola) basic_machine=m68k-motorola ;; delta88) basic_machine=m88k-motorola os=-sysv3 ;; djgpp) basic_machine=i586-pc os=-msdosdjgpp ;; dpx20 | dpx20-*) basic_machine=rs6000-bull os=-bosx ;; dpx2* | dpx2*-bull) basic_machine=m68k-bull os=-sysv3 ;; ebmon29k) basic_machine=a29k-amd os=-ebmon ;; elxsi) basic_machine=elxsi-elxsi os=-bsd ;; encore | umax | mmax) basic_machine=ns32k-encore ;; es1800 | OSE68k | ose68k | ose | OSE) basic_machine=m68k-ericsson os=-ose ;; fx2800) basic_machine=i860-alliant ;; genix) basic_machine=ns32k-ns ;; gmicro) basic_machine=tron-gmicro os=-sysv ;; go32) basic_machine=i386-pc os=-go32 ;; h3050r* | hiux*) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; h8300hms) basic_machine=h8300-hitachi os=-hms ;; h8300xray) basic_machine=h8300-hitachi os=-xray ;; h8500hms) basic_machine=h8500-hitachi os=-hms ;; harris) basic_machine=m88k-harris os=-sysv3 ;; hp300-*) basic_machine=m68k-hp ;; hp300bsd) basic_machine=m68k-hp os=-bsd ;; hp300hpux) basic_machine=m68k-hp os=-hpux ;; hp3k9[0-9][0-9] | hp9[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k2[0-9][0-9] | hp9k31[0-9]) basic_machine=m68000-hp ;; hp9k3[2-9][0-9]) basic_machine=m68k-hp ;; hp9k6[0-9][0-9] | hp6[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k7[0-79][0-9] | hp7[0-79][0-9]) basic_machine=hppa1.1-hp ;; hp9k78[0-9] | hp78[0-9]) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[0-9][13679] | hp8[0-9][13679]) basic_machine=hppa1.1-hp ;; hp9k8[0-9][0-9] | hp8[0-9][0-9]) basic_machine=hppa1.0-hp ;; hppa-next) 
os=-nextstep3 ;; hppaosf) basic_machine=hppa1.1-hp os=-osf ;; hppro) basic_machine=hppa1.1-hp os=-proelf ;; i370-ibm* | ibm*) basic_machine=i370-ibm ;; # I'm not sure what "Sysv32" means. Should this be sysv3.2? i*86v32) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv32 ;; i*86v4*) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv4 ;; i*86v) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv ;; i*86sol2) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-solaris2 ;; i386mach) basic_machine=i386-mach os=-mach ;; i386-vsta | vsta) basic_machine=i386-unknown os=-vsta ;; iris | iris4d) basic_machine=mips-sgi case $os in -irix*) ;; *) os=-irix4 ;; esac ;; isi68 | isi) basic_machine=m68k-isi os=-sysv ;; m88k-omron*) basic_machine=m88k-omron ;; magnum | m3230) basic_machine=mips-mips os=-sysv ;; merlin) basic_machine=ns32k-utek os=-sysv ;; mingw32) basic_machine=i386-pc os=-mingw32 ;; miniframe) basic_machine=m68000-convergent ;; *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) basic_machine=m68k-atari os=-mint ;; mips3*-*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` ;; mips3*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown ;; monitor) basic_machine=m68k-rom68k os=-coff ;; morphos) basic_machine=powerpc-unknown os=-morphos ;; msdos) basic_machine=i386-pc os=-msdos ;; ms1-*) basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` ;; mvs) basic_machine=i370-ibm os=-mvs ;; ncr3000) basic_machine=i486-ncr os=-sysv4 ;; netbsd386) basic_machine=i386-unknown os=-netbsd ;; netwinder) basic_machine=armv4l-rebel os=-linux ;; news | news700 | news800 | news900) basic_machine=m68k-sony os=-newsos ;; news1000) basic_machine=m68030-sony os=-newsos ;; news-3600 | risc-news) basic_machine=mips-sony os=-newsos ;; necv70) basic_machine=v70-nec os=-sysv ;; next | m*-next ) basic_machine=m68k-next case $os in -nextstep* ) ;; -ns2*) os=-nextstep2 ;; *) os=-nextstep3 ;; esac ;; nh3000) basic_machine=m68k-harris os=-cxux ;; nh[45]000) basic_machine=m88k-harris os=-cxux ;; nindy960) basic_machine=i960-intel os=-nindy ;; mon960) basic_machine=i960-intel os=-mon960 ;; nonstopux) basic_machine=mips-compaq os=-nonstopux ;; np1) basic_machine=np1-gould ;; nsr-tandem) basic_machine=nsr-tandem ;; op50n-* | op60c-*) basic_machine=hppa1.1-oki os=-proelf ;; openrisc | openrisc-*) basic_machine=or32-unknown ;; os400) basic_machine=powerpc-ibm os=-os400 ;; OSE68000 | ose68000) basic_machine=m68000-ericsson os=-ose ;; os68k) basic_machine=m68k-none os=-os68k ;; pa-hitachi) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; paragon) basic_machine=i860-intel os=-osf ;; pbd) basic_machine=sparc-tti ;; pbb) basic_machine=m68k-tti ;; pc532 | pc532-*) basic_machine=ns32k-pc532 ;; pc98) basic_machine=i386-pc ;; pc98-*) basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium | p5 | k5 | k6 | nexgen | viac3) basic_machine=i586-pc ;; pentiumpro | p6 | 6x86 | athlon | athlon_*) basic_machine=i686-pc ;; pentiumii | pentium2 | pentiumiii | pentium3) basic_machine=i686-pc ;; pentium4) basic_machine=i786-pc ;; pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumpro-* | p6-* | 6x86-* | athlon-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium4-*) basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pn) basic_machine=pn-gould ;; power) 
basic_machine=power-ibm ;; ppc) basic_machine=powerpc-unknown ;; ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppcle | powerpclittle | ppc-le | powerpc-little) basic_machine=powerpcle-unknown ;; ppcle-* | powerpclittle-*) basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64) basic_machine=powerpc64-unknown ;; ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64le | powerpc64little | ppc64-le | powerpc64-little) basic_machine=powerpc64le-unknown ;; ppc64le-* | powerpc64little-*) basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ps2) basic_machine=i386-ibm ;; pw32) basic_machine=i586-unknown os=-pw32 ;; rdos) basic_machine=i386-pc os=-rdos ;; rom68k) basic_machine=m68k-rom68k os=-coff ;; rm[46]00) basic_machine=mips-siemens ;; rtpc | rtpc-*) basic_machine=romp-ibm ;; s390 | s390-*) basic_machine=s390-ibm ;; s390x | s390x-*) basic_machine=s390x-ibm ;; sa29200) basic_machine=a29k-amd os=-udi ;; sb1) basic_machine=mipsisa64sb1-unknown ;; sb1el) basic_machine=mipsisa64sb1el-unknown ;; sei) basic_machine=mips-sei os=-seiux ;; sequent) basic_machine=i386-sequent ;; sh) basic_machine=sh-hitachi os=-hms ;; sh64) basic_machine=sh64-unknown ;; sparclite-wrs | simso-wrs) basic_machine=sparclite-wrs os=-vxworks ;; sps7) basic_machine=m68k-bull os=-sysv2 ;; spur) basic_machine=spur-unknown ;; st2000) basic_machine=m68k-tandem ;; stratus) basic_machine=i860-stratus os=-sysv4 ;; sun2) basic_machine=m68000-sun ;; sun2os3) basic_machine=m68000-sun os=-sunos3 ;; sun2os4) basic_machine=m68000-sun os=-sunos4 ;; sun3os3) basic_machine=m68k-sun os=-sunos3 ;; sun3os4) basic_machine=m68k-sun os=-sunos4 ;; sun4os3) basic_machine=sparc-sun os=-sunos3 ;; sun4os4) basic_machine=sparc-sun os=-sunos4 ;; sun4sol2) basic_machine=sparc-sun os=-solaris2 ;; sun3 | sun3-*) basic_machine=m68k-sun ;; sun4) basic_machine=sparc-sun ;; sun386 | sun386i | roadrunner) basic_machine=i386-sun ;; sv1) basic_machine=sv1-cray os=-unicos ;; symmetry) basic_machine=i386-sequent os=-dynix ;; t3e) basic_machine=alphaev5-cray os=-unicos ;; t90) basic_machine=t90-cray os=-unicos ;; tic54x | c54x*) basic_machine=tic54x-unknown os=-coff ;; tic55x | c55x*) basic_machine=tic55x-unknown os=-coff ;; tic6x | c6x*) basic_machine=tic6x-unknown os=-coff ;; tx39) basic_machine=mipstx39-unknown ;; tx39el) basic_machine=mipstx39el-unknown ;; toad1) basic_machine=pdp10-xkl os=-tops20 ;; tower | tower-32) basic_machine=m68k-ncr ;; tpf) basic_machine=s390x-ibm os=-tpf ;; udi29k) basic_machine=a29k-amd os=-udi ;; ultra3) basic_machine=a29k-nyu os=-sym1 ;; v810 | necv810) basic_machine=v810-nec os=-none ;; vaxv) basic_machine=vax-dec os=-sysv ;; vms) basic_machine=vax-dec os=-vms ;; vpp*|vx|vx-*) basic_machine=f301-fujitsu ;; vxworks960) basic_machine=i960-wrs os=-vxworks ;; vxworks68) basic_machine=m68k-wrs os=-vxworks ;; vxworks29k) basic_machine=a29k-wrs os=-vxworks ;; w65*) basic_machine=w65-wdc os=-none ;; w89k-*) basic_machine=hppa1.1-winbond os=-proelf ;; xbox) basic_machine=i686-pc os=-mingw32 ;; xps | xps100) basic_machine=xps100-honeywell ;; ymp) basic_machine=ymp-cray os=-unicos ;; z8k-*-coff) basic_machine=z8k-unknown os=-sim ;; none) basic_machine=none-none os=-none ;; # Here we handle the default manufacturer of certain CPU types. It is in # some cases the only manufacturer, in others, it is the most popular. 
w89k) basic_machine=hppa1.1-winbond ;; op50n) basic_machine=hppa1.1-oki ;; op60c) basic_machine=hppa1.1-oki ;; romp) basic_machine=romp-ibm ;; mmix) basic_machine=mmix-knuth ;; rs6000) basic_machine=rs6000-ibm ;; vax) basic_machine=vax-dec ;; pdp10) # there are many clones, so DEC is not a safe bet basic_machine=pdp10-unknown ;; pdp11) basic_machine=pdp11-dec ;; we32k) basic_machine=we32k-att ;; sh[1234] | sh[24]a | sh[34]eb | sh[1234]le | sh[23]ele) basic_machine=sh-unknown ;; sparc | sparcv8 | sparcv9 | sparcv9b) basic_machine=sparc-sun ;; cydra) basic_machine=cydra-cydrome ;; orion) basic_machine=orion-highlevel ;; orion105) basic_machine=clipper-highlevel ;; mac | mpw | mac-mpw) basic_machine=m68k-apple ;; pmac | pmac-mpw) basic_machine=powerpc-apple ;; *-unknown) # Make sure to match an already-canonicalized machine name. ;; *) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; esac # Here we canonicalize certain aliases for manufacturers. case $basic_machine in *-digital*) basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` ;; *-commodore*) basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` ;; *) ;; esac # Decode manufacturer-specific aliases for certain operating systems. if [ x"$os" != x"" ] then case $os in # First match some system type aliases # that might get confused with valid system types. # -solaris* is a basic system type, with this one exception. -solaris1 | -solaris1.*) os=`echo $os | sed -e 's|solaris1|sunos4|'` ;; -solaris) os=-solaris2 ;; -svr4*) os=-sysv4 ;; -unixware*) os=-sysv4.2uw ;; -gnu/linux*) os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` ;; # First accept the basic system types. # The portable systems comes first. # Each alternative MUST END IN A *, to match a version number. # -sysv* is not here because it comes later, after sysvr4. -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ | -*vms* | -sco* | -esix* | -isc* | -aix* | -sunos | -sunos[34]*\ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -solaris* | -sym* \ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ | -aos* \ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ | -openbsd* | -solidbsd* \ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ | -chorusos* | -chorusrdb* \ | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ | -mingw32* | -linux-gnu* | -linux-newlib* | -linux-uclibc* \ | -uxpv* | -beos* | -mpeix* | -udk* \ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ | -skyos* | -haiku* | -rdos*) # Remember, each alternative MUST END IN *, to match a version number. 
;; -qnx*) case $basic_machine in x86-* | i*86-*) ;; *) os=-nto$os ;; esac ;; -nto-qnx*) ;; -nto*) os=`echo $os | sed -e 's|nto|nto-qnx|'` ;; -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) ;; -mac*) os=`echo $os | sed -e 's|mac|macos|'` ;; -linux-dietlibc) os=-linux-dietlibc ;; -linux*) os=`echo $os | sed -e 's|linux|linux-gnu|'` ;; -sunos5*) os=`echo $os | sed -e 's|sunos5|solaris2|'` ;; -sunos6*) os=`echo $os | sed -e 's|sunos6|solaris3|'` ;; -opened*) os=-openedition ;; -os400*) os=-os400 ;; -wince*) os=-wince ;; -osfrose*) os=-osfrose ;; -osf*) os=-osf ;; -utek*) os=-bsd ;; -dynix*) os=-bsd ;; -acis*) os=-aos ;; -atheos*) os=-atheos ;; -syllable*) os=-syllable ;; -386bsd) os=-bsd ;; -ctix* | -uts*) os=-sysv ;; -nova*) os=-rtmk-nova ;; -ns2 ) os=-nextstep2 ;; -nsk*) os=-nsk ;; # Preserve the version number of sinix5. -sinix5.*) os=`echo $os | sed -e 's|sinix|sysv|'` ;; -sinix*) os=-sysv4 ;; -tpf*) os=-tpf ;; -triton*) os=-sysv3 ;; -oss*) os=-sysv3 ;; -svr4) os=-sysv4 ;; -svr3) os=-sysv3 ;; -sysvr4) os=-sysv4 ;; # This must come after -sysvr4. -sysv*) ;; -ose*) os=-ose ;; -es1800*) os=-ose ;; -xenix) os=-xenix ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) os=-mint ;; -aros*) os=-aros ;; -kaos*) os=-kaos ;; -zvmoe) os=-zvmoe ;; -none) ;; *) # Get rid of the `-' at the beginning of $os. os=`echo $os | sed 's/[^-]*-//'` echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 exit 1 ;; esac else # Here we handle the default operating systems that come with various machines. # The value should be what the vendor currently ships out the door with their # machine or put another way, the most popular os provided with the machine. # Note that if you're going to try to match "-MANUFACTURER" here (say, # "-sun"), then you have to tell the case statement up towards the top # that MANUFACTURER isn't an operating system. Otherwise, code above # will signal an error saying that MANUFACTURER isn't an operating # system, and we'll never get to this point. case $basic_machine in *-acorn) os=-riscix1.2 ;; arm*-rebel) os=-linux ;; arm*-semi) os=-aout ;; c4x-* | tic4x-*) os=-coff ;; # This must come before the *-dec entry. pdp10-*) os=-tops20 ;; pdp11-*) os=-none ;; *-dec | vax-*) os=-ultrix4.2 ;; m68*-apollo) os=-domain ;; i386-sun) os=-sunos4.0.2 ;; m68000-sun) os=-sunos3 # This also exists in the configure program, but was not the # default. # os=-sunos4 ;; m68*-cisco) os=-aout ;; mips*-cisco) os=-elf ;; mips*-*) os=-elf ;; or32-*) os=-coff ;; *-tti) # must be before sparc entry or we get the wrong os. 
os=-sysv3 ;; sparc-* | *-sun) os=-sunos4.1.1 ;; *-be) os=-beos ;; *-haiku) os=-haiku ;; *-ibm) os=-aix ;; *-knuth) os=-mmixware ;; *-wec) os=-proelf ;; *-winbond) os=-proelf ;; *-oki) os=-proelf ;; *-hp) os=-hpux ;; *-hitachi) os=-hiux ;; i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) os=-sysv ;; *-cbm) os=-amigaos ;; *-dg) os=-dgux ;; *-dolphin) os=-sysv3 ;; m68k-ccur) os=-rtu ;; m88k-omron*) os=-luna ;; *-next ) os=-nextstep ;; *-sequent) os=-ptx ;; *-crds) os=-unos ;; *-ns) os=-genix ;; i370-*) os=-mvs ;; *-next) os=-nextstep3 ;; *-gould) os=-sysv ;; *-highlevel) os=-bsd ;; *-encore) os=-bsd ;; *-sgi) os=-irix ;; *-siemens) os=-sysv4 ;; *-masscomp) os=-rtu ;; f30[01]-fujitsu | f700-fujitsu) os=-uxpv ;; *-rom68k) os=-coff ;; *-*bug) os=-coff ;; *-apple) os=-macos ;; *-atari*) os=-mint ;; *) os=-none ;; esac fi # Here we handle the case where we know the os, and the CPU type, but not the # manufacturer. We pick the logical manufacturer. vendor=unknown case $basic_machine in *-unknown) case $os in -riscix*) vendor=acorn ;; -sunos*) vendor=sun ;; -aix*) vendor=ibm ;; -beos*) vendor=be ;; -hpux*) vendor=hp ;; -mpeix*) vendor=hp ;; -hiux*) vendor=hitachi ;; -unos*) vendor=crds ;; -dgux*) vendor=dg ;; -luna*) vendor=omron ;; -genix*) vendor=ns ;; -mvs* | -opened*) vendor=ibm ;; -os400*) vendor=ibm ;; -ptx*) vendor=sequent ;; -tpf*) vendor=ibm ;; -vxsim* | -vxworks* | -windiss*) vendor=wrs ;; -aux*) vendor=apple ;; -hms*) vendor=hitachi ;; -mpw* | -macos*) vendor=apple ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) vendor=atari ;; -vos*) vendor=stratus ;; esac basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` ;; esac echo $basic_machine$os exit # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: antlr-2.7.7/LICENSE.txt0000644000175000017500000000225310522211616014426 0ustar twernertwerner SOFTWARE RIGHTS ANTLR 1989-2006 Developed by Terence Parr Partially supported by University of San Francisco & jGuru.com We reserve no legal rights to the ANTLR--it is fully in the public domain. An individual or company may do whatever they wish with source code distributed with ANTLR or the code generated by ANTLR, including the incorporation of ANTLR, or its output, into commerical software. We encourage users to develop software with ANTLR. However, we do ask that credit is given to us for developing ANTLR. By "credit", we mean that if you use ANTLR or incorporate any source code into one of your programs (commercial product, research project, or otherwise) that you acknowledge this fact somewhere in the documentation, research report, etc... If you like ANTLR and have developed a nice tool with the output, please mention that you developed it using ANTLR. In addition, we ask that the headers remain intact in our source code. As long as these guidelines are kept, we expect to continue enhancing this system and expect to make other tools available as they are completed. The primary ANTLR guy: Terence Parr parrt@cs.usfca.edu parrt@antlr.org antlr-2.7.7/extras/0000755000175000017500000000000010522211615014106 5ustar twernertwernerantlr-2.7.7/extras/setup.hint0000644000175000017500000000046710522211615016141 0ustar twernertwerner# comment sdesc: (AN)other (T)ool for (L)anguage (R)ecognition ldesc: "A language tool that provides a framework for constructing recognizers, compilers, and translators from grammatical descriptions. 
The following languages are supported: Java | C++ | C# | Python " category: "Devel" requires: cygwin antlr-2.7.7/extras/antlr-mode.el0000755000175000017500000031704410522211615016506 0ustar twernertwerner;;; antlr-mode.el --- major mode for ANTLR grammar files ;; Copyright (C) 1999-2001 Free Software Foundation, Inc. ;; ;; Author: Christoph.Wedler@sap.com ;; Keywords: languages ;; Version: 2.1 ;; X-URL: http://www.fmi.uni-passau.de/~wedler/antlr-mode/ ;; This file is part of GNU Emacs. ;; GNU Emacs is free software; you can redistribute it and/or modify ;; it under the terms of the GNU General Public License as published by ;; the Free Software Foundation; either version 2, or (at your option) ;; any later version. ;; GNU Emacs is distributed in the hope that it will be useful, ;; but WITHOUT ANY WARRANTY; without even the implied warranty of ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;; GNU General Public License for more details. ;; You should have received a copy of the GNU General Public License ;; along with GNU Emacs; see the file COPYING. If not, write to the ;; Free Software Foundation, Inc., 59 Temple Place - Suite 330, ;; Boston, MA 02111-1307, USA. ;;; Commentary: ;; This Emacs extension (major mode) provides various features for editing ;; ANTLR grammar files. ANTLR is a tool for LL(k)-based language recognition ;; and an excellent alternative to lex & yacc, see . ;; Some features depend on the value of ANTLR's "language" option (check the ;; modeline for "Antlr.Java" or "Antlr.C++"). ;; This package provides the following features: ;; * Syntax highlighting for grammar symbols and the code in actions. ;; * Indentation (pretty-print) for the current line (TAB) and lines in the ;; selected region (C-M-\). Inserting an ANTLR syntax symbol (one of ;; ":;|&(){}") might also indent the current line. ;; * Menu "Index" and Speedbar tags with all class, token and rule ;; definitions. Jump to corresponding position by selecting an entry. ;; * Commands to move to previous/next rule, beginning/end of rule body etc. ;; * Commands to hide/unhide actions. ;; * Support to insert/change file/grammar/rule/subrule options. ;; * Run ANTLR from within Emacs, create Makefile dependencies. ;; SYNTAX HIGHLIGHTING comes in three phases. First, comments and strings are ;; highlighted. Second, the grammar code is highlighted according to ;; `antlr-font-lock-additional-keywords' (rule refs: dark blue, token refs: ;; dark orange, definition: bold blue). Third, actions, semantic predicates ;; and arguments are highlighted according to the usual font-lock keywords of ;; the major-mode corresponding to ANTLR's "language" option, see also ;; `antlr-font-lock-maximum-decoration'. We define special font-lock faces for ;; the grammar code to allow you to distinguish ANTLR keywords from Java/C++ ;; keywords. ;; INDENTATION. This package supports ANTLR's (intended) indentation style ;; which is based on a simple paren/brace/bracket depth-level calculation, see ;; `antlr-indent-line'. The indentation engine of cc-mode is only used inside ;; block comments. By default, this package defines a tab width of 4 to be ;; consistent to both ANTLR's conventions (TABs usage) and the ;; `c-indentation-style' "java" which sets `c-basic-offset' to 4, see ;; `antlr-tab-offset-alist'. You might want to set this variable to nil. ;; OPTION SUPPORT. This package provides special support to insert or change ;; file, grammar, rule and subrule options via the menu or via the keyboard ;; with completion. 
For most options, you can also insert the value with ;; completion (or select a value from a list by pressing `?'). You get a ;; warning if an option is not supported by the version of ANTLR you are using ;; (`antlr-tool-version' defaults to 2.7.1), or if the option shouldn't be ;; inserted for other reasons. This package knows the correct position where ;; to insert the option and inserts "options {...}" if it is not already ;; present. For details, see the docstring of command \\[antlr-insert-option]. ;; MAKEFILE CREATION. Command \\[antlr-show-makefile-rules] shows/inserts the ;; dependencies for all grammar files in the current directory. It considers ;; ANTLR's "language" option, import/export vocabularies and grammar ;; inheritance, and provides a value for the -glib option if necessary (which ;; you have to edit if the super-grammar is not in the same directory). ;; TODO/WISH-LIST. Things which might be supported in future versions: ;; * Next Version [C-c C-w]. Produce HTML document with syntax highlighted ;; and hyper-links (using htmlize). ;; * Next Version [C-c C-u]. Insert/update special comments: each rule lists ;; all rules which use the current rule. With font-lock update. ;; * Next Version. Make hiding much more customizable. ;; * Planned [C-c C-j]. Jump to generated coding. ;; * Planned. Further support for imenu, i.e., include entries for method ;; definitions at beginning of grammar class. ;; * Planned [C-c C-p]. Pack/unpack rule/subrule & options (one/multi-line). ;; * Probably. Show rules/dependencies for ANT like for Makefile (does ANT ;; support vocabularies and grammar inheritance?), I have to look at ;; jde-ant.el: http://jakarta.apache.org/ant/manual/OptionalTasks/antlr.html ;; * Unlikely. Sather as generated language with syntax highlighting etc/. ;; Questions/problems: is sather-mode.el the standard mode for sather, is it ;; still supported, what is its relationship to eiffel3.el? Requirement: ;; this mode must not depend on a Sather mode. ;; * Unlikely. Faster syntax highlighting: sectionize the buffer into Antlr ;; and action code and run special highlighting functions on these regions. ;; Problems: code size, this mode would depend on font-lock internals. ;; Bug fixes, bug reports, improvements, and suggestions are strongly ;; appreciated. Please check the newest version first: ;; http://www.fmi.uni-passau.de/~wedler/antlr-mode/changes.html ;;; Installation: ;; This file requires Emacs-20.3, XEmacs-20.4 or higher and package cc-mode. ;; If antlr-mode is not part of your distribution, put this file into your ;; load-path and the following into your ~/.emacs: ;; (autoload 'antlr-mode "antlr-mode" nil t) ;; (setq auto-mode-alist (cons '("\\.g\\'" . antlr-mode) auto-mode-alist)) ;; (add-hook 'speedbar-load-hook ; would be too late in antlr-mode.el ;; (lambda () (speedbar-add-supported-extension ".g"))) ;; If you edit ANTLR's source files, you might also want to use ;; (autoload 'antlr-set-tabs "antlr-mode") ;; (add-hook 'java-mode-hook 'antlr-set-tabs) ;; I strongly recommend to use font-lock with a support mode like fast-lock, ;; lazy-lock or better jit-lock (Emacs-21.1+) / lazy-shot (XEmacs). ;; To customize, use menu item "Antlr" -> "Customize Antlr". 
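;; EXAMPLE SETUP (editor's sketch, not part of the original commentary).
;; A minimal ~/.emacs fragment that simply combines the snippets from the
;; Installation section above; the last line follows the suggestion in the
;; INDENTATION section to disable the special TAB conventions by setting
;; `antlr-tab-offset-alist' to nil, so your global `tab-width' and
;; `indent-tabs-mode' settings stay in effect:
;;
;;   (autoload 'antlr-mode "antlr-mode" nil t)
;;   (setq auto-mode-alist (cons '("\\.g\\'" . antlr-mode) auto-mode-alist))
;;   (add-hook 'speedbar-load-hook
;;             (lambda () (speedbar-add-supported-extension ".g")))
;;   (setq antlr-tab-offset-alist nil)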
;;; Code: (provide 'antlr-mode) (eval-when-compile ; required and optional libraries (require 'cc-mode) (defvar c-Java-access-key) ; former cc-mode variable (condition-case nil (require 'font-lock) (error nil)) (condition-case nil (require 'compile) (error nil)) (defvar outline-level) (defvar imenu-use-markers) (defvar imenu-create-index-function)) (eval-when-compile ; Emacs: cl, easymenu, XEmacs vars (require 'cl) (require 'easymenu) (defvar zmacs-region-stays)) (eval-when-compile ; XEmacs: Emacs vars (defvar inhibit-point-motion-hooks) (defvar deactivate-mark)) (eval-and-compile ; XEmacs functions, simplified (if (featurep 'xemacs) (defalias 'antlr-scan-sexps 'scan-sexps) (defalias 'antlr-scan-sexps 'antlr-scan-sexps-internal)) (if (featurep 'xemacs) (defalias 'antlr-scan-lists 'scan-lists) (defalias 'antlr-scan-lists 'antlr-scan-lists-internal)) (if (fboundp 'default-directory) (defalias 'antlr-default-directory 'default-directory) (defun antlr-default-directory () default-directory)) (if (fboundp 'read-shell-command) (defalias 'antlr-read-shell-command 'read-shell-command) (defun antlr-read-shell-command (prompt &optional initial-input history) (read-from-minibuffer prompt initial-input nil nil (or history 'shell-command-history)))) (if (fboundp 'with-displaying-help-buffer) (defalias 'antlr-with-displaying-help-buffer 'with-displaying-help-buffer) (defun antlr-with-displaying-help-buffer (thunk &optional name) (with-output-to-temp-buffer "*Help*" (save-excursion (funcall thunk))))) (if (and (fboundp 'buffer-syntactic-context) (fboundp 'buffer-syntactic-context-depth)) (progn (defalias 'antlr-invalidate-context-cache 'antlr-xemacs-bug-workaround) (defalias 'antlr-syntactic-context 'antlr-fast-syntactic-context)) (defalias 'antlr-invalidate-context-cache 'ignore) (defalias 'antlr-syntactic-context 'antlr-slow-syntactic-context))) ;;;;########################################################################## ;;;; Variables ;;;;########################################################################## (defgroup antlr nil "Major mode for ANTLR grammar files." :group 'languages :link '(emacs-commentary-link "antlr-mode.el") :link '(url-link "http://www.fmi.uni-passau.de/~wedler/antlr-mode/") :prefix "antlr-") (defconst antlr-version "2.1" "ANTLR major mode version number.") ;;;=========================================================================== ;;; Controlling ANTLR's code generator (language option) ;;;=========================================================================== (defvar antlr-language nil "Major mode corresponding to ANTLR's \"language\" option. Set via `antlr-language-alist'. The only useful place to change this buffer-local variable yourself is in `antlr-mode-hook' or in the \"local variable list\" near the end of the file, see `enable-local-variables'.") (defcustom antlr-language-alist '((java-mode "Java" nil "\"Java\"" "Java") (c++-mode "C++" "\"Cpp\"" "Cpp")) "List of ANTLR's supported languages. Each element in this list looks like \(MAJOR-MODE MODELINE-STRING OPTION-VALUE...) MAJOR-MODE, the major mode of the code in the grammar's actions, is the value of `antlr-language' if the first group in the string matched by REGEXP in `antlr-language-limit-n-regexp' is one of the OPTION-VALUEs. An OPTION-VALUE of nil denotes the fallback element. MODELINE-STRING is also displayed in the modeline next to \"Antlr\"." 
:group 'antlr :type '(repeat (group :value (java-mode "") (function :tag "Major mode") (string :tag "Modeline string") (repeat :tag "ANTLR language option" :inline t (choice (const :tag "Default" nil) string ))))) (defcustom antlr-language-limit-n-regexp '(8192 . "language[ \t]*=[ \t]*\\(\"?[A-Z][A-Za-z_]*\"?\\)") "Used to set a reasonable value for `antlr-language'. Looks like \(LIMIT \. REGEXP). Search for REGEXP from the beginning of the buffer to LIMIT and use the first group in the matched string to set the language according to `antlr-language-alist'." :group 'antlr :type '(cons (choice :tag "Limit" (const :tag "No" nil) (integer :value 0)) regexp)) ;;;=========================================================================== ;;; Hide/Unhide, Indent/Tabs ;;;=========================================================================== (defcustom antlr-action-visibility 3 "Visibility of actions when command `antlr-hide-actions' is used. If nil, the actions with their surrounding braces are hidden. If a number, do not hide the braces, only hide the contents if its length is greater than this number." :group 'antlr :type '(choice (const :tag "Completely hidden" nil) (integer :tag "Hidden if longer than" :value 3))) (defcustom antlr-indent-comment 'tab "*Non-nil, if the indentation should touch lines in block comments. If nil, no continuation line of a block comment is changed. If t, they are changed according to `c-indentation-line'. When not nil and not t, they are only changed by \\[antlr-indent-command]." :group 'antlr :type '(radio (const :tag "No" nil) (const :tag "Always" t) (sexp :tag "With TAB" :format "%t" :value tab))) (defcustom antlr-tab-offset-alist '((antlr-mode nil 4 nil) (java-mode "antlr" 4 nil)) "Alist to determine whether to use ANTLR's convention for TABs. Each element looks like \(MAJOR-MODE REGEXP TAB-WIDTH INDENT-TABS-MODE). The first element whose MAJOR-MODE is nil or equal to `major-mode' and whose REGEXP is nil or matches variable `buffer-file-name' is used to set `tab-width' and `indent-tabs-mode'. This is useful to support both ANTLR's and Java's indentation styles. Used by `antlr-set-tabs'." :group 'antlr :type '(repeat (group :value (antlr-mode nil 8 nil) (choice (const :tag "All" nil) (function :tag "Major mode")) (choice (const :tag "All" nil) regexp) (integer :tag "Tab width") (boolean :tag "Indent-tabs-mode")))) (defcustom antlr-indent-style "java" "*If non-nil, cc-mode indentation style used for `antlr-mode'. See `c-set-style' for details." :group 'antlr :type '(choice (const nil) regexp)) (defcustom antlr-indent-item-regexp "[]}):;|&]\\|default[ \t]*:\\|case[ \t]+\\('\\\\?.'\\|[0-9]+\\|[A-Za-z_][A-Za-z_0-9]*\\)[ \t]*:" ; & is local ANTLR extension (SGML's and-connector) "Regexp matching lines which should be indented by one TAB less. See `antlr-indent-line' and command \\[antlr-indent-command]." :group 'antlr :type 'regexp) (defcustom antlr-indent-at-bol-alist ;; eval-when-compile not usable with defcustom... '((c++-mode . "#\\(assert\\|cpu\\|define\\|endif\\|el\\(if\\|se\\)\\|i\\(dent\\|f\\(def\\|ndef\\)?\\|mport\\|nclude\\(_next\\)?\\)\\|line\\|machine\\|pragma\\|system\\|un\\(assert\\|def\\)\\|warning\\)\\>")) "Alist of regexps matching lines are indented at column 0. Each element in this list looks like (MODE . REGEXP) where MODE is a function and REGEXP is a regular expression. 
If `antlr-language' equals to a MODE and the line starting at the first non-whitespace is matched by the corresponding REGEXP, indent the line at column 0 instead according to the normal rules of `antlr-indent-line'." :group 'antlr :type '(repeat (cons (function :tag "Major mode") regexp))) ;;;=========================================================================== ;;; Options: customization ;;;=========================================================================== (defcustom antlr-options-use-submenus t "*Non-nil, if the major mode menu should include option submenus. If nil, the menu just includes a command to insert options. Otherwise, it includes four submenus to insert file/grammar/rule/subrule options." :group 'antlr :type 'boolean) (defcustom antlr-tool-version 20701 "*The version number of the Antlr tool. The value is an integer of the form XYYZZ which stands for vX.YY.ZZ. This variable is used to warn about non-supported options and to supply version correct option values when using \\[antlr-insert-option]. Don't use a number smaller than 20600 since the stored history of Antlr's options starts with v2.06.00, see `antlr-options-alists'. You can make this variable buffer-local." :group 'antlr :type 'integer) (defcustom antlr-options-auto-colon t "*Non-nil, if `:' is inserted with a rule or subrule options section. A `:' is only inserted if this value is non-nil, if a rule or subrule option is inserted with \\[antlr-insert-option], if there was no rule or subrule options section before, and if a `:' is not already present after the section, ignoring whitespace, comments and the init action." :group 'antlr :type 'boolean) (defcustom antlr-options-style nil "List of symbols which determine the style of option values. If a style symbol is present, the corresponding option value is put into quotes, i.e., represented as a string, otherwise it is represented as an identifier. The only style symbol used in the default value of `antlr-options-alist' is `language-as-string'. See also `antlr-read-value'." :group 'antlr :type '(repeat (symbol :tag "Style symbol"))) (defcustom antlr-options-push-mark t "*Non-nil, if inserting an option should set & push mark. If nil, never set mark when inserting an option with command \\[antlr-insert-option]. If t, always set mark via `push-mark'. If a number, only set mark if point was outside the options area before and the number of lines between point and the insert position is greater than this value. Otherwise, only set mark if point was outside the options area before." :group 'antlr :type '(radio (const :tag "No" nil) (const :tag "Always" t) (integer :tag "Lines between" :value 10) (sexp :tag "If outside options" :format "%t" :value outside))) (defcustom antlr-options-assign-string " = " "*String containing `=' to use between option name and value. This string is only used if the option to insert did not exist before or if there was no `=' after it. In other words, the spacing around an existing `=' won't be changed when changing an option value." :group 'antlr :type 'string) ;;;=========================================================================== ;;; Options: definitions ;;;=========================================================================== (defvar antlr-options-headings '("file" "grammar" "rule" "subrule") "Headings for the four different option kinds. The standard value is (\"file\" \"grammar\" \"rule\" \"subrule\"). 
See `antlr-options-alists'") (defvar antlr-options-alists '(;; file options ---------------------------------------------------------- (("language" antlr-language-option-extra (20600 antlr-read-value "Generated language: " language-as-string (("Java") ("Cpp") ("HTML") ("Diagnostic"))) (20700 antlr-read-value "Generated language: " language-as-string (("Java") ("Cpp") ("HTML") ("Diagnostic") ("Sather")))) ("mangleLiteralPrefix" nil (20600 antlr-read-value "Prefix for literals (default LITERAL_): " t)) ("namespace" antlr-c++-mode-extra (20700 antlr-read-value "Wrap generated C++ code in namespace: " t)) ("namespaceStd" antlr-c++-mode-extra (20701 antlr-read-value "Replace ANTLR_USE_NAMESPACE(std) by: " t)) ("namespaceAntlr" antlr-c++-mode-extra (20701 antlr-read-value "Replace ANTLR_USE_NAMESPACE(antlr) by: " t)) ("genHashLines" antlr-c++-mode-extra (20701 antlr-read-boolean "Include #line in generated C++ code? ")) ) ;; grammar options -------------------------------------------------------- (("k" nil (20600 antlr-read-value "Lookahead depth: ")) ("importVocab" nil (20600 antlr-read-value "Import vocabulary: ")) ("exportVocab" nil (20600 antlr-read-value "Export vocabulary: ")) ("testLiterals" nil ; lexer only (20600 antlr-read-boolean "Test each token against literals table? ")) ("defaultErrorHandler" nil ; not for lexer (20600 antlr-read-boolean "Generate default exception handler for each rule? ")) ("codeGenMakeSwitchThreshold" nil (20600 antlr-read-value "Min number of alternatives for 'switch': ")) ("codeGenBitsetTestThreshold" nil (20600 antlr-read-value "Min size of lookahead set for bitset test: ")) ("analyzerDebug" nil (20600 antlr-read-boolean "Display debugging info during grammar analysis? ")) ("codeGenDebug" nil (20600 antlr-read-boolean "Display debugging info during code generation? ")) ("buildAST" nil ; not for lexer (20600 antlr-read-boolean "Use automatic AST construction/transformation? ")) ("ASTLabelType" nil ; not for lexer (20600 antlr-read-value "Class of user-defined AST node: " t)) ("charVocabulary" nil ; lexer only (20600 nil "Insert character vocabulary")) ("interactive" nil (20600 antlr-read-boolean "Generate interactive lexer/parser? ")) ("caseSensitive" nil ; lexer only (20600 antlr-read-boolean "Case significant when matching characters? ")) ("caseSensitiveLiterals" nil ; lexer only (20600 antlr-read-boolean "Case significant when testing literals table? ")) ("classHeaderSuffix" nil (20600 nil "Additional string for grammar class definition")) ("filter" nil ; lexer only (20600 antlr-read-boolean "Skip rule (the name, true or false): " antlr-grammar-tokens)) ("namespace" antlr-c++-mode-extra (20700 antlr-read-value "Wrap generated C++ code for grammar in namespace: " t)) ("namespaceStd" antlr-c++-mode-extra (20701 antlr-read-value "Replace ANTLR_USE_NAMESPACE(std) by: " t)) ("namespaceAntlr" antlr-c++-mode-extra (20701 antlr-read-value "Replace ANTLR_USE_NAMESPACE(antlr) by: " t)) ("genHashLines" antlr-c++-mode-extra (20701 antlr-read-boolean "Include #line in generated C++ code? ")) ;;; ("autoTokenDef" nil ; parser only ;;; (80000 antlr-read-boolean ; default: true ;;; "Automatically define referenced token? ")) ;;; ("keywordsMeltTo" nil ; parser only ;;; (80000 antlr-read-value ;;; "Change non-matching keywords to token type: ")) ) ;; rule options ---------------------------------------------------------- (("testLiterals" nil ; lexer only (20600 antlr-read-boolean "Test this token against literals table? 
")) ("defaultErrorHandler" nil ; not for lexer (20600 antlr-read-boolean "Generate default exception handler for this rule? ")) ("ignore" nil ; lexer only (20600 antlr-read-value "In this rule, ignore tokens of type: " nil antlr-grammar-tokens)) ("paraphrase" nil ; lexer only (20600 antlr-read-value "In messages, replace name of this token by: " t)) ) ;; subrule options ------------------------------------------------------- (("warnWhenFollowAmbig" nil (20600 antlr-read-boolean "Display warnings for ambiguities with FOLLOW? ")) ("generateAmbigWarnings" nil (20600 antlr-read-boolean "Display warnings for ambiguities? ")) ("greedy" nil (20700 antlr-read-boolean "Make this optional/loop subrule greedy? ")) )) "Definitions for Antlr's options of all four different kinds. The value looks like \(FILE GRAMMAR RULE SUBRULE) where each FILE, GRAMMAR, RULE, and SUBRULE is a list of option definitions of the corresponding kind, i.e., looks like \(OPTION-DEF...). Each OPTION-DEF looks like \(OPTION-NAME EXTRA-FN VALUE-SPEC...) which defines a file/grammar/rule/subrule option with name OPTION-NAME. The OPTION-NAMEs are used for the creation of the \"Insert XXX Option\" submenus, see `antlr-options-use-submenus', and to allow to insert the option name with completion when using \\[antlr-insert-option]. If EXTRA-FN is a function, it is called at different phases of the insertion with arguments \(PHASE OPTION-NAME). PHASE can have the values `before-input' or `after-insertion', additional phases might be defined in future versions of this mode. The phase `before-input' occurs before the user is asked to insert a value. The phase `after-insertion' occurs after the option value has been inserted. EXTRA-FN might be called with additional arguments in future versions of this mode. Each specification VALUE-SPEC looks like \(VERSION READ-FN ARG...). The last VALUE-SPEC in an OPTION-DEF whose VERSION is smaller or equal to `antlr-tool-version' specifies how the user is asked for the value of the option. If READ-FN is nil, the only ARG is a string which is printed at the echo area to guide the user what to insert at point. Otherwise, READ-FN is called with arguments \(INIT-VALUE ARG...) to get the new value of the option. INIT-VALUE is the old value of the option or nil. The standard value contains the following functions as READ-FN: `antlr-read-value' with ARGs = \(PROMPT AS-STRING TABLE) which reads a general value, or `antlr-read-boolean' with ARGs = \(PROMPT TABLE) which reads a boolean value or a member of TABLE. PROMPT is the prompt when asking for a new value. If non-nil, TABLE is a table for completion or a function evaluating to such a table. The return value is quoted iff AS-STRING is non-nil and is either t or a symbol which is a member of `antlr-options-style'.") ;;;=========================================================================== ;;; Run tool, create Makefile dependencies ;;;=========================================================================== (defcustom antlr-tool-command "java antlr.Tool" "*Command used in \\[antlr-run-tool] to run the Antlr tool. This variable should include all options passed to Antlr except the option \"-glib\" which is automatically suggested if necessary." :group 'antlr :type 'string) (defcustom antlr-ask-about-save t "*If not nil, \\[antlr-run-tool] asks which buffers to save. Otherwise, it saves all modified buffers before running without asking." 
:group 'antlr :type 'boolean) (defcustom antlr-makefile-specification '("\n" ("GENS" "GENS%d" " \\\n\t") "$(ANTLR)") "*Variable to specify the appearance of the generated makefile rules. This variable influences the output of \\[antlr-show-makefile-rules]. It looks like \(RULE-SEP GEN-VAR-SPEC COMMAND). RULE-SEP is the string to separate different makefile rules. COMMAND is a string with the command which runs the Antlr tool, it should include all options except the option \"-glib\" which is automatically added if necessary. If GEN-VAR-SPEC is nil, each target directly consists of a list of files. If GEN-VAR-SPEC looks like \(GEN-VAR GEN-VAR-FORMAT GEN-SEP), a Makefile variable is created for each rule target. Then, GEN-VAR is a string with the name of the variable which contains the file names of all makefile rules. GEN-VAR-FORMAT is a format string producing the variable of each target with substitution COUNT/%d where COUNT starts with 1. GEN-SEP is used to separate long variable values." :group 'antlr :type '(list (string :tag "Rule separator") (choice (const :tag "Direct targets" nil) (list :tag "Variables for targets" (string :tag "Variable for all targets") (string :tag "Format for each target variable") (string :tag "Variable separator"))) (string :tag "ANTLR command"))) (defvar antlr-file-formats-alist '((java-mode ("%sTokenTypes.java") ("%s.java")) (c++-mode ("%sTokenTypes.hpp") ("%s.cpp" "%s.hpp"))) "Language dependent formats which specify generated files. Each element in this list looks looks like \(MAJOR-MODE (VOCAB-FILE-FORMAT...) (CLASS-FILE-FORMAT...)). The element whose MAJOR-MODE is equal to `antlr-language' is used to specify the generated files which are language dependent. See variable `antlr-special-file-formats' for language independent files. VOCAB-FILE-FORMAT is a format string, it specifies with substitution VOCAB/%s the generated file for each export vocabulary VOCAB. CLASS-FILE-FORMAT is a format string, it specifies with substitution CLASS/%s the generated file for each grammar class CLASS.") (defvar antlr-special-file-formats '("%sTokenTypes.txt" "expanded%s.g") "Language independent formats which specify generated files. The value looks like \(VOCAB-FILE-FORMAT EXPANDED-GRAMMAR-FORMAT). VOCAB-FILE-FORMAT is a format string, it specifies with substitution VOCAB/%s the generated or input file for each export or import vocabulary VOCAB, respectively. EXPANDED-GRAMMAR-FORMAT is a format string, it specifies with substitution GRAMMAR/%s the constructed grammar file if the file GRAMMAR.g contains a grammar class which extends a class other than \"Lexer\", \"Parser\" or \"TreeParser\". See variable `antlr-file-formats-alist' for language dependent formats.") (defvar antlr-unknown-file-formats '("?%s?.g" "?%s?") "*Formats which specify the names of unknown files. The value looks like \(SUPER-GRAMMAR-FILE-FORMAT SUPER-EVOCAB-FORMAT). SUPER-GRAMMAR-FORMAT is a format string, it specifies with substitution SUPER/%s the name of a grammar file for Antlr's option \"-glib\" if no grammar file in the current directory defines the class SUPER or if it is defined more than once. 
SUPER-EVOCAB-FORMAT is a format string, it specifies with substitution SUPER/%s the name for the export vocabulary of above mentioned class SUPER.") (defvar antlr-help-unknown-file-text "## The following rules contain filenames of the form ## \"?SUPERCLASS?.g\" (and \"?SUPERCLASS?TokenTypes.txt\") ## where SUPERCLASS is not found to be defined in any grammar file of ## the current directory or is defined more than once. Please replace ## these filenames by the grammar files (and their exportVocab).\n\n" "String indicating the existence of unknown files in the Makefile. See \\[antlr-show-makefile-rules] and `antlr-unknown-file-formats'.") (defvar antlr-help-rules-intro "The following Makefile rules define the dependencies for all (non- expanded) grammars in directory \"%s\".\n They are stored in the kill-ring, i.e., you can insert them with C-y into your Makefile. You can also invoke M-x antlr-show-makefile-rules from within a Makefile to insert them directly.\n\n\n" "Introduction to use with \\[antlr-show-makefile-rules]. It is a format string and used with substitution DIRECTORY/%s where DIRECTORY is the name of the current directory.") ;;;=========================================================================== ;;; Menu ;;;=========================================================================== (defcustom antlr-imenu-name t "*Non-nil, if a \"Index\" menu should be added to the menubar. If it is a string, it is used instead \"Index\". Requires package imenu." :group 'antlr :type '(choice (const :tag "No menu" nil) (const :tag "Index menu" t) (string :tag "Other menu name"))) (defvar antlr-mode-map (let ((map (make-sparse-keymap))) (define-key map "\t" 'antlr-indent-command) (define-key map "\e\C-a" 'antlr-beginning-of-rule) (define-key map "\e\C-e" 'antlr-end-of-rule) (define-key map "\C-c\C-a" 'antlr-beginning-of-body) (define-key map "\C-c\C-e" 'antlr-end-of-body) (define-key map "\C-c\C-f" 'c-forward-into-nomenclature) (define-key map "\C-c\C-b" 'c-backward-into-nomenclature) (define-key map "\C-c\C-c" 'comment-region) (define-key map "\C-c\C-v" 'antlr-hide-actions) (define-key map "\C-c\C-r" 'antlr-run-tool) (define-key map "\C-c\C-o" 'antlr-insert-option) ;; I'm too lazy to define my own: (define-key map "\ea" 'c-beginning-of-statement) (define-key map "\ee" 'c-end-of-statement) ;; electric keys: (define-key map ":" 'antlr-electric-character) (define-key map ";" 'antlr-electric-character) (define-key map "|" 'antlr-electric-character) (define-key map "&" 'antlr-electric-character) (define-key map "(" 'antlr-electric-character) (define-key map ")" 'antlr-electric-character) (define-key map "{" 'antlr-electric-character) (define-key map "}" 'antlr-electric-character) map) "Keymap used in `antlr-mode' buffers.") (easy-menu-define antlr-mode-menu antlr-mode-map "Major mode menu." 
`("Antlr" ,@(if (and antlr-options-use-submenus (boundp 'emacs-major-version) (or (featurep 'xemacs) (>= emacs-major-version 21))) `(("Insert File Option" :filter ,(lambda (x) (antlr-options-menu-filter 1 x))) ("Insert Grammar Option" :filter ,(lambda (x) (antlr-options-menu-filter 2 x))) ("Insert Rule Option" :filter ,(lambda (x) (antlr-options-menu-filter 3 x))) ("Insert Subrule Option" :filter ,(lambda (x) (antlr-options-menu-filter 4 x))) "---") '(["Insert Option" antlr-insert-option :active (not buffer-read-only)])) ("Forward/Backward" ["Backward Rule" antlr-beginning-of-rule t] ["Forward Rule" antlr-end-of-rule t] ["Start of Rule Body" antlr-beginning-of-body :active (antlr-inside-rule-p)] ["End of Rule Body" antlr-end-of-body :active (antlr-inside-rule-p)] "---" ["Backward Statement" c-beginning-of-statement t] ["Forward Statement" c-end-of-statement t] ["Backward Into Nomencl." c-backward-into-nomenclature t] ["Forward Into Nomencl." c-forward-into-nomenclature t]) ["Indent Region" indent-region :active (and (not buffer-read-only) (c-region-is-active-p))] ["Comment Out Region" comment-region :active (and (not buffer-read-only) (c-region-is-active-p))] ["Uncomment Region" (comment-region (region-beginning) (region-end) '(4)) :active (and (not buffer-read-only) (c-region-is-active-p))] "---" ["Hide Actions (incl. Args)" antlr-hide-actions t] ["Hide Actions (excl. Args)" (antlr-hide-actions 2) t] ["Unhide All Actions" (antlr-hide-actions 0) t] "---" ["Run Tool on Grammar" antlr-run-tool t] ["Show Makefile Rules" antlr-show-makefile-rules t] "---" ["Customize Antlr" (customize-group 'antlr) t])) ;;;=========================================================================== ;;; font-lock ;;;=========================================================================== (defcustom antlr-font-lock-maximum-decoration 'inherit "*The maximum decoration level for fontifying actions. Value `none' means, do not fontify actions, just normal grammar code according to `antlr-font-lock-additional-keywords'. Value `inherit' means, use value of `font-lock-maximum-decoration'. Any other value is interpreted as in `font-lock-maximum-decoration' with no level-0 fontification, see `antlr-font-lock-keywords-alist'. While calculating the decoration level for actions, `major-mode' is bound to `antlr-language'. For example, with value \((java-mode \. 2) (c++-mode \. 0)) Java actions are fontified with level 2 and C++ actions are not fontified at all." :type '(choice (const :tag "None" none) (const :tag "Inherit" inherit) (const :tag "Default" nil) (const :tag "Maximum" t) (integer :tag "Level" 1) (repeat :menu-tag "Mode specific" :tag "Mode specific" :value ((t . t)) (cons :tag "Instance" (radio :tag "Mode" (const :tag "All" t) (symbol :tag "Name")) (radio :tag "Decoration" (const :tag "Default" nil) (const :tag "Maximum" t) (integer :tag "Level" 1)))))) (defconst antlr-no-action-keywords nil ;; Using nil directly won't work (would use highest level, see ;; `font-lock-choose-keywords'), but a non-symbol, i.e., (list), at `car' ;; would break Emacs-21.0: "Empty font-lock keywords for actions. Do not change the value of this constant.") (defvar antlr-font-lock-keywords-alist '((java-mode antlr-no-action-keywords java-font-lock-keywords-1 java-font-lock-keywords-2 java-font-lock-keywords-3) (c++-mode antlr-no-action-keywords c++-font-lock-keywords-1 c++-font-lock-keywords-2 c++-font-lock-keywords-3)) "List of font-lock keywords for actions in the grammar. 
Each element in this list looks like \(MAJOR-MODE KEYWORD...) If `antlr-language' is equal to MAJOR-MODE, the KEYWORDs are the font-lock keywords according to `font-lock-defaults' used for the code in the grammar's actions and semantic predicates, see `antlr-font-lock-maximum-decoration'.") (defvar antlr-font-lock-default-face 'antlr-font-lock-default-face) (defface antlr-font-lock-default-face nil "Face to prevent strings from language dependent highlighting. Do not change." :group 'antlr) (defvar antlr-font-lock-keyword-face 'antlr-font-lock-keyword-face) (defface antlr-font-lock-keyword-face '((((class color) (background light)) (:foreground "black" :bold t))) "ANTLR keywords." :group 'antlr) (defvar antlr-font-lock-syntax-face 'antlr-font-lock-keyword-face) (defface antlr-font-lock-syntax-face '((((class color) (background light)) (:foreground "black" :bold t))) "ANTLR syntax symbols like :, |, (, ), ...." :group 'antlr) (defvar antlr-font-lock-ruledef-face 'antlr-font-lock-ruledef-face) (defface antlr-font-lock-ruledef-face '((((class color) (background light)) (:foreground "blue" :bold t))) "ANTLR rule references (definition)." :group 'antlr) (defvar antlr-font-lock-tokendef-face 'antlr-font-lock-tokendef-face) (defface antlr-font-lock-tokendef-face '((((class color) (background light)) (:foreground "blue" :bold t))) "ANTLR token references (definition)." :group 'antlr) (defvar antlr-font-lock-ruleref-face 'antlr-font-lock-ruleref-face) (defface antlr-font-lock-ruleref-face '((((class color) (background light)) (:foreground "blue4"))) "ANTLR rule references (usage)." :group 'antlr) (defvar antlr-font-lock-tokenref-face 'antlr-font-lock-tokenref-face) (defface antlr-font-lock-tokenref-face '((((class color) (background light)) (:foreground "orange4"))) "ANTLR token references (usage)." :group 'antlr) (defvar antlr-font-lock-literal-face 'antlr-font-lock-literal-face) (defface antlr-font-lock-literal-face '((((class color) (background light)) (:foreground "brown4" :bold t))) "ANTLR special literal tokens. It is used to highlight strings matched by the first regexp group of `antlr-font-lock-literal-regexp'." :group 'antlr) (defcustom antlr-font-lock-literal-regexp "\"\\(\\sw\\(\\sw\\|-\\)*\\)\"" "Regexp matching literals with special syntax highlighting, or nil. If nil, there is no special syntax highlighting for some literals. Otherwise, it should be a regular expression which must contain a regexp group. The string matched by the first group is highlighted with `antlr-font-lock-literal-face'." 
:group 'antlr :type '(choice (const :tag "None" nil) regexp)) (defvar antlr-class-header-regexp "\\(class\\)[ \t]+\\([A-Za-z\300-\326\330-\337]\\sw*\\)[ \t]+\\(extends\\)[ \t]+\\([A-Za-z\300-\326\330-\337]\\sw*\\)[ \t]*;" "Regexp matching class headers.") (defvar antlr-font-lock-additional-keywords `((antlr-invalidate-context-cache) ("\\$setType[ \t]*(\\([A-Za-z\300-\326\330-\337]\\sw*\\))" (1 antlr-font-lock-tokendef-face)) ("\\$\\sw+" (0 font-lock-keyword-face)) ;; the tokens are already fontified as string/docstrings: (,(lambda (limit) (if antlr-font-lock-literal-regexp (antlr-re-search-forward antlr-font-lock-literal-regexp limit))) (1 antlr-font-lock-literal-face t) ,@(and (featurep 'xemacs) '((0 nil)))) ; XEmacs bug workaround (,(lambda (limit) (antlr-re-search-forward antlr-class-header-regexp limit)) (1 antlr-font-lock-keyword-face) (2 antlr-font-lock-ruledef-face) (3 antlr-font-lock-keyword-face) (4 (if (member (match-string 4) '("Lexer" "Parser" "TreeParser")) 'antlr-font-lock-keyword-face 'font-lock-type-face))) (,(lambda (limit) (antlr-re-search-forward "\\<\\(header\\|options\\|tokens\\|exception\\|catch\\|returns\\)\\>" limit)) (1 antlr-font-lock-keyword-face)) (,(lambda (limit) (antlr-re-search-forward "^\\(private\\|public\\|protected\\)\\>[ \t]*\\(\\(\\sw+[ \t]*\\(:\\)?\\)\\)?" limit)) (1 font-lock-type-face) ; not XEmacs' java level-3 fruit salad (3 (if (antlr-upcase-p (char-after (match-beginning 3))) 'antlr-font-lock-tokendef-face 'antlr-font-lock-ruledef-face) nil t) (4 antlr-font-lock-syntax-face nil t)) (,(lambda (limit) (antlr-re-search-forward "^\\(\\sw+\\)[ \t]*\\(:\\)?" limit)) (1 (if (antlr-upcase-p (char-after (match-beginning 0))) 'antlr-font-lock-tokendef-face 'antlr-font-lock-ruledef-face) nil t) (2 antlr-font-lock-syntax-face nil t)) (,(lambda (limit) ;; v:ruleref and v:"literal" is allowed... (antlr-re-search-forward "\\(\\sw+\\)[ \t]*\\([=:]\\)?" limit)) (1 (if (match-beginning 2) (if (eq (char-after (match-beginning 2)) ?=) 'antlr-font-lock-default-face 'font-lock-variable-name-face) (if (antlr-upcase-p (char-after (match-beginning 1))) 'antlr-font-lock-tokenref-face 'antlr-font-lock-ruleref-face))) (2 antlr-font-lock-default-face nil t)) (,(lambda (limit) (antlr-re-search-forward "[|&:;(]\\|)\\([*+?]\\|=>\\)?" limit)) (0 'antlr-font-lock-syntax-face))) "Font-lock keywords for ANTLR's normal grammar code. See `antlr-font-lock-keywords-alist' for the keywords of actions.") (defvar antlr-font-lock-defaults '(antlr-font-lock-keywords nil nil ((?_ . "w") (?\( . ".") (?\) . ".")) beginning-of-defun) "Font-lock defaults used for ANTLR syntax highlighting. The SYNTAX-ALIST element is also used to initialize `antlr-action-syntax-table'.") ;;;=========================================================================== ;;; Internal variables ;;;=========================================================================== (defvar antlr-mode-hook nil "Hook called by `antlr-mode'.") (defvar antlr-mode-syntax-table nil "Syntax table used in `antlr-mode' buffers. If non-nil, it will be initialized in `antlr-mode'.") ;; used for "in Java/C++ code" = syntactic-depth>0 (defvar antlr-action-syntax-table nil "Syntax table used for ANTLR action parsing. Initialized by `antlr-mode-syntax-table', changed by SYNTAX-ALIST in `antlr-font-lock-defaults'. 
This table should be selected if you use `buffer-syntactic-context' and `buffer-syntactic-context-depth' in order not to confuse their context_cache.") (defvar antlr-mode-abbrev-table nil "Abbreviation table used in `antlr-mode' buffers.") (define-abbrev-table 'antlr-mode-abbrev-table ()) ;;;;########################################################################## ;;;; The Code ;;;;########################################################################## ;;;=========================================================================== ;;; Syntax functions -- Emacs vs XEmacs dependent ;;;=========================================================================== ;; From help.el (XEmacs-21.1), without `copy-syntax-table' (defmacro antlr-with-syntax-table (syntab &rest body) "Evaluate BODY with the syntax table SYNTAB." `(let ((stab (syntax-table))) (unwind-protect (progn (set-syntax-table ,syntab) ,@body) (set-syntax-table stab)))) (put 'antlr-with-syntax-table 'lisp-indent-function 1) (put 'antlr-with-syntax-table 'edebug-form-spec '(form body)) (defun antlr-scan-sexps-internal (from count &optional dummy no-error) ;; checkdoc-params: (from count dummy) "Like `scan-sexps' but with additional arguments. When optional arg NO-ERROR is non-nil, `antlr-scan-sexps-internal' will return nil instead of signaling an error." (if no-error (condition-case nil (scan-sexps from count) (error nil)) (scan-sexps from count))) (defun antlr-scan-lists-internal (from count depth &optional dummy no-error) ;; checkdoc-params: (from count depth dummy) "Like `scan-lists' but with additional arguments. When optional arg NO-ERROR is non-nil, `antlr-scan-lists-internal' will return nil instead of signaling an error." (if no-error (condition-case nil (scan-lists from count depth) (error nil)) (scan-lists from count depth))) (defun antlr-xemacs-bug-workaround (&rest dummies) ;; checkdoc-params: (dummies) "Invalidate context_cache for syntactical context information." ;; XEmacs bug workaround (save-excursion (set-buffer (get-buffer-create " ANTLR XEmacs bug workaround")) (buffer-syntactic-context-depth)) nil) (defun antlr-fast-syntactic-context () "Return some syntactic context information. Return `string' if point is within a string, `block-comment' or `comment' is point is within a comment or the depth within all parenthesis-syntax delimiters at point otherwise. WARNING: this may alter `match-data'." (or (buffer-syntactic-context) (buffer-syntactic-context-depth))) (defun antlr-slow-syntactic-context () "Return some syntactic context information. Return `string' if point is within a string, `block-comment' or `comment' is point is within a comment or the depth within all parenthesis-syntax delimiters at point otherwise. WARNING: this may alter `match-data'." (let ((orig (point))) (beginning-of-defun) (let ((state (parse-partial-sexp (point) orig))) (goto-char orig) (cond ((nth 3 state) 'string) ((nth 4 state) 'comment) ; block-comment? -- we don't care (t (car state)))))) ;;;=========================================================================== ;;; Misc functions ;;;=========================================================================== (defun antlr-upcase-p (char) "Non-nil, if CHAR is an uppercase character (if CHAR was a char)." ;; in XEmacs, upcase only works for ASCII (or (and (<= ?A char) (<= char ?Z)) (and (<= ?\300 char) (<= char ?\337)))) ; ?\327 is no letter (defun antlr-re-search-forward (regexp bound) "Search forward from point for regular expression REGEXP. 
Set point to the end of the occurrence found, and return point. Return nil if no occurrence was found. Do not search within comments, strings and actions/semantic predicates. BOUND bounds the search; it is a buffer position. See also the functions `match-beginning', `match-end' and `replace-match'." ;; WARNING: Should only be used with `antlr-action-syntax-table'! (let ((continue t)) (while (and (re-search-forward regexp bound 'limit) (save-match-data (if (eq (antlr-syntactic-context) 0) (setq continue nil) t)))) (if continue nil (point)))) (defun antlr-search-forward (string) "Search forward from point for STRING. Set point to the end of the occurrence found, and return point. Return nil if no occurrence was found. Do not search within comments, strings and actions/semantic predicates." ;; WARNING: Should only be used with `antlr-action-syntax-table'! (let ((continue t)) (while (and (search-forward string nil 'limit) (if (eq (antlr-syntactic-context) 0) (setq continue nil) t))) (if continue nil (point)))) (defun antlr-search-backward (string) "Search backward from point for STRING. Set point to the beginning of the occurrence found, and return point. Return nil if no occurrence was found. Do not search within comments, strings and actions/semantic predicates." ;; WARNING: Should only be used with `antlr-action-syntax-table'! (let ((continue t)) (while (and (search-backward string nil 'limit) (if (eq (antlr-syntactic-context) 0) (setq continue nil) t))) (if continue nil (point)))) (defsubst antlr-skip-sexps (count) "Skip the next COUNT balanced expressions and the comments after it. Return position before the comments after the last expression." (goto-char (or (antlr-scan-sexps (point) count nil t) (point-max))) (prog1 (point) (c-forward-syntactic-ws))) ;;;=========================================================================== ;;; font-lock ;;;=========================================================================== (defun antlr-font-lock-keywords () "Return font-lock keywords for current buffer. See `antlr-font-lock-additional-keywords', `antlr-language' and `antlr-font-lock-maximum-decoration'." (if (eq antlr-font-lock-maximum-decoration 'none) antlr-font-lock-additional-keywords (append antlr-font-lock-additional-keywords (eval (let ((major-mode antlr-language)) ; dynamic (font-lock-choose-keywords (cdr (assq antlr-language antlr-font-lock-keywords-alist)) (if (eq antlr-font-lock-maximum-decoration 'inherit) font-lock-maximum-decoration antlr-font-lock-maximum-decoration))))))) ;;;=========================================================================== ;;; imenu support ;;;=========================================================================== (defun antlr-grammar-tokens () "Return alist for tokens defined in current buffer." (save-excursion (antlr-imenu-create-index-function t))) (defun antlr-imenu-create-index-function (&optional tokenrefs-only) "Return imenu index-alist for ANTLR grammar files. IF TOKENREFS-ONLY is non-nil, just return alist with tokenref names." (let ((items nil) (classes nil) (semi (point-max))) ;; Using `imenu-progress-message' would require imenu for compilation -- ;; nobody is missing these messages... (antlr-with-syntax-table antlr-action-syntax-table ;; We stick to the imenu standard and search backwards, although I don't ;; think this is right. It is slower and more likely not to work during ;; editing (you are more likely to add functions to the end of the file). 
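;; Note (illustrative, not part of the original code): `antlr-grammar-tokens'
;; above calls this function with TOKENREFS-ONLY non-nil, so the resulting
;; alist of token names also serves as the completion TABLE for some option
;; values, e.g. the lexer rule option "ignore" in `antlr-options-alists'.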
(while semi (goto-char semi) (setq semi (antlr-search-backward ";")) (if semi (progn (forward-char) (antlr-skip-exception-part t)) (antlr-skip-file-prelude t)) (if (looking-at "{") (antlr-skip-sexps 1)) (if (looking-at antlr-class-header-regexp) (or tokenrefs-only (push (cons (match-string 2) (if imenu-use-markers (copy-marker (match-beginning 2)) (match-beginning 2))) classes)) (if (looking-at "p\\(ublic\\|rotected\\|rivate\\)") (antlr-skip-sexps 1)) (when (looking-at "\\sw+") (if tokenrefs-only (if (antlr-upcase-p (char-after (point))) (push (list (match-string 0)) items)) (push (cons (match-string 0) (if imenu-use-markers (copy-marker (match-beginning 0)) (match-beginning 0))) items)))))) (if classes (cons (cons "Classes" classes) items) items))) ;;;=========================================================================== ;;; Parse grammar files (internal functions) ;;;=========================================================================== (defun antlr-skip-exception-part (skip-comment) "Skip exception part of current rule, i.e., everything after `;'. This also includes the options and tokens part of a grammar class header. If SKIP-COMMENT is non-nil, also skip the comment after that part." (let ((pos (point)) (class nil)) (c-forward-syntactic-ws) (while (looking-at "options\\>\\|tokens\\>") (setq class t) (setq pos (antlr-skip-sexps 2))) (if class ;; Problem: an action only belongs to a class def, not a normal rule. ;; But checking the current rule type is too expensive => only expect ;; an action if we have found an option or tokens part. (if (looking-at "{") (setq pos (antlr-skip-sexps 1))) (while (looking-at "exception\\>") (setq pos (antlr-skip-sexps 1)) (when (looking-at "\\[") (setq pos (antlr-skip-sexps 1))) (while (looking-at "catch\\>") (setq pos (antlr-skip-sexps 3))))) (or skip-comment (goto-char pos)))) (defun antlr-skip-file-prelude (skip-comment) "Skip the file prelude: the header and file options. If SKIP-COMMENT is non-nil, also skip the comment after that part. Return the start position of the file prelude. Hack: if SKIP-COMMENT is `header-only' only skip header and return position before the comment after the header." (let* ((pos (point)) (pos0 pos)) (c-forward-syntactic-ws) (if skip-comment (setq pos0 (point))) (while (looking-at "header\\>[ \t]*\\(\"\\)?") (setq pos (antlr-skip-sexps (if (match-beginning 1) 3 2)))) (if (eq skip-comment 'header-only) ; a hack... pos (when (looking-at "options\\>") (setq pos (antlr-skip-sexps 2))) (or skip-comment (goto-char pos)) pos0))) (defun antlr-next-rule (arg skip-comment) "Move forward to next end of rule. Do it ARG many times. A grammar class header and the file prelude are also considered as a rule. Negative argument ARG means move back to ARGth preceding end of rule. The behavior is not defined when ARG is zero. If SKIP-COMMENT is non-nil, move to beginning of the rule." ;; WARNING: Should only be used with `antlr-action-syntax-table'! 
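;; Illustration (not part of the original code): the interactive movement
;; commands below bind the required syntax table before calling this helper;
;; `antlr-end-of-rule' essentially does
;;   (antlr-with-syntax-table antlr-action-syntax-table
;;     (antlr-next-rule arg nil))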
;; PRE: ARG<>0 (let ((pos (point)) (beg (point))) ;; first look whether point is in exception part (if (antlr-search-backward ";") (progn (setq beg (point)) (forward-char) (antlr-skip-exception-part skip-comment)) (antlr-skip-file-prelude skip-comment)) (if (< arg 0) (unless (and (< (point) pos) (zerop (incf arg))) ;; if we have moved backward, we already moved one defun backward (goto-char beg) ; rewind (to ";" / point) (while (and arg (<= (incf arg) 0)) (if (antlr-search-backward ";") (setq beg (point)) (when (>= arg -1) ;; try file prelude: (setq pos (antlr-skip-file-prelude skip-comment)) (if (zerop arg) (if (>= (point) beg) (goto-char (if (>= pos beg) (point-min) pos))) (goto-char (if (or (>= (point) beg) (= (point) pos)) (point-min) pos)))) (setq arg nil))) (when arg ; always found a ";" (forward-char) (antlr-skip-exception-part skip-comment))) (if (<= (point) pos) ; moved backward? (goto-char pos) ; rewind (decf arg)) ; already moved one defun forward (unless (zerop arg) (while (>= (decf arg) 0) (antlr-search-forward ";")) (antlr-skip-exception-part skip-comment))))) (defun antlr-outside-rule-p () "Non-nil if point is outside a grammar rule. Move to the beginning of the current rule if point is inside a rule." ;; WARNING: Should only be used with `antlr-action-syntax-table'! (let ((pos (point))) (antlr-next-rule -1 nil) (let ((between (or (bobp) (< (point) pos)))) (c-forward-syntactic-ws) (and between (> (point) pos) (goto-char pos))))) ;;;=========================================================================== ;;; Parse grammar files (commands) ;;;=========================================================================== ;; No (interactive "_") in Emacs... use `zmacs-region-stays'. (defun antlr-inside-rule-p () "Non-nil if point is inside a grammar rule. A grammar class header and the file prelude are also considered as a rule." (save-excursion (antlr-with-syntax-table antlr-action-syntax-table (not (antlr-outside-rule-p))))) (defun antlr-end-of-rule (&optional arg) "Move forward to next end of rule. Do it ARG [default: 1] many times. A grammar class header and the file prelude are also considered as a rule. Negative argument ARG means move back to ARGth preceding end of rule. If ARG is zero, run `antlr-end-of-body'." (interactive "p") (if (zerop arg) (antlr-end-of-body) (antlr-with-syntax-table antlr-action-syntax-table (antlr-next-rule arg nil)) (setq zmacs-region-stays t))) (defun antlr-beginning-of-rule (&optional arg) "Move backward to preceding beginning of rule. Do it ARG many times. A grammar class header and the file prelude are also considered as a rule. Negative argument ARG means move forward to ARGth next beginning of rule. If ARG is zero, run `antlr-beginning-of-body'." (interactive "p") (if (zerop arg) (antlr-beginning-of-body) (antlr-with-syntax-table antlr-action-syntax-table (antlr-next-rule (- arg) t)) (setq zmacs-region-stays t))) (defun antlr-end-of-body (&optional msg) "Move to position after the `;' of the current rule. A grammar class header is also considered as a rule. With optional prefix arg MSG, move to `:'." 
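;; Usage note (illustrative): bound to C-c C-e in `antlr-mode-map' above;
;; `antlr-beginning-of-body' below reuses this command with a MSG argument,
;; so that point stops after the `:' of the rule instead of after the `;'.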
(interactive) (antlr-with-syntax-table antlr-action-syntax-table (let ((orig (point))) (if (antlr-outside-rule-p) (error "Outside an ANTLR rule")) (let ((bor (point))) (when (< (antlr-skip-file-prelude t) (point)) ;; Yes, we are in the file prelude (goto-char orig) (error (or msg "The file prelude is without `;'"))) (antlr-search-forward ";") (when msg (when (< (point) (progn (goto-char bor) (or (antlr-search-forward ":") (point-max)))) (goto-char orig) (error msg)) (c-forward-syntactic-ws))))) (setq zmacs-region-stays t)) (defun antlr-beginning-of-body () "Move to the first element after the `:' of the current rule." (interactive) (antlr-end-of-body "Class headers and the file prelude are without `:'")) ;;;=========================================================================== ;;; Literal normalization, Hide Actions ;;;=========================================================================== (defun antlr-downcase-literals (&optional transform) "Convert all literals in buffer to lower case. If non-nil, TRANSFORM is used on literals instead of `downcase-region'." (interactive) (or transform (setq transform 'downcase-region)) (let ((literals 0)) (save-excursion (goto-char (point-min)) (antlr-with-syntax-table antlr-action-syntax-table (antlr-invalidate-context-cache) (while (antlr-re-search-forward "\"\\(\\sw\\(\\sw\\|-\\)*\\)\"" nil) (funcall transform (match-beginning 0) (match-end 0)) (incf literals)))) (message "Transformed %d literals" literals))) (defun antlr-upcase-literals () "Convert all literals in buffer to upper case." (interactive) (antlr-downcase-literals 'upcase-region)) (defun antlr-hide-actions (arg &optional silent) "Hide or unhide all actions in buffer. Hide all actions including arguments in brackets if ARG is 1 or if called interactively without prefix argument. Hide all actions excluding arguments in brackets if ARG is 2 or higher. Unhide all actions if ARG is 0 or negative. See `antlr-action-visibility'. Display a message unless optional argument SILENT is non-nil." 
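;; Examples (illustrative, matching the key binding and menu entries above):
;;   C-c C-v            hide all actions including arguments
;;   C-u 2 C-c C-v      hide actions but keep arguments visible
;;   C-u 0 C-c C-v      unhide everything
;; From Lisp, (antlr-hide-actions 2 t) behaves like the second example
;; without printing a message.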
(interactive "p") ;; from Emacs/lazy-lock: `save-buffer-state' (let ((modified (buffer-modified-p)) (buffer-undo-list t) (inhibit-read-only t) (inhibit-point-motion-hooks t) deactivate-mark ; Emacs only before-change-functions after-change-functions buffer-file-name buffer-file-truename) (if (> arg 0) (let ((regexp (if (= arg 1) "[]}]" "}")) (diff (and antlr-action-visibility (+ (max antlr-action-visibility 0) 2)))) (antlr-hide-actions 0 t) (save-excursion (goto-char (point-min)) (antlr-with-syntax-table antlr-action-syntax-table (antlr-invalidate-context-cache) (while (antlr-re-search-forward regexp nil) (let ((beg (antlr-scan-sexps (point) -1 nil t))) (when beg (if diff ; braces are visible (if (> (point) (+ beg diff)) (add-text-properties (1+ beg) (1- (point)) '(invisible t intangible t))) ;; if actions is on line(s) of its own, hide WS (and (looking-at "[ \t]*$") (save-excursion (goto-char beg) (skip-chars-backward " \t") (and (bolp) (setq beg (point)))) (beginning-of-line 2)) ; beginning of next line (add-text-properties beg (point) '(invisible t intangible t)))))))) (or silent (message "Hide all actions (%s arguments)...done" (if (= arg 1) "including" "excluding")))) (remove-text-properties (point-min) (point-max) '(invisible nil intangible nil)) (or silent (message "Unhide all actions (including arguments)...done"))) (and (not modified) (buffer-modified-p) (set-buffer-modified-p nil)))) ;;;=========================================================================== ;;; Insert option: command ;;;=========================================================================== (defun antlr-insert-option (level option &optional location) "Insert file/grammar/rule/subrule option near point. LEVEL determines option kind to insert: 1=file, 2=grammar, 3=rule, 4=subrule. OPTION is a string with the name of the option to insert. LOCATION can be specified for not calling `antlr-option-kind' twice. Inserting an option with this command works as follows: 1. When called interactively, LEVEL is determined by the prefix argument or automatically deduced without prefix argument. 2. Signal an error if no option of that level could be inserted, e.g., if the buffer is read-only, the option area is outside the visible part of the buffer or a subrule/rule option should be inserted with point outside a subrule/rule. 3. When called interactively, OPTION is read from the minibuffer with completion over the known options of the given LEVEL. 4. Ask user for confirmation if the given OPTION does not seem to be a valid option to insert into the current file. 5. Find a correct position to insert the option. 6. Depending on the option, insert it the following way \(inserting an option also means inserting the option section if necessary\): - Insert the option and let user insert the value at point. - Read a value (with completion) from the minibuffer, using a previous value as initial contents, and insert option with value. 7. Final action depending on the option. For example, set the language according to a newly inserted language option. The name of all options with a specification for their values are stored in `antlr-options-alist'. The used specification also depends on the value of `antlr-tool-version', i.e., step 4 will warn you if you use an option that has been introduced in newer version of ANTLR, and step 5 will offer completion using version-correct values. If the option already exists inside the visible part of the buffer, this command can be used to change the value of that option. 
Otherwise, find a correct position where the option can be inserted near point. The search for a correct position is as follows: * If search is within an area where options can be inserted, use the position of point. Inside the options section and if point is in the middle of a option definition, skip the rest of it. * If an options section already exists, insert the options at the end. If only the beginning of the area is visible, insert at the beginning. * Otherwise, find the position where an options section can be inserted and insert a new section before any comments. If the position before the comments is not visible, insert the new section after the comments. This function also inserts \"options {...}\" and the \":\" if necessary, see `antlr-options-auto-colon'. See also `antlr-options-assign-string'. This command might also set the mark like \\[set-mark-command] does, see `antlr-options-push-mark'." (interactive (antlr-insert-option-interactive current-prefix-arg)) (barf-if-buffer-read-only) (or location (setq location (cdr (antlr-option-kind level)))) (cond ((null level) (error "Cannot deduce what kind of option to insert")) ((atom location) (error "Cannot insert any %s options around here" (elt antlr-options-headings (1- level))))) (let ((area (car location)) (place (cdr location))) (cond ((null place) ; invisible (error (if area "Invisible %s options, use %s to make them visible" "Invisible area for %s options, use %s to make it visible") (elt antlr-options-headings (1- level)) (substitute-command-keys "\\[widen]"))) ((null area) ; without option part (antlr-insert-option-do level option nil (null (cdr place)) (car place))) ((save-excursion ; with option part, option visible (goto-char (max (point-min) (car area))) (re-search-forward (concat "\\(^\\|;\\)[ \t]*\\(\\<" (regexp-quote option) "\\>\\)[ \t\n]*\\(\\(=[ \t]?\\)[ \t]*\\(\\(\\sw\\|\\s_\\)+\\|\"\\([^\n\"\\]\\|[\\][^\n]\\)*\"\\)?\\)?") ;; 2=name, 3=4+5, 4="=", 5=value (min (point-max) (cdr area)) t)) (antlr-insert-option-do level option (cons (or (match-beginning 5) (match-beginning 3)) (match-end 5)) (and (null (cdr place)) area) (or (match-beginning 5) (match-end 4) (match-end 2)))) (t ; with option part, option not yet (antlr-insert-option-do level option t (and (null (cdr place)) area) (car place)))))) (defun antlr-insert-option-interactive (arg) "Interactive specification for `antlr-insert-option'. Use prefix argument ARG to return \(LEVEL OPTION LOCATION)." (barf-if-buffer-read-only) (if arg (setq arg (prefix-numeric-value arg))) (unless (memq arg '(nil 1 2 3 4)) (error "Valid prefix args: no=auto, 1=file, 2=grammar, 3=rule, 4=subrule")) (let* ((kind (antlr-option-kind arg)) (level (car kind))) (if (atom (cdr kind)) (list level nil (cdr kind)) (let* ((table (elt antlr-options-alists (1- level))) (completion-ignore-case t) ;dynamic (input (completing-read (format "Insert %s option: " (elt antlr-options-headings (1- level))) table))) (list level input (cdr kind)))))) (defun antlr-options-menu-filter (level menu-items) "Return items for options submenu of level LEVEL." 
;; checkdoc-params: (menu-items) (let ((active (if buffer-read-only nil (consp (cdr-safe (cdr (antlr-option-kind level))))))) (mapcar (lambda (option) (vector option (list 'antlr-insert-option level option) :active active)) (sort (mapcar 'car (elt antlr-options-alists (1- level))) 'string-lessp)))) ;;;=========================================================================== ;;; Insert option: determine section-kind ;;;=========================================================================== (defun antlr-option-kind (requested) "Return level and location for option to insert near point. Call function `antlr-option-level' with argument REQUESTED. If the result is nil, return \(REQUESTED \. error). If the result has the non-nil value LEVEL, return \(LEVEL \. LOCATION) where LOCATION looks like \(AREA \. PLACE), see `antlr-option-location'." (save-excursion (save-restriction (let ((min0 (point-min)) ; before `widen'! (max0 (point-max)) (orig (point)) (level (antlr-option-level requested)) ; calls `widen'! pos) (cond ((null level) (setq level requested)) ((eq level 1) ; file options (goto-char (point-min)) (setq pos (antlr-skip-file-prelude 'header-only))) ((not (eq level 3)) ; grammar or subrule options (setq pos (point)) (c-forward-syntactic-ws)) ((looking-at "^\\(private[ \t\n]\\|public[ \t\n]\\|protected[ \t\n]\\)?[ \t\n]*\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*\\(!\\)?[ \t\n]*\\(\\[\\)?") ;; rule options, with complete rule header (goto-char (or (match-end 4) (match-end 3))) (setq pos (antlr-skip-sexps (if (match-end 5) 1 0))) (when (looking-at "returns[ \t\n]*\\[") (goto-char (1- (match-end 0))) (setq pos (antlr-skip-sexps 1))))) (cons level (cond ((null pos) 'error) ((looking-at "options[ \t\n]*{") (goto-char (match-end 0)) (setq pos (antlr-scan-lists (point) 1 1 nil t)) (antlr-option-location orig min0 max0 (point) (if pos (1- pos) (point-max)) t)) (t (antlr-option-location orig min0 max0 pos (point) nil)))))))) (defun antlr-option-level (requested) "Return level for option to insert near point. Remove any restrictions from current buffer and return level for the option to insert near point, i.e., 1, 2, 3, 4, or nil if no such option can be inserted. If REQUESTED is non-nil, it is the only possible value to return except nil. If REQUESTED is nil, return level for the nearest option kind, i.e., the highest number possible. If the result is 2, point is at the beginning of the class after the class definition. If the result is 3 or 4, point is at the beginning of the rule/subrule after the init action. Otherwise, the point position is undefined." (widen) (if (eq requested 1) 1 (antlr-with-syntax-table antlr-action-syntax-table (antlr-invalidate-context-cache) (let* ((orig (point)) (outsidep (antlr-outside-rule-p)) bor depth) (if (eq (char-after) ?\{) (antlr-skip-sexps 1)) (setq bor (point)) ; beginning of rule (after init action) (cond ((eq requested 2) ; grammar options required? (let (boc) ; beginning of class (goto-char (point-min)) (while (and (<= (point) bor) (antlr-re-search-forward antlr-class-header-regexp nil)) (if (<= (match-beginning 0) bor) (setq boc (match-end 0)))) (when boc (goto-char boc) 2))) ((save-excursion ; in region of file options? (goto-char (point-min)) (antlr-skip-file-prelude t) ; ws/comment after: OK (< orig (point))) (and (null requested) 1)) (outsidep ; outside rule not OK nil) ((looking-at antlr-class-header-regexp) ; rule = class def? (goto-char (match-end 0)) (and (null requested) 2)) ((eq requested 3) ; rule options required? 
(goto-char bor) 3) ((setq depth (antlr-syntactic-grammar-depth orig bor)) (if (> depth 0) ; move out of actions (goto-char (scan-lists (point) -1 depth))) (set-syntax-table antlr-mode-syntax-table) (antlr-invalidate-context-cache) (if (eq (antlr-syntactic-context) 0) ; not in subrule? (unless (eq requested 4) (goto-char bor) 3) (goto-char (1+ (scan-lists (point) -1 1))) 4))))))) (defun antlr-option-location (orig min-vis max-vis min-area max-area withp) "Return location for the options area. ORIG is the original position of `point', MIN-VIS is `point-min' and MAX-VIS is `point-max'. If WITHP is non-nil, there exists an option specification and it starts after the brace at MIN-AREA and stops at MAX-AREA. If WITHP is nil, there is no area and the region where it could be inserted starts at MIN-AREA and stops at MAX-AREA. The result has the form (AREA . PLACE). AREA is (MIN-AREA . MAX-AREA) if WITHP is non-nil, and nil otherwise. PLACE is nil if the area is invisible, (ORIG) if ORIG is inside the area, (MIN-AREA . beginning) for a visible start position and (MAX-AREA . end) for a visible end position where the beginning is preferred if WITHP is nil and the end if WITHP is non-nil." (cons (and withp (cons min-area max-area)) (cond ((and (<= min-area orig) (<= orig max-area)) ;; point in options area (list orig)) ((and (null withp) (<= min-vis min-area) (<= min-area max-vis)) ;; use start of options area (only if not `withp') (cons min-area 'beginning)) ((and (<= min-vis max-area) (<= max-area max-vis)) ;; use end of options area (cons max-area 'end)) ((and withp (<= min-vis min-area) (<= min-area max-vis)) ;; use start of options area (only if `withp') (cons min-area 'beginning))))) (defun antlr-syntactic-grammar-depth (pos beg) "Return syntactic context depth at POS. Move to POS and from there on to the beginning of the string or comment if POS is inside such a construct. Then, return the syntactic context depth at point if the point position is smaller than BEG. WARNING: this may alter `match-data'." (goto-char pos) (let ((context (or (antlr-syntactic-context) 0))) (while (and context (not (integerp context))) (cond ((eq context 'string) (setq context (and (search-backward "\"" nil t) (>= (point) beg) (or (antlr-syntactic-context) 0)))) ((memq context '(comment block-comment)) (setq context (and (re-search-backward "/[/*]" nil t) (>= (point) beg) (or (antlr-syntactic-context) 0)))))) context)) ;;;=========================================================================== ;;; Insert options: do the insertion ;;;=========================================================================== (defun antlr-insert-option-do (level option old area pos) "Insert option into buffer at position POS. Insert option of level LEVEL and name OPTION. If OLD is non-nil, an options area is already exists. If OLD looks like \(BEG \. END), the option already exists. Then, BEG is the start position of the option value, the position of the `=' or nil, and END is the end position of the option value or nil. If the original point position was outside an options area, AREA is nil. Otherwise, and if an option specification already exists, AREA is a cons cell where the two values determine the area inside the braces." 
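;; Hedged illustration added for clarity (the option name and buffer
;; positions below are invented, not taken from the original code): for a
;; grammar-level option "k" that already exists in the buffer as "k = 2;",
;; OLD would be a cons (BEG . END) delimiting the value "2", AREA a cons
;; delimiting the text between the braces of "options {...}", and POS the
;; position of the value, e.g.
;;   (antlr-insert-option-do 2 "k" '(120 . 121) '(110 . 130) 120)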
(let* ((spec (cdr (assoc option (elt antlr-options-alists (1- level))))) (value (antlr-option-spec level option (cdr spec) (consp old)))) (if (fboundp (car spec)) (funcall (car spec) 'before-input option)) ;; set mark (unless point was inside options area before) (if (cond (area (eq antlr-options-push-mark t)) ((numberp antlr-options-push-mark) (> (count-lines (min (point) pos) (max (point) pos)) antlr-options-push-mark)) (antlr-options-push-mark)) (push-mark)) ;; read option value ----------------------------------------------------- (goto-char pos) (if (null value) ;; no option specification found (if (y-or-n-p (format "Insert unknown %s option %s? " (elt antlr-options-headings (1- level)) option)) (message "Insert value for %s option %s" (elt antlr-options-headings (1- level)) option) (error "Didn't insert unknown %s option %s" (elt antlr-options-headings (1- level)) option)) ;; option specification found (setq value (cdr value)) (if (car value) (let ((initial (and (consp old) (cdr old) (buffer-substring (car old) (cdr old))))) (setq value (apply (car value) (and initial (if (eq (aref initial 0) ?\") (read initial) initial)) (cdr value)))) (message (cadr value)) (setq value nil))) ;; insert value ---------------------------------------------------------- (if (consp old) (antlr-insert-option-existing old value) (if (consp area) ;; Move outside string/comment if point is inside option spec (antlr-syntactic-grammar-depth (point) (car area))) (antlr-insert-option-space area old) (or old (antlr-insert-option-area level)) (insert option " = ;") (backward-char) (if value (insert value))) ;; final ----------------------------------------------------------------- (if (fboundp (car spec)) (funcall (car spec) 'after-insertion option)))) (defun antlr-option-spec (level option specs existsp) "Return version correct option value specification. Return specification for option OPTION of kind level LEVEL. SPECS should correspond to the VALUE-SPEC... in `antlr-option-alists'. EXISTSP determines whether the option already exists." (let (value) (while (and specs (>= antlr-tool-version (caar specs))) (setq value (pop specs))) (cond (value) ; found correct spec ((null specs) nil) ; didn't find any specs (existsp (car specs)) ; wrong version, but already present ((y-or-n-p (format "Insert v%s %s option %s in v%s? " (antlr-version-string (caar specs)) (elt antlr-options-headings (1- level)) option (antlr-version-string antlr-tool-version))) (car specs)) (t (error "Didn't insert v%s %s option %s in v%s" (antlr-version-string (caar specs)) (elt antlr-options-headings (1- level)) option (antlr-version-string antlr-tool-version)))))) (defun antlr-version-string (version) "Format the Antlr version number VERSION, see `antlr-tool-version'." (let ((version100 (/ version 100))) (format "%d.%d.%d" (/ version100 100) (mod version100 100) (mod version 100)))) ;;;=========================================================================== ;;; Insert options: the details (used by `antlr-insert-option-do') ;;;=========================================================================== (defun antlr-insert-option-existing (old value) "Insert option value VALUE at point for existing option. For OLD, see `antlr-insert-option-do'." 
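;; Added hedged example (the option text is invented): if the existing
;; option reads "exportVocab = Calc" and VALUE is "Expr", the value region
;; recorded in OLD is replaced so the buffer ends up with
;; "exportVocab = Expr;", inserting the "=" and the trailing ";" only when
;; they are missing.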
;; no = => insert = (unless (car old) (insert antlr-options-assign-string)) ;; with user input => insert if necessary (when value (if (cdr old) ; with value (if (string-equal value (buffer-substring (car old) (cdr old))) (goto-char (cdr old)) (delete-region (car old) (cdr old)) (insert value)) (insert value))) (unless (looking-at "\\([^\n=;{}/'\"]\\|'\\([^\n'\\]\\|\\\\.\\)*'\\|\"\\([^\n\"\\]\\|\\\\.\\)*\"\\)*;") ;; stuff (no =, {, } or /) at point is not followed by ";" (insert ";") (backward-char))) (defun antlr-insert-option-space (area old) "Find appropriate place to insert option, insert newlines/spaces. For AREA and OLD, see `antlr-insert-option-do'." (let ((orig (point)) (open t)) (skip-chars-backward " \t") (unless (bolp) (let ((before (char-after (1- (point))))) (goto-char orig) (and old ; with existing options area (consp area) ; if point inside existing area (not (eq before ?\;)) ; if not at beginning of option ; => skip to end of option (if (and (search-forward ";" (cdr area) t) (let ((context (antlr-syntactic-context))) (or (null context) (numberp context)))) (setq orig (point)) (goto-char orig))) (skip-chars-forward " \t") (if (looking-at "$\\|//") ;; just comment after point => skip (+ lines w/ same col comment) (let ((same (if (> (match-end 0) (match-beginning 0)) (current-column)))) (beginning-of-line 2) (or (bolp) (insert "\n")) (when (and same (null area)) ; or (consp area)? (while (and (looking-at "[ \t]*\\(//\\)") (goto-char (match-beginning 1)) (= (current-column) same)) (beginning-of-line 2) (or (bolp) (insert "\n"))))) (goto-char orig) (if (null old) (progn (insert "\n") (antlr-indent-line)) (unless (eq (char-after (1- (point))) ?\ ) (insert " ")) (unless (eq (char-after (point)) ?\ ) (insert " ") (backward-char)) (setq open nil))))) (when open (beginning-of-line 1) (insert "\n") (backward-char) (antlr-indent-line)))) (defun antlr-insert-option-area (level) "Insert new options area for options of level LEVEL. Used by `antlr-insert-option-do'." (insert "options {\n\n}") (when (and antlr-options-auto-colon (memq level '(3 4)) (save-excursion (c-forward-syntactic-ws) (if (eq (char-after (point)) ?\{) (antlr-skip-sexps 1)) (not (eq (char-after (point)) ?\:)))) (insert "\n:") (antlr-indent-line) (end-of-line 0)) (backward-char 1) (antlr-indent-line) (beginning-of-line 0) (antlr-indent-line)) ;;;=========================================================================== ;;; Insert options: in `antlr-options-alists' ;;;=========================================================================== (defun antlr-read-value (initial-contents prompt &optional as-string table table-x) "Read a string from the minibuffer, possibly with completion. If INITIAL-CONTENTS is non-nil, insert it in the minibuffer initially. PROMPT is a string to prompt with, normally it ends in a colon and a space. If AS-STRING is t or is a member \(comparison done with `eq') of `antlr-options-style', return printed representation of the user input, otherwise return the user input directly. If TABLE or TABLE-X is non-nil, read with completion. The completion table is the resulting alist of TABLE-X concatenated with TABLE where TABLE can also be a function evaluation to an alist. Used inside `antlr-options-alists'." 
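;; Commented usage sketch added for illustration (the prompt and completion
;; table below are made up, not entries of `antlr-options-alists'):
;;   (antlr-read-value "Java" "Token vocabulary class: " t
;;                     '(("Java") ("Cpp")))
;; reads a value with completion over the alist and, because AS-STRING is t,
;; returns it as its printed (double-quoted) representation.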
(let* ((table0 (and (or table table-x) (append table-x (if (functionp table) (funcall table) table)))) (input (if table0 (completing-read prompt table0 nil nil initial-contents) (read-from-minibuffer prompt initial-contents)))) (if (and as-string (or (eq as-string t) (cdr (assq as-string antlr-options-style)))) (format "%S" input) input))) (defun antlr-read-boolean (initial-contents prompt &optional table) "Read a boolean value from the minibuffer, with completion. If INITIAL-CONTENTS is non-nil, insert it in the minibuffer initially. PROMPT is a string to prompt with, normally it ends in a question mark and a space. \"(true or false) \" is appended if TABLE is nil. Read with completion over \"true\", \"false\" and the keys in TABLE, see also `antlr-read-value'. Used inside `antlr-options-alists'." (antlr-read-value initial-contents (if table prompt (concat prompt "(true or false) ")) nil table '(("false") ("true")))) (defun antlr-language-option-extra (phase &rest dummies) ;; checkdoc-params: (dummies) "Change language according to the new value of the \"language\" option. Call `antlr-mode' if the new language would be different from the value of `antlr-language', keeping the value of variable `font-lock-mode'. Called in PHASE `after-insertion', see `antlr-options-alists'." (when (eq phase 'after-insertion) (let ((new-language (antlr-language-option t))) (or (null new-language) (eq new-language antlr-language) (let ((font-lock (and (boundp 'font-lock-mode) font-lock-mode))) (if font-lock (font-lock-mode 0)) (antlr-mode) (and font-lock (null font-lock-mode) (font-lock-mode 1))))))) (defun antlr-c++-mode-extra (phase option &rest dummies) ;; checkdoc-params: (option dummies) "Warn if C++ option is used with the wrong language. Ask user \(\"y or n\"), if a C++ only option is going to be inserted but `antlr-language' has not the value `c++-mode'. Called in PHASE `before-input', see `antlr-options-alists'." (and (eq phase 'before-input) (not (y-or-n-p (format "Insert C++ %s option? " option))) (error "Didn't insert C++ %s option with language %s" option (cadr (assq antlr-language antlr-language-alist))))) ;;;=========================================================================== ;;; Compute dependencies ;;;=========================================================================== (defun antlr-file-dependencies () "Return dependencies for grammar in current buffer. The result looks like \(FILE \(CLASSES \. SUPERS) VOCABS \. LANGUAGE) where CLASSES = ((CLASS . CLASS-EVOCAB) ...), SUPERS = ((SUPER . USE-EVOCAB-P) ...), and VOCABS = ((EVOCAB ...) . (IVOCAB ...)) FILE is the current buffer's file-name without directory part and LANGUAGE is the value of `antlr-language' in the current buffer. Each EVOCAB is an export vocabulary and each IVOCAB is an import vocabulary. Each CLASS is a grammar class with its export vocabulary CLASS-EVOCAB. Each SUPER is a super-grammar class where USE-EVOCAB-P indicates whether its export vocabulary is used as an import vocabulary." 
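;; Hedged example of the result shape (grammar, class and vocabulary names
;; are invented): for a file "calc.g" defining class CalcParser with export
;; vocabulary Calc, importing vocabulary Common, with no super-grammars and
;; `antlr-language' being `java-mode', the result would look like
;;   ("calc.g" ((("CalcParser" . "Calc"))) (("Calc") "Common") . java-mode)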
(unless buffer-file-name (error "Grammar buffer does not visit a file")) (let (classes exportVocabs importVocabs superclasses default-vocab) (antlr-with-syntax-table antlr-action-syntax-table (goto-char (point-min)) (while (antlr-re-search-forward antlr-class-header-regexp nil) ;; parse class definition -------------------------------------------- (let* ((class (match-string 2)) (sclass (match-string 4)) ;; export vocab defaults to class name (first grammar in file) ;; or to the export vocab of the first grammar in file: (evocab (or default-vocab class)) (ivocab nil)) (goto-char (match-end 0)) (c-forward-syntactic-ws) (while (looking-at "options\\>\\|\\(tokens\\)\\>") (if (match-beginning 1) (antlr-skip-sexps 2) (goto-char (match-end 0)) (c-forward-syntactic-ws) ;; parse grammar option sections ------------------------------- (when (eq (char-after (point)) ?\{) (let* ((beg (1+ (point))) (end (1- (antlr-skip-sexps 1))) (cont (point))) (goto-char beg) (if (re-search-forward "\\ (FILE . EVOCAB) ... (deps (cdr deps0)) ; FILE -> (c . s) (ev . iv) . LANGUAGE (with-error nil) (gen-sep (or (caddr (cadr antlr-makefile-specification)) " ")) (n (and (cdr deps) (cadr antlr-makefile-specification) 0))) (or in-makefile (set-buffer standard-output)) (dolist (dep deps) (let ((supers (cdadr dep)) (lang (cdr (assoc (cdddr dep) antlr-file-formats-alist)))) (if n (incf n)) (antlr-makefile-insert-variable n "" " =") (if supers (insert " " (format (cadr antlr-special-file-formats) (file-name-sans-extension (car dep))))) (dolist (class-def (caadr dep)) (let ((sep gen-sep)) (dolist (class-file (cadr lang)) (insert sep (format class-file (car class-def))) (setq sep " ")))) (dolist (evocab (caaddr dep)) (let ((sep gen-sep)) (dolist (vocab-file (cons (car antlr-special-file-formats) (car lang))) (insert sep (format vocab-file evocab)) (setq sep " ")))) (antlr-makefile-insert-variable n "\n$(" ")") (insert ": " (car dep)) (dolist (ivocab (cdaddr dep)) (insert " " (format (car antlr-special-file-formats) ivocab))) (let ((glibs (antlr-superclasses-glibs supers classes))) (if (cadr glibs) (setq with-error t)) (dolist (super (cddr glibs)) (insert " " (car super)) (if (cdr super) (insert " " (format (car antlr-special-file-formats) (cdr super))))) (insert "\n\t" (caddr antlr-makefile-specification) (car glibs) " $<\n" (car antlr-makefile-specification))))) (if n (let ((i 0)) (antlr-makefile-insert-variable nil "" " =") (while (<= (incf i) n) (antlr-makefile-insert-variable i " $(" ")")) (insert "\n" (car antlr-makefile-specification)))) (if (string-equal (car antlr-makefile-specification) "\n") (backward-delete-char 1)) (when with-error (goto-char (point-min)) (insert antlr-help-unknown-file-text)) (unless in-makefile (copy-region-as-kill (point-min) (point-max)) (goto-char (point-min)) (insert (format antlr-help-rules-intro dirname))))) ;;;###autoload (defun antlr-show-makefile-rules () "Show Makefile rules for all grammar files in the current directory. If the `major-mode' of the current buffer has the value `makefile-mode', the rules are directory inserted at point. Otherwise, a *Help* buffer is shown with the rules which are also put into the `kill-ring' for \\[yank]. This command considers import/export vocabularies and grammar inheritance and provides a value for the \"-glib\" option if necessary. Customize variable `antlr-makefile-specification' for the appearance of the rules. 
If the file for a super-grammar cannot be determined, special file names are used according to variable `antlr-unknown-file-formats' and a commentary with value `antlr-help-unknown-file-text' is added. The *Help* buffer always starts with the text in `antlr-help-rules-intro'." (interactive) (if (null (eq major-mode 'makefile-mode)) (antlr-with-displaying-help-buffer 'antlr-insert-makefile-rules) (push-mark) (antlr-insert-makefile-rules t))) ;;;=========================================================================== ;;; Indentation ;;;=========================================================================== (defun antlr-indent-line () "Indent the current line as ANTLR grammar code. The indentation of non-comment lines are calculated by `c-basic-offset', multiplied by: - the level of the paren/brace/bracket depth, - plus 0/2/1, depending on the position inside the rule: header, body, exception part, - minus 1 if `antlr-indent-item-regexp' matches the beginning of the line starting from the first non-blank. Lines inside block comments are indented by `c-indent-line' according to `antlr-indent-comment'. If `antlr-language' has value `c++-mode', indent line at column 0 if it is matched by `antlr-c++-indent-at-bol-regexp'. For the initialization of `c-basic-offset', see `antlr-indent-style' and, to a lesser extent, `antlr-tab-offset-alist'." (save-restriction (let ((orig (point)) (min0 (point-min)) bol boi indent syntax) (widen) (beginning-of-line) (setq bol (point)) (if (< bol min0) (error "Beginning of current line not visible")) (skip-chars-forward " \t") (setq boi (point)) ;; check syntax at beginning of indentation ---------------------------- (antlr-with-syntax-table antlr-action-syntax-table (antlr-invalidate-context-cache) (setq syntax (antlr-syntactic-context)) (cond ((symbolp syntax) (setq indent nil)) ; block-comments, strings, (comments) ((and (assq antlr-language antlr-indent-at-bol-alist) (looking-at (cdr (assq antlr-language antlr-indent-at-bol-alist)))) (setq syntax 'bol) (setq indent 0)) ; indentation at 0 ((progn (antlr-next-rule -1 t) (if (antlr-search-forward ":") (< boi (1- (point))) t)) (setq indent 0)) ; in rule header ((if (antlr-search-forward ";") (< boi (point)) t) (setq indent 2)) ; in rule body (t (forward-char) (antlr-skip-exception-part nil) (setq indent (if (> (point) boi) 1 0))))) ; in exception part? ;; compute the corresponding indentation and indent -------------------- (if (null indent) ;; Use the indentation engine of cc-mode for block comments. Using ;; it-mode for actions is not easy, especially if the actions come ;; early in the rule body. (progn (goto-char orig) (and (eq antlr-indent-comment t) (not (eq syntax 'string)) (c-indent-line))) ;; do it ourselves (goto-char boi) (unless (symbolp syntax) ; direct indentation (antlr-invalidate-context-cache) (incf indent (antlr-syntactic-context)) (and (> indent 0) (looking-at antlr-indent-item-regexp) (decf indent)) (setq indent (* indent c-basic-offset))) ;; the usual major-mode indent stuff --------------------------------- (setq orig (- (point-max) orig)) (unless (= (current-column) indent) (delete-region bol boi) (beginning-of-line) (indent-to indent)) ;; If initial point was within line's indentation, ;; position after the indentation. Else stay at same point in text. (if (> (- (point-max) orig) (point)) (goto-char (- (point-max) orig))))))) (defun antlr-indent-command (&optional arg) "Indent the current line or insert tabs/spaces. 
With optional prefix argument ARG or if the previous command was this command, insert ARG tabs or spaces according to `indent-tabs-mode'. Otherwise, indent the current line with `antlr-indent-line'." (interactive "*P") (if (or arg (eq last-command 'antlr-indent-command)) (insert-tab arg) (let ((antlr-indent-comment (and antlr-indent-comment t))) ; dynamic (antlr-indent-line)))) (defun antlr-electric-character (&optional arg) "Insert the character you type and indent the current line. Insert the character like `self-insert-command' and indent the current line as `antlr-indent-command' does. Do not indent the line if * this command is called with a prefix argument ARG, * there are characters except whitespaces between point and the beginning of the line, or * point is not inside a normal grammar code, { and } are also OK in actions. This command is useful for a character which has some special meaning in ANTLR's syntax and influences the auto indentation, see `antlr-indent-item-regexp'." (interactive "*P") (if (or arg (save-excursion (skip-chars-backward " \t") (not (bolp))) (antlr-with-syntax-table antlr-action-syntax-table (antlr-invalidate-context-cache) (let ((context (antlr-syntactic-context))) (not (and (numberp context) (or (zerop context) (memq last-command-char '(?\{ ?\})))))))) (self-insert-command (prefix-numeric-value arg)) (self-insert-command (prefix-numeric-value arg)) (antlr-indent-line))) ;;;=========================================================================== ;;; Mode entry ;;;=========================================================================== (defun antlr-c-common-init () "Like `c-common-init' except menu, auto-hungry and c-style stuff." ;; X/Emacs 20 only (make-local-variable 'paragraph-start) (make-local-variable 'paragraph-separate) (make-local-variable 'paragraph-ignore-fill-prefix) (make-local-variable 'require-final-newline) (make-local-variable 'parse-sexp-ignore-comments) (make-local-variable 'indent-line-function) (make-local-variable 'indent-region-function) (make-local-variable 'comment-start) (make-local-variable 'comment-end) (make-local-variable 'comment-column) (make-local-variable 'comment-start-skip) (make-local-variable 'comment-multi-line) (make-local-variable 'outline-regexp) (make-local-variable 'outline-level) (make-local-variable 'adaptive-fill-regexp) (make-local-variable 'adaptive-fill-mode) (make-local-variable 'imenu-generic-expression) ;set in the mode functions (and (boundp 'comment-line-break-function) (make-local-variable 'comment-line-break-function)) ;; Emacs 19.30 and beyond only, AFAIK (if (boundp 'fill-paragraph-function) (progn (make-local-variable 'fill-paragraph-function) (setq fill-paragraph-function 'c-fill-paragraph))) ;; now set their values (setq paragraph-start (concat page-delimiter "\\|$") paragraph-separate paragraph-start paragraph-ignore-fill-prefix t require-final-newline t parse-sexp-ignore-comments t indent-line-function 'c-indent-line indent-region-function 'c-indent-region outline-regexp "[^#\n\^M]" outline-level 'c-outline-level comment-column 32 comment-start-skip "/\\*+ *\\|// *" comment-multi-line nil comment-line-break-function 'c-comment-line-break-function adaptive-fill-regexp nil adaptive-fill-mode nil) ;; we have to do something special for c-offsets-alist so that the ;; buffer local value has its own alist structure. 
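;; (Added note, hedged): `copy-alist' copies the list spine and each
;; association cell, so later destructive changes to the buffer-local value
;; cannot leak back into the global `c-offsets-alist'.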
(setq c-offsets-alist (copy-alist c-offsets-alist)) ;; setup the comment indent variable in a Emacs version portable way ;; ignore any byte compiler warnings you might get here (make-local-variable 'comment-indent-function) (setq comment-indent-function 'c-comment-indent)) (defun antlr-language-option (search) "Find language in `antlr-language-alist' for language option. If SEARCH is non-nil, find element for language option. Otherwise, find the default language." (let ((value (and search (save-excursion (goto-char (point-min)) (re-search-forward (cdr antlr-language-limit-n-regexp) (car antlr-language-limit-n-regexp) t)) (match-string 1))) (seq antlr-language-alist) r) ;; Like (find-VALUE antlr-language-alist :key 'cddr :test 'member) (while seq (setq r (pop seq)) (if (member value (cddr r)) (setq seq nil) ; stop (setq r nil))) ; no result yet (car r))) ;;;###autoload (defun antlr-mode () "Major mode for editing ANTLR grammar files. \\{antlr-mode-map}" (interactive) (c-initialize-cc-mode) ; for java syntax table (kill-all-local-variables) ;; ANTLR specific ---------------------------------------------------------- (setq major-mode 'antlr-mode mode-name "Antlr") (setq local-abbrev-table antlr-mode-abbrev-table) (unless antlr-mode-syntax-table (setq antlr-mode-syntax-table (make-syntax-table)) (c-populate-syntax-table antlr-mode-syntax-table)) (set-syntax-table antlr-mode-syntax-table) (unless antlr-action-syntax-table (let ((slist (nth 3 antlr-font-lock-defaults))) (setq antlr-action-syntax-table (copy-syntax-table antlr-mode-syntax-table)) (while slist (modify-syntax-entry (caar slist) (cdar slist) antlr-action-syntax-table) (setq slist (cdr slist))))) (use-local-map antlr-mode-map) (make-local-variable 'antlr-language) (unless antlr-language (setq antlr-language (or (antlr-language-option t) (antlr-language-option nil)))) (if (stringp (cadr (assq antlr-language antlr-language-alist))) (setq mode-name (concat "Antlr." (cadr (assq antlr-language antlr-language-alist))))) ;; indentation, for the C engine ------------------------------------------- (antlr-c-common-init) (setq indent-line-function 'antlr-indent-line indent-region-function nil) ; too lazy (setq comment-start "// " comment-end "") (c-set-style "java") (if (eq antlr-language 'c++-mode) (setq c-conditional-key c-C++-conditional-key c-comment-start-regexp c-C++-comment-start-regexp c-class-key c-C++-class-key c-extra-toplevel-key c-C++-extra-toplevel-key c-access-key c-C++-access-key c-recognize-knr-p nil) (setq c-conditional-key c-Java-conditional-key c-comment-start-regexp c-Java-comment-start-regexp c-class-key c-Java-class-key c-method-key nil c-baseclass-key nil c-recognize-knr-p nil c-access-key (and (boundp 'c-Java-access-key) c-Java-access-key)) (and (boundp 'c-inexpr-class-key) (boundp 'c-Java-inexpr-class-key) (setq c-inexpr-class-key c-Java-inexpr-class-key))) ;; various ----------------------------------------------------------------- (make-local-variable 'font-lock-defaults) (setq font-lock-defaults antlr-font-lock-defaults) (easy-menu-add antlr-mode-menu) (make-local-variable 'imenu-create-index-function) (setq imenu-create-index-function 'antlr-imenu-create-index-function) (make-local-variable 'imenu-generic-expression) (setq imenu-generic-expression t) ; fool stupid test (and antlr-imenu-name ; there should be a global variable... 
(fboundp 'imenu-add-to-menubar) (imenu-add-to-menubar (if (stringp antlr-imenu-name) antlr-imenu-name "Index"))) (antlr-set-tabs) (run-hooks 'antlr-mode-hook)) ;; A smarter version of `group-buffers-menu-by-mode-then-alphabetically' (in ;; XEmacs) could use the following property. The header of the submenu would ;; be "Antlr" instead of "Antlr.C++" or (not and!) "Antlr.Java". (put 'antlr-mode 'mode-name "Antlr") ;;;###autoload (defun antlr-set-tabs () "Use ANTLR's convention for TABs according to `antlr-tab-offset-alist'. Used in `antlr-mode'. Also a useful function in `java-mode-hook'." (if buffer-file-name (let ((alist antlr-tab-offset-alist) elem) (while alist (setq elem (pop alist)) (and (or (null (car elem)) (eq (car elem) major-mode)) (or (null (cadr elem)) (string-match (cadr elem) buffer-file-name)) (setq tab-width (caddr elem) indent-tabs-mode (cadddr elem) alist nil)))))) ; LocalWords: antlr ANother ANTLR's Cpp Lexer TreeParser esp refs VALUEs ea ee ; LocalWords: Java's Nomencl ruledef tokendef ruleref tokenref setType ader ev ; LocalWords: ivate syntab lexer treeparser lic rotected rivate bor boi AFAIK ; LocalWords: slist knr inexpr unhide jit GENS SEP GEN sTokenTypes hpp cpp DEP ; LocalWords: VOCAB EVOCAB Antlr's TokenTypes exportVocab incl excl SUPERS gen ; LocalWords: VOCABS IVOCAB exportVocabs importVocabs superclasses vocab kens ; LocalWords: sclass evocab ivocab importVocab deps glibs supers sep dep lang ; LocalWords: htmlize subrule jde Sather sather eiffel SGML's XYYZZ namespace ; LocalWords: mangleLiteralPrefix namespaceStd namespaceAntlr genHashLines AST ; LocalWords: testLiterals defaultErrorHandler codeGenMakeSwitchThreshold XXX ; LocalWords: codeGenBitsetTestThreshold bitset analyzerDebug codeGenDebug boc ; LocalWords: buildAST ASTLabelType charVocabulary caseSensitive autoTokenDef ; LocalWords: caseSensitiveLiterals classHeaderSuffix keywordsMeltTo NAMEs LL ; LocalWords: warnWhenFollowAmbig generateAmbigWarnings ARGs tokenrefs withp ; LocalWords: outsidep existsp JOR sert endif se ndef mport nclude pragma LE ; LocalWords: TION ASE RSION OMPT ava serting VEL mparison AMMAR ;;; antlr-mode.el ends here antlr-2.7.7/extras/antlr-x.y.z.README0000644000175000017500000000106510522211615017073 0ustar twernertwernerANTLR, (AN)other (T)ool for (L)anguage (R)ecognition - formerly known as PCCTS - is a language tool that provides a framework for constructing recognizers, compilers, and translators from grammatical descriptions for the following languages: Java | C++ | C# | Python Canonical homepage: http://www.antlr.org Canonical download: http://www.antlr.org Canonical build & install: $ ./configure --prefix=/usr --disable-examples $ make $ make install For further information check out README.txt and INSTALL.txt in share/doc/antlr-x.y.z under the installation prefix. antlr-2.7.7/extras/antlr-jedit.xml0000644000175000017500000001421310522211615017046 0ustar twernertwerner header{ } header { } header { } tokens } options } [ ] { } /**/ /** */ /* */ // " " ' ' < > : ; class extends returns private protected /**/ /** */ /* */ // " " ' ' [ ] { } => | # ! ?
^ + * ( ) = : { /**/ /** */ /* */ " " ' ' < > // language k importVocab exportVocab testLiterals defaultErrorHandler codeGenMakeSwitchThreshold codeGenBitsetTestThreshold buildAST analyzerDebug codeGenDebug ASTLabelType charVocabulary interactive caseSensitive ignore paraphrase caseSensitiveLiterals classHeaderPrefix classHeaderSuffix mangleLiteralPrefix warnWhenFollowAmbig generateAmbigWarnings filter namespace namespaceStd namespaceAntlr genHashLines greedy true false antlr-2.7.7/configure.in0000644000175000017500000011141010522211616015110 0ustar twernertwernerdnl --*- sh -*-- ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger ## ## ## ## ...............Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## Process this file with autoconf to produce a configure ## script. AC_INIT(antlr, 2.7.7) AC_CONFIG_SRCDIR([LICENSE.txt]) AC_CONFIG_AUX_DIR(scripts) ## This shall be the very first config file. Do not change ## this. AC_CONFIG_FILES([scripts/config.vars]) AC_CONFIG_FILES([scripts/config.deps]) AC_CONFIG_FILES([scripts/config.make]) AC_CONFIG_FILES([antlr/Version.java]) AC_SUBST_FILE([stdvars]) AC_SUBST_FILE([stddeps]) AC_SUBST_FILE([stdmake]) ## ANTLR's core libraries for each supporte language. The variable ## in uppercase letters denotes the absolute name of the library. ## When in lower cases letters - see below - the variable just ## holds the basename. AC_SUBST([ANTLR_JAR]) AC_SUBST([ANTLR_LIB]) AC_SUBST([ANTLR_NET]) AC_SUBST([ANTLR_PY]) AC_SUBST([ASTFRAME_NET]) AC_SUBST([antlr_jar]) AC_SUBST([antlr_lib]) AC_SUBST([antlr_net]) AC_SUBST([antlr_py]) AC_SUBST([astframe_net]) AC_SUBST([ANTLRFLAGS]) AC_SUBST([ANTLR]) AC_SUBST([ANTLR_ACTION_FILES]) AC_SUBST([ANTLR_ANTLR_FILES]) AC_SUBST([ANTLR_COMPILE_CMD]) AC_SUBST([ANTLR_CYGWIN]) AC_SUBST([ANTLR_MINGW]) AC_SUBST([ANTLR_TOKDEF_FILES]) AC_SUBST([ANTLR_WIN32]) AC_SUBST([ANTLR_WITH_ANTLR_CMD]) AC_SUBST([ANTLR_WITH_ANTLR_JAR]) AC_SUBST([ARFLAGS]) AC_SUBST([AR]) AC_SUBST([AS]) AC_SUBST([BOOTCLASSPATH]) AC_SUBST([CSHARPCFLAGS]) AC_SUBST([CSHARPC]) AC_SUBST([CSHARP_COMPILE_CMD]) AC_SUBST([CLR]) AC_SUBST([CXX_COMPILE_CMD]) AC_SUBST([CXX_LIB_CMD]) AC_SUBST([CXX_LINK_CMD]) AC_SUBST([CYGPATH]) AC_SUBST([C_COMPILE_CMD]) AC_SUBST([DEBUG]) AC_SUBST([EXEEXT]) AC_SUBST([JARFLAGS]) AC_SUBST([JAR]) AC_SUBST([JAR_CMD]) AC_SUBST([JAVACFLAGS]) AC_SUBST([JAVAC]) AC_SUBST([JAVAFLAGS]) AC_SUBST([JAVA]) AC_SUBST([JAVA_CMD]) AC_SUBST([JAVA_COMPILE_CMD]) AC_SUBST([LIBEXT]) AC_SUBST([MAKE]) AC_SUBST([OBJEXT]) AC_SUBST([PATCHLEVEL]) AC_SUBST([PYTHONFLAGS]) AC_SUBST([PYTHON]) AC_SUBST([SUBVERSION]) AC_SUBST([TIMESTAMP]) AC_SUBST([TOUCH]) AC_SUBST([VERBOSE]) AC_SUBST([VERSION]) AC_SUBST([WITH_EXAMPLES]) AC_SUBST([abs_this_builddir]) AC_SUBST([cxx]) AC_SUBST([jar]) AC_SUBST([java]) AC_SUBST([javac]) AC_SUBST([TAR]) AC_SUBST([RMF]) AC_SUBST([CP]) AC_SUBST([ECHO]) AC_SUBST([FIND]) # create strong named assemblies [true|false(*)] AC_SUBST([STRONGNAME]) # file containing public/private key pair for creating strong named # assemblies (no default value) AC_SUBST([KEYFILE]) # Allow partially trusted callers (C#) AC_SUBST([APTC]) ## introduce package information as autoconf vars. VERSION=`echo $PACKAGE_VERSION | cut -d . -f 1` SUBVERSION=`echo $PACKAGE_VERSION | cut -d . -f 2` PATCHLEVEL=`echo $PACKAGE_VERSION | cut -d . 
-f 3` TIMESTAMP=`date +%Y%m%d` ## @abs_this_builddir@ - absolute path to top of build directory. ## According to GNU autoconf we can rely on that there's a proper ## pwd around. abs_this_builddir=`pwd` ## This is how we compile Java files .. JAVA_COMPILE_CMD="/bin/sh $abs_this_builddir/scripts/javac.sh" ## This is how we run Java .. JAVA_CMD="/bin/sh $abs_this_builddir/scripts/java.sh" ## This is how we pack Java (class) files .. JAR_CMD="/bin/sh $abs_this_builddir/scripts/jar.sh" ## And this is how we are going to compile ANTLR grammar files .. ANTLR_COMPILE_CMD="/bin/sh $abs_this_builddir/scripts/antlr.sh" ## This is how we compile CSHARP files .. CSHARP_COMPILE_CMD="/bin/sh $abs_this_builddir/scripts/csc.sh" ## This is how we compile C++ files and how we are going to create ## libantlr.a or antlr.lib etc. .. CXX_COMPILE_CMD="/bin/sh $abs_this_builddir/scripts/cxx.sh" CXX_LIB_CMD="/bin/sh $abs_this_builddir/scripts/lib.sh" CXX_LINK_CMD="/bin/sh $abs_this_builddir/scripts/link.sh" C_COMPILE_CMD="/bin/sh $abs_this_builddir/scripts/c.sh" ANTLR_JAR="$abs_this_builddir/antlr/antlr.jar" ANTLR_NET="$abs_this_builddir/lib/antlr.runtime.dll" ASTFRAME_NET="$abs_this_builddir/lib/antlr.astframe.dll" ANTLR_PY="$abs_this_builddir/lib/python/antlr/python.py" ## Note: values might be overriden in C++ section. OBJEXT=".o" LIBEXT=".a" ANTLR_LIB="$abs_this_builddir/lib/cpp/src/libantlr.a" stdvars="scripts/config.vars" stddeps="scripts/config.deps" stdmake="scripts/config.make" ## ## option --enable-java ## AX_ARG_ENABLE( [java], [LANG_JAVA], [enable or disable ANTLR for Java (enabled)], [1], ) ## ## option --enable-cxx ## AX_ARG_ENABLE( [cxx], [LANG_CXX], [enable or disable ANTLR for C++ (enabled)], [1], ) ## ## option --enable-python ## AX_ARG_ENABLE( [python], [LANG_PY], [enable or disable ANTLR for Python (enabled).], [1], ) ## ## option --enable-csharp ## AX_ARG_ENABLE( [csharp], [LANG_CS], [enable or disable ANTLR for C# (enabled)], [1], ) ## ## option --enable-verbose= ## AX_ARG_ENABLE( [verbose], [VERBOSE], [turn on verbosity when building package.], [0], ) ## ## option --enable-debug= ## AX_ARG_ENABLE( [debug], [DEBUG], [set debug level - any value greater zero enables a debug version], [0], ) ## ## option --enable-examples ## WITH_EXAMPLES=1 AX_ARG_ENABLE( [examples], [WITH_EXAMPLES], [include examples into this configuration (enabled)], [1], ) ## ## option --enable-allow-partially-trusted-callers ## APTC=1 AX_ARG_ENABLE( [allow-partially-trusted-callers], [APTC], [allow partially trusted callers (C#)], [1], ) case $APTC in 0) APTC=false ;; *) APTC=true ;; esac ## ## option --with-antlr-jar ## ANTLR_WITH_ANTLR_JAR="" AC_ARG_WITH( [antlr-jar], [AC_HELP_STRING( [--with-antlr-jar=ARG], [use given file (antlr.jar) to bootstrap]) ],[ if test -n "${ANTLR_WITH_ANTLR_CMD}" ; then opts="--with-antlr-jar,--with-antlr-cmd" AC_MSG_ERROR( [this configuration options mutually exclusive: $opts]) fi ANTLR_WITH_ANTLR_JAR="${withval}"] ) ## ## option --with-antlr-cmd ## ANTLR_WITH_ANTLR_CMD="" AC_ARG_WITH( [antlr-cmd], [AC_HELP_STRING( [--with-antlr-cmd=ARG], [use given command to compile ANTLR grammar files while bootstrapping..]) ],[ if test -n "${ANTLR_WITH_ANTLR_JAR}" ; then opts="--with-antlr-jar,--with-antlr-cmd" AC_MSG_ERROR( [this configuration options mutually exclusive: $opts]) fi ANTLR_WITH_ANTLR_CMD="${withval}" ] ) ## ## option --with-strong-assemblies ## STRONGNAME=false KEYFILE= AC_ARG_WITH( [strong-assemblies], [AC_HELP_STRING( [--with-strong-assemblies=ARG], [enable strong named assemblies by 
passing a keyfile holding a public/private key pair (only useful when building C#)]) ],[ STRONGNAME=true KEYFILE="${withval}" ] ) AC_ARG_WITH( [bootclasspath], [AC_HELP_STRING( [--with-bootclasspath=ARG], [use this option to set bootclasspath when using jikes. ARG is a white space seperated list of absolute file or directory names, typically /opt/jdk1.3/jre/lib/rt.jar. In most cases this option is not requird as configure tries to detect rt.jar itself. If configure fails or detects the wrong boot library you may use this option. Note that this option is only used when using jikes. ]) ],[ BOOTCLASSPATH="${withval}" ] ) AX_ARG_WITH( [cxx], [CXX], ) AX_ARG_WITH( [make], [MAKE], ) AX_ARG_WITH( [java], [JAVA], ) AX_ARG_WITH( [javac], [JAVAC], ) AX_ARG_WITH( [jar], [JAR], ) AX_ARG_WITH( [python], [PYTHON], ) AX_ARG_WITH( [csharpc], [CSHARPC], ) #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# # S T A R T T E S T S # #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# # get host_os set AC_CANONICAL_HOST # Detect cygwin or mingw ANTLR_CYGWIN=no ANTLR_MINGW=no AC_MSG_CHECKING(whether this is Cygwin) case $host_os in *cygwin* ) ANTLR_CYGWIN=yes ;; *) AC_MSG_RESULT(no) ;; esac AC_MSG_CHECKING(whether this is MinGW) case $host_os in *mingw* ) ANTLR_MINGW=yes ;; *) AC_MSG_RESULT(no) ;; esac ## Set common file extensions depending on OS we are running on. ## File extensions depend on C++/C compiler in use. This values ## are just guesses and redefined further below. case "${host_os}" in *mingw*|*cygwin*) OBJEXT=".o" LIBEXT=".a" EXEEXT=".exe" ;; *) OBJEXT=".o" LIBEXT=".a" EXEEXT="" ;; esac ## Test whether we have cygpath test -z "$CYGPATH" && AC_PATH_PROGS(CYGPATH, cygpath$EXEEXT ) AC_SUBST([CYGPATH_M]) AC_SUBST([CYGPATH_W]) if test -n "$CYGPATH" ; then CYGPATH_M="${CYGPATH} -m" CYGPATH_W="${CYGPATH} -w" else CYGPATH_M="echo" CYGPATH_W="echo" fi AC_ARG_VAR( [ANTLRFLAGS], [Use environment variable ANTLRFLAGS to pass some extra flags to antlr when compiling grammar (*.g) files. ] ) AX_PATH_PROGS([CP], [/bin/cp /usr/bin/cp cp]) AX_PATH_PROGS([ECHO],[/bin/echo /usr/bin/echo echo]) AX_PATH_PROGS([FIND],[/bin/find /usr/bin/find find]) AX_VAR_HEAD([CP]) AX_VAR_HEAD([ECHO]) AX_VAR_HEAD([FIND]) #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# # MAKE # #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# ## Check whether there's a make program around. We search for a ## couple of well know names within $PATH. A user may skip this ## search by providing variable $MAKE. AC_ARG_VAR( [MAKE], [By default we search for "make", "gmake" and "gnumake" in your PATH as well as "/bin/make" and "/usr/bin/make". You may override this search by using enviromnent variable $MAKE. Note that a GNU make is required to build this package. However, when providing your own candidate a check for GNU make is skipped and all bets are on. ] ) ## @MAKE@ shall contain absolut path name of make program found. ## Search for well known make programs - take user given MAKE ## into account. The result will be a list of valid make prog- ## grams found and will be stored in variable MAKE. user_make="${MAKE}" AX_PATH_PROGS( [MAKE], [make gmake gnumake /bin/make /usr/bin/make] ) ## right now we need to have a GNU make around, other makes are ## not supported and likely to fail. if test "x${user_make}" == "x" ; then AX_GNU_MAKE( [MAKE], [AC_MSG_ERROR( [package requires GNU make])] ) fi ## we lookup 'make' in PATH. 
If the one found is not the same ## as the configured one we issue a warning message. AC_PATH_PROGS([just_make],[make],[%]) case "${just_make}" in ${MAKE}) ;; *) AC_CONFIG_COMMANDS([notice],[ AC_MSG_NOTICE([ --------------------------------------------------------- * WARNING * This package has been configured to be build by using $MAKE It is very likely that just running "make" from the command line will fail. Please remember therefore to use the configured version. ========================================================= ]) ],[ MAKE="${MAKE}" ] ) ;; esac #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# # JAVA # #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# ## @JAVAC@ shall contain absolut path name of javac program and ## similar to CXXFLAGS, @JAVACFLAGS@ shall contain all options ## required to compile JAVA source files. AC_ARG_VAR( [JAVAC], [By default we search for "jikes", "javac" and "gcj" in your $PATH on how to comile Java source files. You may override this search by using enviromnent variable $JAVAC. JAVAC may contain a list of candidates, either as absolute path names or as a relative one. In case a relative name is given, a search in $PATH will take place, otherwise the absolute name is tried. ] ) AC_ARG_VAR( [JAVACFLAGS], [Environment variable JAVACFLAGS can be used to change or override all flags required to compile Java source files. Note that JAVACFLAGS understands the following: "+ flag1 flag2 .." append "flag1 flag2 .." to precomputed list "- flag1 flag2 .." prepend "flag1 flag2 .." to precomputed list "= flag1 flag2 .. override with flag1 flag2 ..". If there is a need to hardwire additional flags then edit scripts/javac.sh.in and run "CONFIG_FILES=scripts/javac.sh ./config.status" again. ] ) ## @JAVA@ shall contain absolut path name of java program and ## similar to CXXFLAGS, @JAVAFLAGS@ shall contain all options ## required to run JAVA class files. AC_ARG_VAR( [JAVA], [By default we search for "java" and "gij" in your PATH on how to run Java class files. You may override this search by using enviromnent variable $JAVA. JAVA may contain a list of candidates, either as absolute path name or as a relative one. In case of a relative name, a search in $PATH will take place. Otherwise the absolute name will be accepted if existing. ] ) AC_ARG_VAR( [JAVAFLAGS], [Shall contain all flags required to run Java class files. You may override by using environment variable JAVAFLAGS. ] ) AX_JAVA_PROGS( [JAVA], [java gij], [AX_VAR_HEAD([JAVA])] ) AX_JAVA_PROGS( [JAVAC], [jikes javac gcj], [AX_VAR_HEAD([JAVAC])] ) AX_JAVA_PROGS( [JAR], [fastjar jar], [ AX_VAR_HEAD([JAR]) ] ) case $LANG_JAVA in 1) jar="`basename $JAR`" jar="`echo ${jar}|sed 's,\..*$,,'`" ## This macro tries to determine which javac compiler is ## being used. Well known compilers are gcj, jikes and ## javac. A unknown compiler is treated as if javac has ## been given in the very, very naive hope that all ## javac compiler have at least the same options as the ## original, ie. javac. ## If your compiler is not in the list and does not be- ## have like javac, then you need to extend this macro ## by writing a specialized test. AX_WHICH_JAVAC([javac]) ## Jikes cannot live without having a Java around. Have ## therefore a look into Java installations found for ## a 'rt.jar'. 
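## Added usage note (hedged; the JDK path below is only an example taken
## from the option's help text): when bootstrapping with jikes, the boot
## classpath can be supplied explicitly, e.g.
##   ./configure JAVAC=jikes --with-bootclasspath=/opt/jdk1.3/jre/lib/rt.jar
## otherwise the search below tries to locate rt.jar relative to the java
## command that was found.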
test -n "${BOOTCLASSPATH}" && { for f in ${BOOTCLASSPATH} ; do AC_MSG_CHECKING([bootclasspath \"$f\"]) test -f "${f}" -o -d "${f}" || { AC_MSG_RESULT([does not exist]) AC_MSG_ERROR( [ =================================================================== Please check arguments given to --with-bootclasspath or \${BOOTCLASSPATH} Each argument must be a valid file or directory. Use whitespace to seperate your args. =================================================================== ]) } AC_MSG_RESULT([good]) done } test -z "${BOOTCLASSPATH}" && { case "${javac}" in jikes) BOOTCLASSPATH="" set x ${JAVA} while test $# -gt 1 ; do x="$2" ; shift d=`dirname $x` test -d "$d" || continue d=`(cd $d && cd .. && pwd)` test -d "$d" || continue test -f "$d/jre/lib/rt.jar" && { BOOTCLASSPATH="$d/jre/lib/rt.jar" ## we need to try whether jikes accept .. (tbd) break } test -f "$d/lib/rt.jar" && { BOOTCLASSPATH="$d/lib/rt.jar" ## we need to try whether jikes accept .. (tbd) break } done ## go for some unusual locations (MacOS) test -z "${BOOTCLASSPATH}" && { fwdir=/System/Library/Frameworks/JavaVM.framework/Versions for x in 1.4.1 1.3.1 ; do if test -f "$fwdir/$x/Classes/classes.jar" ; then BOOTCLASSPATH="$fwdir/$x/Classes/classes.jar" break fi done } ## give up in case we can't set. test -z "${BOOTCLASSPATH}" && { AC_MSG_ERROR( [Unable to set BOOTCLASSPATH - there is no rt.jar around.]) } ;; *) BOOTCLASSPATH="" ;; esac } test -n "${BOOTCLASSPATH}" && { ## Finalize BOOTCLASSPATH. Depending on platform join arguments using ## a different seperator. case $build_os in cygwin) sep=";" ;; *) sep=":" ;; esac set x $BOOTCLASSPATH ; shift BOOTCLASSPATH="$1" shift while test $# -gt 0 ; do BOOTCLASSPATH="${BOOTCLASSPATH}${sep}${1}" shift done } ## Use Java first in list. AX_VAR_HEAD([JAVA]) ;; esac #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# # C++ # #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# case $LANG_CXX in 1) AX_PATH_PROGS( [AR], [tlib lib ar /usr/bin/ar] ) ## Try to figure out what C++ compiler shall be used. Note that CC ## clashes on cygwin. While CC is usually SUN's C++ compiler name, ## CC is also present on Cygwin - it's just an alias for gcc. The ## real alias is actually 'cc' but names are searched in non- ## sensitive manner. To solve this problem we use kind of hack ## here and list compilers availabe to known operating systems. case $build_os in cygwin*|mingw*) ## On Cygwin/Microsoft we are aware of Borland C++, Microsoft ## C++ and GNU. cxx_compiler_list="bcc32 cl g++" # FIXME: for bcc32 c_compiler_list="cl gcc" ;; *) ## On other platforms we now HP C++ (aCC), IBM C++ (xlC*) and ## of course GNU. If there's a GNU compiler around we prefer ## GNU. This avoids also a problem with vendors having CC ## a symbolic link to "gcc" instead of "g++". cxx_compiler_list="g++ aCC CC xlC xlC_r cxx c++" # FIXME: for other unix flavours c_compiler_list="cc gcc xlc_r acc" ;; esac ## Find a compiler for me. If compiler is not in list you can al- ## ways override by using environment varialbe CXX. AC_PROG_CXX([${cxx_compiler_list}]) AC_PROG_CC([${c_compiler_list}]) ## just overrule what autoconf figured out - we never asked for ## this anyway. Our handling of compiler options is done below ## in the fine tuning section. CXXFLAGS="" ## 'cxx' shall be the canonical compiler name. For example, gcc ## cl, bcc, CC, etc. Note that this is in general not equal to CXX. ## For example, CYGWIN appears to have c++ as name for g++ and cc ## as alias for gcc. 
## CXX is used to call the compiler, 'cxx' shall be used for ## decisions based on compiler in use. cxx="" if test "x$GXX" = xyes; then cxx="gcc" else cxx=`basename $CXX` cxx=`echo ${cxx}|sed 's,\.@<:@^.@:>@*$,,'` fi case ${cxx} in gcc*) cxx='gcc' ;; cl*|CL*) cxx='cl' ## check whether this is Microsoft C++ (tbd) ;; bcc32*|BCC32*) cxx='bcc32' ## check whether this is Borland C++ (tbd) ;; CC*) ## check whether this is SUN C++ (tbd) cxx="CC" ;; xlC*|xlC_r*) cxx="xlC" ## check whether this is IBM C++ (tbd) ;; aCC*) cxx='aCC' ## check whether this is HP C++ (tbd) ;; cxx*) cxx='cxx' ## check for Digital UNIX cxx (Tru64)?? ;; *) ## unknown compiler - good luck. AX_MSG_UNKOWN_CXX ;; esac ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## COMPILER TUNING SECTION ## ##==============================================================## ## compiler tuning has basically removed from this configure ## script as it appears not to be handy and practical. All ## compiler flags are set in cxx.sh.in. If there is any ## change required, go there and change. ## Note that flags given in this file may overrule settings ## given in cxx.sh.in. Therefore, if you "add" flags here, ## put a "+" in front of variable CXXFLAGS. For example, let's ## say you want to add "-g". Then do this: ## ## CXXFLAGS="-g" ## .. ## CXXFLAGS="+ ${CXXFLAGS}" ## ## The addition of "+" CXXFLAGS should be the last action for ## that variable. The net effect is that "-g" will be added to ## flags set in cxx.sh.in. So the result may look like ## gcc -Wall -c -g .. ## ## Similar, put a "-" in front to get "gcc -g -Wall -c .." and ## put nothing or a "=" in front to get "gcc -g ..". ## ## Similar to CXXFLAGS are LDFLAGS and ARFLAGS for linking ## and making a static library. case "${cxx}" in cl|bcc32) OBJEXT=".obj" LIBEXT=".lib" EXEEXT=".exe" ANTLR_LIB="$abs_this_builddir/lib/cpp/src/antlr.lib" CPP="${cxx}" CPPFLAGS="-EP" ;; *) OBJEXT=".o" ;; esac LDFLAGS= AX_VAR_HEAD([AR]) ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## END COMPILER TUNING SECTION ## ##==============================================================## # Checks for header files. AC_HEADER_STDC AC_CHECK_HEADERS([stdlib.h unistd.h]) # Checks for typedefs, structures, and compiler characteristics. AC_C_CONST AC_C_INLINE # Checks for library functions. AC_FUNC_MALLOC AC_CHECK_FUNCS([strcasecmp]) ## Some further specific test required as are using std C++. ## (tbd) ;; esac ## test -z "$DOXYGEN" && AC_PATH_PROG(DOXYGEN, doxygen, doxygen, ) # This seems to convince configure to use an absolute path to the backup # install-sh script. ac_install_sh="$PWD/scripts/install-sh" AC_PROG_INSTALL AC_PROG_RANLIB test -z "$MKDIR" && AC_PATH_PROG(MKDIR, mkdir$EXEEXT, mkdir$EXEEXT ) test -z "$RM" && AC_PATH_PROG(RM, rm$EXEEXT, rm$EXEEXT ) AX_PATH_PROGS( [TAR], [gnutar tar], [AX_VAR_HEAD([TAR])] ) AX_PATH_PROGS( [TOUCH], [/bin/touch /usr/bin/touch touch], [AX_VAR_HEAD([TOUCH])] ) test -z "$CHMOD" && AC_PATH_PROG(CHMOD, chmod$EXEEXT, chmod$EXEEXT ) test -z "$SED" && AC_PATH_PROG(SED, sed$EXEEXT, sed$EXEEXT ) test -z "$CAT" && AC_PATH_PROG(CAT, cat$EXEEXT, cat$EXEEXT ) test -z "$GREP" && AC_PATH_PROG(GREP, grep$EXEEXT, grep$EXEEXT ) #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# # PYTHON # #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# AC_ARG_VAR([PYTHON], [By default we search for "python" in $PATH to execute Python files. 
Override this by providing a list of candidates in environment variable $PYTHON and use whitespace as spereration character. A candidate can be either a relative or absolute path name. In the former case a lookup in $PATH takes place, in the latter, the absolute path name must exist.]) AC_ARG_VAR([PYTHONFLAGS],[ Shall contain all flags required to run Python. Override the default by using environment variable $PYTHONFLAGS. ]) AX_PYTHON_PROGS( [PYTHON], [python], [AX_VAR_HEAD([PYTHON])] ) case $LANG_PY in 1) # We need a script that wrap Python calls in order to make Python # ANTLR aware. This script needs to be executable. AC_CONFIG_FILES( [scripts/python.sh], [${CHMOD} a+x scripts/python.sh], [CHMOD=${CHMOD}] ) AC_CONFIG_FILES( [scripts/pyantlr.sh:scripts/pyinst.sh.in], [${CHMOD} a+x scripts/pyantlr.sh], [CHMOD=${CHMOD}] ) AC_CONFIG_FILES( [lib/python/Makefile] ) # We have a Makefile that loops through all python examples. case $WITH_EXAMPLES in 1 ) AC_CONFIG_FILES( [examples/python/Makefile] ) AC_CONFIG_FILES([ examples/python/asn1/Makefile \ examples/python/ASTsupport/Makefile \ examples/python/calc/Makefile \ examples/python/columns/Makefile \ examples/python/exprAST/Makefile \ examples/python/filter/Makefile \ examples/python/filterWithRule/Makefile \ examples/python/heteroAST/Makefile \ examples/python/HTML/Makefile \ examples/python/IDL/Makefile \ examples/python/imagNodeAST/Makefile \ examples/python/includeFile/Makefile \ examples/python/inherit.tinyc/Makefile \ examples/python/java/Makefile \ examples/python/lexerTester/Makefile \ examples/python/lexRewrite/Makefile \ examples/python/linkChecker/Makefile \ examples/python/multiLexer/Makefile \ examples/python/multiParser/Makefile \ examples/python/parseBinary/Makefile \ examples/python/pascal/Makefile \ examples/python/cpp/Makefile \ examples/python/preserveWhiteSpace/Makefile \ examples/python/tinybasic/Makefile \ examples/python/tinyc/Makefile \ examples/python/transform/Makefile \ examples/python/treewalk/Makefile \ examples/python/unicode/Makefile \ examples/python/unicode.IDENTs/Makefile \ examples/python/xml/Makefile ]) ;; esac ;; esac #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# # CSHARP # #xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx# AC_ARG_VAR([CSHARPC], [By default we search for "cscc", "msc" and "csc" in $PATH to compile C# files. Override this by providing a list of candidates in environment variable $CSHARP and use whitespace as spereration character. A candidate can be either a relative or absolute path name. In the former case a lookup in $PATH takes place, in the latter, the absolute path name must exist.]) AC_ARG_VAR([CSHARPCFLAGS],[ Shall contain all flags required to compile a #C file. Override the default by using environment variable $CSHARPCFLAGS. ]) AX_CSHARP_PROGS( [CSHARPC], [cscc mcs csc /usr/local/bin/cscc /usr/local/bin/mcs /opt/bin/cscc /opt/bin/mcs], [AX_VAR_HEAD([CSHARPC])] ) ## get the basename of C# compiler. Depending on basename we try to ## decide about the CLR. 
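## Added illustration (hedged; compiler paths are examples only): the CLR
## wrapper is chosen from the compiler basename computed below, e.g.
## CSHARPC=/usr/bin/mcs gives csharpc="mcs" and therefore a search for
## "mono", while a "cscc" compiler leads to a search for "ilrun".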
test -n "${CSHARPC}" && { csharpc=`basename ${CSHARPC}` csharpc_d=`dirname ${CSHARPC}` } case $csharpc in cscc*) AX_CSHARP_PROGS( [CLR], [${csharpc_d}/ilrun ilrun /usr/local/bin/ilrun /opt/bin/ilrun], [AX_VAR_HEAD([CLR])] ) ;; mcs*) AX_CSHARP_PROGS( [CLR], [${csharpc_d}/mono mono /usr/local/bin/mono /opt/bin/mono], [AX_VAR_HEAD([CLR])] ) ;; esac case $LANG_CS in 1) AC_CONFIG_FILES( [scripts/csc.sh], [${CHMOD} a+x scripts/csc.sh], [CHMOD=${CHMOD}] ) AC_CONFIG_FILES([lib/csharp/Makefile]) AC_CONFIG_FILES([lib/csharp/antlr.runtime/Makefile]) AC_CONFIG_FILES([lib/csharp/antlr.astframe/Makefile]) # We have a Makefile that loops through all python examples. case $WITH_EXAMPLES in 1) AC_CONFIG_FILES([examples/csharp/ASTsupport/Makefile]) AC_CONFIG_FILES([examples/csharp/HTML/Makefile]) AC_CONFIG_FILES([examples/csharp/IDL/Makefile]) AC_CONFIG_FILES([examples/csharp/ParseTreeDebug/Makefile]) AC_CONFIG_FILES([examples/csharp/TokenStreamRewrite/Makefile]) AC_CONFIG_FILES([examples/csharp/calc/Makefile]) AC_CONFIG_FILES([examples/csharp/columns/Makefile]) AC_CONFIG_FILES([examples/csharp/exprAST/Makefile]) AC_CONFIG_FILES([examples/csharp/filter/Makefile]) AC_CONFIG_FILES([examples/csharp/filterWithRule/Makefile]) AC_CONFIG_FILES([examples/csharp/heteroAST/Makefile]) AC_CONFIG_FILES([examples/csharp/java/Makefile]) AC_CONFIG_FILES([examples/csharp/multiLexer/Makefile]) AC_CONFIG_FILES([examples/csharp/parseBinary/Makefile]) AC_CONFIG_FILES([examples/csharp/preserveWhiteSpace/Makefile]) AC_CONFIG_FILES([examples/csharp/tinyc/Makefile]) AC_CONFIG_FILES([examples/csharp/unicode/Makefile]) AC_CONFIG_FILES([examples/csharp/Makefile]) ;; esac ;; esac # We need a script that wrap java calls in order to make Java # ANTLR aware. This script needs to be executable. AC_CONFIG_FILES( [scripts/java.sh], [${CHMOD} a+x scripts/java.sh]) # We need a script that wrap jar calls in order to make Java # ANTLR aware. This script needs to be executable. AC_CONFIG_FILES( [scripts/jar.sh], [${CHMOD} a+x scripts/jar.sh]) # We need a script that wrap javac calls in order to make Javac # ANTLR aware. This script needs to be executable. AC_CONFIG_FILES( [scripts/javac.sh], [${CHMOD} a+x scripts/javac.sh]) # We need a script that wraps antlr calls AC_CONFIG_FILES( [scripts/antlr.sh], [${CHMOD} a+x scripts/antlr.sh]) case $LANG_CXX in 1) # We need a script that wraps how we compile C++ AC_CONFIG_FILES([scripts/cxx.sh],[${CHMOD} a+x scripts/cxx.sh]) # # We need a script that wraps how we link C++ AC_CONFIG_FILES([scripts/link.sh],[${CHMOD} a+x scripts/link.sh]) # There's a few C files around so make sure we can compile those as well AC_CONFIG_FILES([scripts/c.sh],[${CHMOD} a+x scripts/c.sh]) # We need a script that wraps how we build a (static?) 
library AC_CONFIG_FILES([scripts/lib.sh],[${CHMOD} a+x scripts/lib.sh]) # We need a script that wraps how we run the preprocessor AC_CONFIG_FILES([scripts/cpp.sh],[${CHMOD} a+x scripts/cpp.sh]) # C++ library AC_CONFIG_FILES([lib/cpp/Makefile]) AC_CONFIG_FILES([lib/cpp/antlr/Makefile]) AC_CONFIG_FILES([lib/cpp/src/Makefile]) # C++ examples case $WITH_EXAMPLES in 1) AC_CONFIG_FILES([examples/cpp/Makefile]) AC_CONFIG_FILES([examples/cpp/ASTsupport/Makefile]) AC_CONFIG_FILES([examples/cpp/calc/Makefile]) AC_CONFIG_FILES([examples/cpp/exprAST/Makefile]) AC_CONFIG_FILES([examples/cpp/filter/Makefile]) AC_CONFIG_FILES([examples/cpp/filterWithRule/Makefile]) AC_CONFIG_FILES([examples/cpp/flexLexer/Makefile]) AC_CONFIG_FILES([examples/cpp/HTML/Makefile]) AC_CONFIG_FILES([examples/cpp/heteroAST/Makefile]) AC_CONFIG_FILES([examples/cpp/IDL/Makefile]) AC_CONFIG_FILES([examples/cpp/imagNodeAST/Makefile]) AC_CONFIG_FILES([examples/cpp/includeFile/Makefile]) AC_CONFIG_FILES([examples/cpp/inherit.tinyc/Makefile]) AC_CONFIG_FILES([examples/cpp/java/Makefile]) AC_CONFIG_FILES([examples/cpp/lexRewrite/Makefile]) AC_CONFIG_FILES([examples/cpp/multiLexer/Makefile]) AC_CONFIG_FILES([examples/cpp/multiParser/Makefile]) AC_CONFIG_FILES([examples/cpp/parseBinary/Makefile]) AC_CONFIG_FILES([examples/cpp/preserveWhiteSpace/Makefile]) AC_CONFIG_FILES([examples/cpp/tinyc/Makefile]) AC_CONFIG_FILES([examples/cpp/tokenStreamRewrite/Makefile]) AC_CONFIG_FILES([examples/cpp/transform/Makefile]) AC_CONFIG_FILES([examples/cpp/treewalk/Makefile]) AC_CONFIG_FILES([examples/cpp/unicode/Makefile]) ;; esac ;; esac # Makefile to build supplementary libraries .. AC_CONFIG_FILES([lib/Makefile]) case $WITH_EXAMPLES in 1) AC_CONFIG_FILES([examples/Makefile]) ;; esac AC_CONFIG_FILES([doc/Makefile]) AC_CONFIG_FILES([Makefile]) AC_CONFIG_FILES([scripts/antlr-config scripts/run-antlr scripts/antlr.spec]) case $LANG_JAVA in 1) AC_CONFIG_FILES([antlr/Makefile]) case $WITH_EXAMPLES in 1) AC_CONFIG_FILES([examples/java/ASTsupport/Makefile]) AC_CONFIG_FILES([examples/java/HTML/Makefile]) AC_CONFIG_FILES([examples/java/IDL/Makefile]) AC_CONFIG_FILES([examples/java/calc/Makefile]) AC_CONFIG_FILES([examples/java/columns/Makefile]) AC_CONFIG_FILES([examples/java/exprAST/Makefile]) AC_CONFIG_FILES([examples/java/filter/Makefile]) AC_CONFIG_FILES([examples/java/filterWithRule/Makefile]) AC_CONFIG_FILES([examples/java/heteroAST/Makefile]) AC_CONFIG_FILES([examples/java/imagNodeAST/Makefile]) AC_CONFIG_FILES([examples/java/includeFile/Makefile]) AC_CONFIG_FILES([examples/java/inherit.tinyc/Makefile]) AC_CONFIG_FILES([examples/java/java/Makefile]) AC_CONFIG_FILES([examples/java/lexRewrite/Makefile]) AC_CONFIG_FILES([examples/java/linkChecker/Makefile]) AC_CONFIG_FILES([examples/java/multiLexer/Makefile]) AC_CONFIG_FILES([examples/java/parseBinary/Makefile]) AC_CONFIG_FILES([examples/java/pascal/Makefile]) AC_CONFIG_FILES([examples/java/preserveWhiteSpace/Makefile]) AC_CONFIG_FILES([examples/java/tinybasic/Makefile]) AC_CONFIG_FILES([examples/java/tinyc/Makefile]) AC_CONFIG_FILES([examples/java/transform/Makefile]) AC_CONFIG_FILES([examples/java/treewalk/Makefile]) AC_CONFIG_FILES([examples/java/unicode.IDENTs/Makefile]) AC_CONFIG_FILES([examples/java/unicode/Makefile]) AC_CONFIG_FILES([examples/java/xml/Makefile]) AC_CONFIG_FILES([examples/java/Makefile]) AC_CONFIG_FILES([examples/java/cpp/Makefile]) ;; esac ANTLR_ACTION_FILES="" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/cpp/ActionLexer.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} 
actions/cpp/ActionLexerTokenTypes.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/csharp/ActionLexer.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/csharp/ActionLexerTokenTypes.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/java/ActionLexer.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/java/ActionLexerTokenTypes.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/python/ActionLexer.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/python/ActionLexerTokenTypes.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/python/CodeLexer.java" ANTLR_ACTION_FILES="${ANTLR_ACTION_FILES} actions/python/CodeLexerTokenTypes.java" ANTLR_ANTLR_FILES="" ANTLR_ANTLR_FILES="${ANTLR_ANTLR_FILES} ANTLRParser.java" ANTLR_ANTLR_FILES="${ANTLR_ANTLR_FILES} ANTLRTokenTypes.java" ANTLR_ANTLR_FILES="${ANTLR_ANTLR_FILES} ANTLRLexer.java" ANTLR_TOKDEF_FILES="" ANTLR_TOKDEF_FILES="${ANTLR_TOKDEF_FILES} ANTLRTokdefParser.java" ANTLR_TOKDEF_FILES="${ANTLR_TOKDEF_FILES} ANTLRTokdefLexer.java" ANTLR_TOKDEF_FILES="${ANTLR_TOKDEF_FILES} ANTLRTokdefParserTokenTypes.java" ## This variables can be used in antlr/Makefile file_list="${ANTLR_ACTION_FILES} ${ANTLR_ANTLR_FILES} ${ANTLR_TOKDEF_FILES}" if test "x${file_list}" == "x" ; then : else ANTLR_CONFIG_FILES="" ANTLR_FILE_LIST="" ## iterate over my file list. If a file exists then don't copy ## this file - autoconf's behaviour is to delete existing files. for x in ${file_list} ; do f="antlr/${x}" if test -f "${f}" ; then : else f="${f}:${f}" ANTLR_CONFIG_FILES="${ANTLR_CONFIG_FILES} ${f}" ANTLR_FILE_LIST="${ANTLR_FILE_LIST} antlr/${x}" fi done ## copy files into build directory and make them writeable (in ## case we copy them from our depot. The actions necessary here ## to execute a command (chmod) on a list of files is bit ## hackish - it may depend on autoconf version in use (works ## fine for autoconf 2.59). ## The problem is that autoconf takes the file list literally, ## ie. we end up in config.status by something like ## ## case $ac_file in ## .. ## $ANTLR_CONFIG_FILES) chmod a+w .. ;; ## esac ## ## To make this work I'm introducing ANTLR_CONFIG_FILES as kind ## of 'catch-all' variable. The side effect is that every ## file with no explicit action will get a "chmod a+w ..." But ## that should be ok for Makefiles etc. AC_CONFIG_FILES([ ${ANTLR_CONFIG_FILES} ],[ ### echo "config.status: chmod a+w ${ac_file} .." ${CHMOD} a+w "${ac_file}" ],[ ANTLR_CONFIG_FILES='*' ANTLR_FILE_LIST="${ANTLR_FILE_LIST}" CHMOD="${CHMOD}" ] ) fi ;; esac ## compute basename of core libraries antlr_jar=`basename ${ANTLR_JAR}` antlr_net=`basename ${ANTLR_NET}` antlr_lib=`basename ${ANTLR_LIB}` antlr_py=`basename ${ANTLR_PY}` astframe_net=`basename ${ASTFRAME_NET}` test -z "${JAVA}" && { JAVA=java } ### cygwin has no (supported) Java - users are requested to have java ### in their PATH in order to execute "bin/antlr.sh". To support this ### I'm making sure that just the basename is used. case $host_os in *cygwin* |*mingw*|*msys*) AX_BASENAME([JAVA]) ;; esac # we assume that we have standard rm arround. This should be checked. RMF="$RM -r -f" AC_OUTPUT antlr-2.7.7/CHANGES.txt0000644000175000017500000001067210522211616014420 0ustar twernertwernerChanges with Antlr 2.7.7 # Terence Parr * updated BaseAST.java to make the doWorkForAll method static. Same behaviour, except no ClassCastExceptions when sibling.getFirstChild() happens to return an AST that doesn't extend BaseAST. Oliver Wong contributed the patch. 
* updated TokenStreamRewriteEngine.java to reflect bug fixes discovered in v3 counterpart. # Wolfgang Haefelinger * changing version to 2.7.7 * configure*,scripts/csc.sh.in,*/AssemblyInfo.cs building of strong assemblies supported - default is building of "weak" assemblies as before. Option --with-strong-assemblies=ARG enables strong assemblies (ARG must point to a keyfile). Such an assembly allows to be called by a partially trusted caller. This can be disabled by --disable-allow-partially-trusted-callers (if APTC is disabled, examples may not compile if located on network drive). * antlr/Makefile.in: couple of missing *.java files - missing in 2.7.6 - added. * examples/cpp/heteroAST/*: fixed problem of missing include dir; example compiles and tests fine. * examples/cpp/Makefile.in: enabled all directories (+ heteroAST and flexLexer). * configure*, scripts/config.deps.in: fixes to reflect directory naming changes in C#. * examples/csharp/Makefile.in: disabled "broken" example HTML. * lib/python/antlr/antlr.py: fixed "hideen" type reported by Andrew McCulloh Changes with Antlr 2.7.6 # Terence Parr o added size, index methods to TokenStreamRewriteEngine.java o bug in syn preds for tree parsers. Submitted by Ole Kniemeyer. o all Class.forName yanked out; uses thread context loader o option to prevent System.exit termination o added recover() method to lexers o fixed code gen bug for syn preds in tree parsers. Thanks to Marc Horowitz. o BaseAST was not checking for null text in toString() o Scott added java line ouput in code gen o Prashant tweaked a few things for ANTLRStudio; a few new classes in ASdebug package # Ric Klaren: - Give errors if the user attempts to set k>1 in a TreeWalker - Added missing Makefile.in for C++ heteroAST example and enabled it in configure.in - Many small C++ support code and codegen tweaks fixes to increase portability. (Compaq Tru64 UNIX V5.1, VC's) - Prevent '\' entering the bitset dump comments, might occur at end of line. Some compilers continue the comment to the next line (not sure if this is a compiler bug, should look it up) For the thanks & kudos: Among others thanks to Stuart Dootson, Bryan Ewbank, Kurt McCall and a number of others who submitted patches. (my administration wasn't too well this time) # Wolfgang Haefelinger *) configure.* : support for 'fastjar' added -fastjar will be preferred if jar and fastjar are available [geronimo, Nov 30th 2005]. *) configure.*: fixed --with-bootclasspath error; configure --help incorrectly displayed this option as "--bootclasspath". Thanks to Paul Jenner for reporting [geronimo, Nov 13th 2005]. *) antlr/PythonCodeGenerator.java: fixed problems with raising exceptions `SemanticException' and `MismatchedTokenException' unknown in current namespace. Thanks to Klaas Hofstra and Benjamin Niemann for reporting [geronimo, Nov 13th 2005]. *) examples/python/tinybasic/basic.g: fixed the unary MINUS problem reported by Klaas Hofstra [geronimo, Nov 13th 2005]. *) configure*,scripts/[cxx|link].sh.in: Changes made by Kurt McCall added to support Tru64 Unix (version V5.1, Rev. 732) along with COMPAC/DEC CXX (version V6.5-042) [geronimo, Nov 13th 2005]. *) antlr/PythonCodeGenerator.java: 'inputState.guessing' bug reported by Chris Minnoy and Benjamin Niemann fixed [Mark Kole, Jul 4th 2005]. *) lib/python/antlr/antlr.py: removed all 'tabs'; applied changes of patch `1126872211596/antlr.py.patch' regarding getting line and column number. Unknown patch contribution [geronimo, Nov 13th 2005]. 
# Changes for C# support (by Kunle Odutola & Micheal Jordan): -- Fixed isssue where an empty input stream caused an exception in the lexer due to the caching of LA1 and LA2. StringTemplate triggers this. -- The rewind() method did not take into account the value of the case insensitive flag. Reported by Dr. Hartmut Kocher. -- ***BREAKING CHANGE*** ASTPair object pool had a race condition in multi-threaded environments. ASTPair is now a struct. -- C# examples now properly initialize the ASTFactory before a tree parser is used. antlr-2.7.7/lib/0000755000175000017500000000000010522211615013346 5ustar twernertwernerantlr-2.7.7/lib/Makefile.in0000644000175000017500000000041610522211615015414 0ustar twernertwerner############################################################################### # $Id:$ ############################################################################### ## do not change this value subdir=lib @stdvars@ SUBDIRS = cpp python csharp @stdmake@ @stddeps@ antlr-2.7.7/lib/python/0000755000175000017500000000000010522211615014667 5ustar twernertwernerantlr-2.7.7/lib/python/Makefile.in0000644000175000017500000000260410522211615016736 0ustar twernertwerner## do not change this value subdir=lib/python ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stdvars@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx PYTHON_SETUP = \ setup.py \ $(eol) antlr_python_FILES = \ $(eol) all : test: install: install-this clean: distclean: clean @RMF@ Makefile antlr_py_DIR = \ $(datadir)/$(versioneddir) $(eol) antlr_py_FILES = \ @abs_top_srcdir@/lib/python/antlr/antlr.py \ $(eol) antlr_py1_FILES = \ @abs_top_srcdir@/lib/python/antlr/__init__.py \ @abs_top_srcdir@/lib/python/antlr/antlr.py \ $(eol) docdir = $(datadir)/doc/$(versioneddir) extradir = $(datadir)/$(versioneddir) install-this: $(MKDIR) -p "$(antlr_py_DIR)" $(MKDIR) -p "$(libdir)" @@ECHO@ "install python files .. " @for f in $(antlr_py_FILES) ; do \ @ECHO@ "install $${f}" ; \ if test -f "$${f}" ; then \ $(INSTALL) -m 444 "$${f}" "$(libdir)" ; \ fi ;\ done @for f in $(antlr_py1_FILES) ; do \ @ECHO@ "install $${f}" ; \ if test -f "$${f}" ; then \ $(INSTALL) -m 444 "$${f}" "$(antlr_py_DIR)" ; \ fi ;\ done @f="$(objdir)/scripts/pyantlr.sh"; \ if test -f "$${f}" ; then \ @ECHO@ "install Python installer" ; \ $(MKDIR) -p "$(sbindir)" ; \ $(INSTALL) -m 555 "$${f}" "$(sbindir)" ; \ fi ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stddeps@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx antlr-2.7.7/lib/python/setup.py0000644000175000017500000000051710522211615016404 0ustar twernertwerner#!/usr/bin/env python from distutils.core import setup setup(name="antlr", version="2.7.5RC1", description="Python runtime support for ANTLR-generated parsers", author="Wolfgang Haefelinger / Marq Kole", author_email="ora.et.labora@web.de", url="http://www.antlr.org/", packages=['antlr'], ) antlr-2.7.7/lib/python/antlr/0000755000175000017500000000000010522211615016007 5ustar twernertwernerantlr-2.7.7/lib/python/antlr/__init__.py0000644000175000017500000000025210522211615020117 0ustar twernertwerner# # ANTLR Translator Generator # Project led by Terence Parr at http://www.jGuru.com # Software rights: http://www.antlr.org/license.html # # $Id$ # from antlr import * antlr-2.7.7/lib/python/antlr/antlr.py0000644000175000017500000024123410522211615017507 0ustar twernertwerner## This file is part of PyANTLR. See LICENSE.txt for license ## details..........Copyright (C) Wolfgang Haefelinger, 2004. 
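## ----------------------------------------------------------------------
## Note (added remark, not part of the original distribution): the
## distutils setup.py shown above packages this module as the "antlr"
## package.  A minimal installation and smoke test, assuming a stock
## Python 2.x with distutils, would look roughly like this -- the exact
## paths are illustrative only:
##
##   $ cd lib/python && python setup.py install
##   $ python
##   >>> import antlr
##   >>> print antlr.version()    # version dictionary defined below
## ----------------------------------------------------------------------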
## get sys module import sys version = sys.version.split()[0] if version < '2.2.1': False = 0 if version < '2.3': True = not False ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### global symbols ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### ANTLR Standard Tokens SKIP = -1 INVALID_TYPE = 0 EOF_TYPE = 1 EOF = 1 NULL_TREE_LOOKAHEAD = 3 MIN_USER_TYPE = 4 ### ANTLR's EOF Symbol EOF_CHAR = '' ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### general functions ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ## Version should be automatically derived from configure.in. For now, ## we need to bump it ourselfs. Don't remove the tags. ## def version(): r = { 'major' : '2', 'minor' : '7', 'micro' : '5', 'patch' : '' , 'version': '2.7.5' } return r ## def error(fmt,*args): if fmt: print "error: ", fmt % tuple(args) def ifelse(cond,_then,_else): if cond : r = _then else: r = _else return r def is_string_type(x): return (isinstance(x,str) or isinstance(x,unicode)) def assert_string_type(x): assert is_string_type(x) pass ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### ANTLR Exceptions ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class ANTLRException(Exception): def __init__(self, *args): Exception.__init__(self, *args) class RecognitionException(ANTLRException): def __init__(self, *args): ANTLRException.__init__(self, *args) self.fileName = None self.line = -1 self.column = -1 if len(args) >= 2: self.fileName = args[1] if len(args) >= 3: self.line = args[2] if len(args) >= 4: self.column = args[3] def __str__(self): buf = [''] if self.fileName: buf.append(self.fileName + ":") if self.line != -1: if not self.fileName: buf.append("line ") buf.append(str(self.line)) if self.column != -1: buf.append(":" + str(self.column)) buf.append(":") buf.append(" ") return str('').join(buf) __repr__ = __str__ class NoViableAltException(RecognitionException): def __init__(self, *args): RecognitionException.__init__(self, *args) self.token = None self.node = None if isinstance(args[0],AST): self.node = args[0] elif isinstance(args[0],Token): self.token = args[0] else: raise TypeError("NoViableAltException requires Token or AST argument") def __str__(self): if self.token: line = self.token.getLine() col = self.token.getColumn() text = self.token.getText() return "unexpected symbol at line %s (column %s): \"%s\"" % (line,col,text) if self.node == ASTNULL: return "unexpected end of subtree" assert self.node ### hackish, we assume that an AST contains method getText return "unexpected node: %s" % (self.node.getText()) __repr__ = __str__ class NoViableAltForCharException(RecognitionException): def __init__(self, *args): self.foundChar = None if len(args) == 2: self.foundChar = args[0] scanner = args[1] RecognitionException.__init__(self, "NoViableAlt", scanner.getFilename(), scanner.getLine(), scanner.getColumn()) elif len(args) == 4: self.foundChar = args[0] fileName = args[1] line = args[2] column = args[3] RecognitionException.__init__(self, "NoViableAlt", fileName, line, column) else: RecognitionException.__init__(self, "NoViableAlt", '', -1, -1) def __str__(self): mesg = "unexpected char: " if self.foundChar >= ' ' and self.foundChar <= '~': mesg += "'" + self.foundChar + "'" elif self.foundChar: mesg += "0x" + hex(ord(self.foundChar)).upper()[2:] else: mesg += "" return mesg __repr__ = __str__ class SemanticException(RecognitionException): def 
__init__(self, *args): RecognitionException.__init__(self, *args) class MismatchedCharException(RecognitionException): NONE = 0 CHAR = 1 NOT_CHAR = 2 RANGE = 3 NOT_RANGE = 4 SET = 5 NOT_SET = 6 def __init__(self, *args): self.args = args if len(args) == 5: # Expected range / not range if args[3]: self.mismatchType = MismatchedCharException.NOT_RANGE else: self.mismatchType = MismatchedCharException.RANGE self.foundChar = args[0] self.expecting = args[1] self.upper = args[2] self.scanner = args[4] RecognitionException.__init__(self, "Mismatched char range", self.scanner.getFilename(), self.scanner.getLine(), self.scanner.getColumn()) elif len(args) == 4 and is_string_type(args[1]): # Expected char / not char if args[2]: self.mismatchType = MismatchedCharException.NOT_CHAR else: self.mismatchType = MismatchedCharException.CHAR self.foundChar = args[0] self.expecting = args[1] self.scanner = args[3] RecognitionException.__init__(self, "Mismatched char", self.scanner.getFilename(), self.scanner.getLine(), self.scanner.getColumn()) elif len(args) == 4 and isinstance(args[1], BitSet): # Expected BitSet / not BitSet if args[2]: self.mismatchType = MismatchedCharException.NOT_SET else: self.mismatchType = MismatchedCharException.SET self.foundChar = args[0] self.set = args[1] self.scanner = args[3] RecognitionException.__init__(self, "Mismatched char set", self.scanner.getFilename(), self.scanner.getLine(), self.scanner.getColumn()) else: self.mismatchType = MismatchedCharException.NONE RecognitionException.__init__(self, "Mismatched char") ## Append a char to the msg buffer. If special, # then show escaped version # def appendCharName(self, sb, c): if not c or c == 65535: # 65535 = (char) -1 = EOF sb.append("''") elif c == '\n': sb.append("'\\n'") elif c == '\r': sb.append("'\\r'"); elif c == '\t': sb.append("'\\t'") else: sb.append('\'' + c + '\'') ## # Returns an error message with line number/column information # def __str__(self): sb = [''] sb.append(RecognitionException.__str__(self)) if self.mismatchType == MismatchedCharException.CHAR: sb.append("expecting ") self.appendCharName(sb, self.expecting) sb.append(", found ") self.appendCharName(sb, self.foundChar) elif self.mismatchType == MismatchedCharException.NOT_CHAR: sb.append("expecting anything but '") self.appendCharName(sb, self.expecting) sb.append("'; got it anyway") elif self.mismatchType in [MismatchedCharException.RANGE, MismatchedCharException.NOT_RANGE]: sb.append("expecting char ") if self.mismatchType == MismatchedCharException.NOT_RANGE: sb.append("NOT ") sb.append("in range: ") appendCharName(sb, self.expecting) sb.append("..") appendCharName(sb, self.upper) sb.append(", found ") appendCharName(sb, self.foundChar) elif self.mismatchType in [MismatchedCharException.SET, MismatchedCharException.NOT_SET]: sb.append("expecting ") if self.mismatchType == MismatchedCharException.NOT_SET: sb.append("NOT ") sb.append("one of (") for i in range(len(self.set)): self.appendCharName(sb, self.set[i]) sb.append("), found ") self.appendCharName(sb, self.foundChar) return str().join(sb).strip() __repr__ = __str__ class MismatchedTokenException(RecognitionException): NONE = 0 TOKEN = 1 NOT_TOKEN = 2 RANGE = 3 NOT_RANGE = 4 SET = 5 NOT_SET = 6 def __init__(self, *args): self.args = args self.tokenNames = [] self.token = None self.tokenText = '' self.node = None if len(args) == 6: # Expected range / not range if args[3]: self.mismatchType = MismatchedTokenException.NOT_RANGE else: self.mismatchType = MismatchedTokenException.RANGE 
self.tokenNames = args[0] self.expecting = args[2] self.upper = args[3] self.fileName = args[5] elif len(args) == 4 and isinstance(args[2], int): # Expected token / not token if args[3]: self.mismatchType = MismatchedTokenException.NOT_TOKEN else: self.mismatchType = MismatchedTokenException.TOKEN self.tokenNames = args[0] self.expecting = args[2] elif len(args) == 4 and isinstance(args[2], BitSet): # Expected BitSet / not BitSet if args[3]: self.mismatchType = MismatchedTokenException.NOT_SET else: self.mismatchType = MismatchedTokenException.SET self.tokenNames = args[0] self.set = args[2] else: self.mismatchType = MismatchedTokenException.NONE RecognitionException.__init__(self, "Mismatched Token: expecting any AST node", "", -1, -1) if len(args) >= 2: if isinstance(args[1],Token): self.token = args[1] self.tokenText = self.token.getText() RecognitionException.__init__(self, "Mismatched Token", self.fileName, self.token.getLine(), self.token.getColumn()) elif isinstance(args[1],AST): self.node = args[1] self.tokenText = str(self.node) RecognitionException.__init__(self, "Mismatched Token", "", self.node.getLine(), self.node.getColumn()) else: self.tokenText = "" RecognitionException.__init__(self, "Mismatched Token", "", -1, -1) def appendTokenName(self, sb, tokenType): if tokenType == INVALID_TYPE: sb.append("") elif tokenType < 0 or tokenType >= len(self.tokenNames): sb.append("<" + str(tokenType) + ">") else: sb.append(self.tokenNames[tokenType]) ## # Returns an error message with line number/column information # def __str__(self): sb = [''] sb.append(RecognitionException.__str__(self)) if self.mismatchType == MismatchedTokenException.TOKEN: sb.append("expecting ") self.appendTokenName(sb, self.expecting) sb.append(", found " + self.tokenText) elif self.mismatchType == MismatchedTokenException.NOT_TOKEN: sb.append("expecting anything but '") self.appendTokenName(sb, self.expecting) sb.append("'; got it anyway") elif self.mismatchType in [MismatchedTokenException.RANGE, MismatchedTokenException.NOT_RANGE]: sb.append("expecting token ") if self.mismatchType == MismatchedTokenException.NOT_RANGE: sb.append("NOT ") sb.append("in range: ") appendTokenName(sb, self.expecting) sb.append("..") appendTokenName(sb, self.upper) sb.append(", found " + self.tokenText) elif self.mismatchType in [MismatchedTokenException.SET, MismatchedTokenException.NOT_SET]: sb.append("expecting ") if self.mismatchType == MismatchedTokenException.NOT_SET: sb.append("NOT ") sb.append("one of (") for i in range(len(self.set)): self.appendTokenName(sb, self.set[i]) sb.append("), found " + self.tokenText) return str().join(sb).strip() __repr__ = __str__ class TokenStreamException(ANTLRException): def __init__(self, *args): ANTLRException.__init__(self, *args) # Wraps an Exception in a TokenStreamException class TokenStreamIOException(TokenStreamException): def __init__(self, *args): if args and isinstance(args[0], Exception): io = args[0] TokenStreamException.__init__(self, str(io)) self.io = io else: TokenStreamException.__init__(self, *args) self.io = self # Wraps a RecognitionException in a TokenStreamException class TokenStreamRecognitionException(TokenStreamException): def __init__(self, *args): if args and isinstance(args[0], RecognitionException): recog = args[0] TokenStreamException.__init__(self, str(recog)) self.recog = recog else: raise TypeError("TokenStreamRecognitionException requires RecognitionException argument") def __str__(self): return str(self.recog) __repr__ = __str__ class 
TokenStreamRetryException(TokenStreamException): def __init__(self, *args): TokenStreamException.__init__(self, *args) class CharStreamException(ANTLRException): def __init__(self, *args): ANTLRException.__init__(self, *args) # Wraps an Exception in a CharStreamException class CharStreamIOException(CharStreamException): def __init__(self, *args): if args and isinstance(args[0], Exception): io = args[0] CharStreamException.__init__(self, str(io)) self.io = io else: CharStreamException.__init__(self, *args) self.io = self class TryAgain(Exception): pass ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### Token ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class Token(object): SKIP = -1 INVALID_TYPE = 0 EOF_TYPE = 1 EOF = 1 NULL_TREE_LOOKAHEAD = 3 MIN_USER_TYPE = 4 def __init__(self,**argv): try: self.type = argv['type'] except: self.type = INVALID_TYPE try: self.text = argv['text'] except: self.text = "" def isEOF(self): return (self.type == EOF_TYPE) def getColumn(self): return 0 def getLine(self): return 0 def getFilename(self): return None def setFilename(self,name): return self def getText(self): return "" def setText(self,text): if is_string_type(text): pass else: raise TypeError("Token.setText requires string argument") return self def setColumn(self,column): return self def setLine(self,line): return self def getType(self): return self.type def setType(self,type): if isinstance(type,int): self.type = type else: raise TypeError("Token.setType requires integer argument") return self def toString(self): ## not optimal type_ = self.type if type_ == 3: tval = 'NULL_TREE_LOOKAHEAD' elif type_ == 1: tval = 'EOF_TYPE' elif type_ == 0: tval = 'INVALID_TYPE' elif type_ == -1: tval = 'SKIP' else: tval = type_ return '["%s",<%s>]' % (self.getText(),tval) __str__ = toString __repr__ = toString ### static attribute .. Token.badToken = Token( type=INVALID_TYPE, text="") if __name__ == "__main__": print "testing .." 
T = Token.badToken print T ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### CommonToken ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class CommonToken(Token): def __init__(self,**argv): Token.__init__(self,**argv) self.line = 0 self.col = 0 try: self.line = argv['line'] except: pass try: self.col = argv['col'] except: pass def getLine(self): return self.line def getText(self): return self.text def getColumn(self): return self.col def setLine(self,line): self.line = line return self def setText(self,text): self.text = text return self def setColumn(self,col): self.col = col return self def toString(self): ## not optimal type_ = self.type if type_ == 3: tval = 'NULL_TREE_LOOKAHEAD' elif type_ == 1: tval = 'EOF_TYPE' elif type_ == 0: tval = 'INVALID_TYPE' elif type_ == -1: tval = 'SKIP' else: tval = type_ d = { 'text' : self.text, 'type' : tval, 'line' : self.line, 'colm' : self.col } fmt = '["%(text)s",<%(type)s>,line=%(line)s,col=%(colm)s]' return fmt % d __str__ = toString __repr__ = toString if __name__ == '__main__' : T = CommonToken() print T T = CommonToken(col=15,line=1,text="some text", type=5) print T T = CommonToken() T.setLine(1).setColumn(15).setText("some text").setType(5) print T print T.getLine() print T.getColumn() print T.getText() print T.getType() ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### CommonHiddenStreamToken ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class CommonHiddenStreamToken(CommonToken): def __init__(self,*args): CommonToken.__init__(self,*args) self.hiddenBefore = None self.hiddenAfter = None def getHiddenAfter(self): return self.hiddenAfter def getHiddenBefore(self): return self.hiddenBefore def setHiddenAfter(self,t): self.hiddenAfter = t def setHiddenBefore(self, t): self.hiddenBefore = t ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### Queue ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ## Shall be a circular buffer on tokens .. class Queue(object): def __init__(self): self.buffer = [] # empty list def append(self,item): self.buffer.append(item) def elementAt(self,index): return self.buffer[index] def reset(self): self.buffer = [] def removeFirst(self): self.buffer.pop(0) def length(self): return len(self.buffer) def __str__(self): return str(self.buffer) ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### InputBuffer ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class InputBuffer(object): def __init__(self): self.nMarkers = 0 self.markerOffset = 0 self.numToConsume = 0 self.queue = Queue() def __str__(self): return "(%s,%s,%s,%s)" % ( self.nMarkers, self.markerOffset, self.numToConsume, self.queue) def __repr__(self): return str(self) def commit(self): self.nMarkers -= 1 def consume(self) : self.numToConsume += 1 ## probably better to return a list of items ## because of unicode. Or return a unicode ## string .. def getLAChars(self) : i = self.markerOffset n = self.queue.length() s = '' while i 0: if self.nMarkers > 0: # guess mode -- leave leading characters and bump offset. 
self.markerOffset += 1 else: # normal mode -- remove first character self.queue.removeFirst() self.numToConsume -= 1 ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### CharBuffer ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class CharBuffer(InputBuffer): def __init__(self,reader): ##assert isinstance(reader,file) super(CharBuffer,self).__init__() ## a reader is supposed to be anything that has ## a method 'read(int)'. self.input = reader def __str__(self): base = super(CharBuffer,self).__str__() return "CharBuffer{%s,%s" % (base,str(input)) def fill(self,amount): try: self.syncConsume() while self.queue.length() < (amount + self.markerOffset) : ## retrieve just one char - what happend at end ## of input? c = self.input.read(1) ### python's behaviour is to return the empty string on ### EOF, ie. no exception whatsoever is thrown. An empty ### python string has the nice feature that it is of ### type 'str' and "not ''" would return true. Contrary, ### one can't do this: '' in 'abc'. This should return ### false, but all we get is then a TypeError as an ### empty string is not a character. ### Let's assure then that we have either seen a ### character or an empty string (EOF). assert len(c) == 0 or len(c) == 1 ### And it shall be of type string (ASCII or UNICODE). assert is_string_type(c) ### Just append EOF char to buffer. Note that buffer may ### contain then just more than one EOF char .. ### use unicode chars instead of ASCII .. self.queue.append(c) except Exception,e: raise CharStreamIOException(e) ##except: # (mk) Cannot happen ... ##error ("unexpected exception caught ..") ##assert 0 ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### LexerSharedInputState ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class LexerSharedInputState(object): def __init__(self,ibuf): assert isinstance(ibuf,InputBuffer) self.input = ibuf self.column = 1 self.line = 1 self.tokenStartColumn = 1 self.tokenStartLine = 1 self.guessing = 0 self.filename = None def reset(self): self.column = 1 self.line = 1 self.tokenStartColumn = 1 self.tokenStartLine = 1 self.guessing = 0 self.filename = None self.input.reset() def LA(self,k): return self.input.LA(k) ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### TokenStream ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class TokenStream(object): def nextToken(self): pass def __iter__(self): return TokenStreamIterator(self) ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### TokenStreamIterator ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class TokenStreamIterator(object): def __init__(self,inst): if isinstance(inst,TokenStream): self.inst = inst return raise TypeError("TokenStreamIterator requires TokenStream object") def next(self): assert self.inst item = self.inst.nextToken() if not item or item.isEOF(): raise StopIteration() return item ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### TokenStreamSelector ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class TokenStreamSelector(TokenStream): def __init__(self): self._input = None self._stmap = {} self._stack = [] def addInputStream(self,stream,key): self._stmap[key] = stream def getCurrentStream(self): return self._input def getStream(self,sname): try: stream = self._stmap[sname] except: raise ValueError("TokenStream " + sname + " not found"); return stream; def 
nextToken(self): while 1: try: return self._input.nextToken() except TokenStreamRetryException,r: ### just retry "forever" pass def pop(self): stream = self._stack.pop(); self.select(stream); return stream; def push(self,arg): self._stack.append(self._input); self.select(arg) def retry(self): raise TokenStreamRetryException() def select(self,arg): if isinstance(arg,TokenStream): self._input = arg return if is_string_type(arg): self._input = self.getStream(arg) return raise TypeError("TokenStreamSelector.select requires " + "TokenStream or string argument") ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### TokenStreamBasicFilter ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class TokenStreamBasicFilter(TokenStream): def __init__(self,input): self.input = input; self.discardMask = BitSet() def discard(self,arg): if isinstance(arg,int): self.discardMask.add(arg) return if isinstance(arg,BitSet): self.discardMark = arg return raise TypeError("TokenStreamBasicFilter.discard requires" + "integer or BitSet argument") def nextToken(self): tok = self.input.nextToken() while tok and self.discardMask.member(tok.getType()): tok = self.input.nextToken() return tok ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### TokenStreamHiddenTokenFilter ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class TokenStreamHiddenTokenFilter(TokenStreamBasicFilter): def __init__(self,input): TokenStreamBasicFilter.__init__(self,input) self.hideMask = BitSet() self.nextMonitoredToken = None self.lastHiddenToken = None self.firstHidden = None def consume(self): self.nextMonitoredToken = self.input.nextToken() def consumeFirst(self): self.consume() p = None; while self.hideMask.member(self.LA(1).getType()) or \ self.discardMask.member(self.LA(1).getType()): if self.hideMask.member(self.LA(1).getType()): if not p: p = self.LA(1) else: p.setHiddenAfter(self.LA(1)) self.LA(1).setHiddenBefore(p) p = self.LA(1) self.lastHiddenToken = p if not self.firstHidden: self.firstHidden = p self.consume() def getDiscardMask(self): return self.discardMask def getHiddenAfter(self,t): return t.getHiddenAfter() def getHiddenBefore(self,t): return t.getHiddenBefore() def getHideMask(self): return self.hideMask def getInitialHiddenToken(self): return self.firstHidden def hide(self,m): if isinstance(m,int): self.hideMask.add(m) return if isinstance(m.BitMask): self.hideMask = m return def LA(self,i): return self.nextMonitoredToken def nextToken(self): if not self.LA(1): self.consumeFirst() monitored = self.LA(1) monitored.setHiddenBefore(self.lastHiddenToken) self.lastHiddenToken = None self.consume() p = monitored while self.hideMask.member(self.LA(1).getType()) or \ self.discardMask.member(self.LA(1).getType()): if self.hideMask.member(self.LA(1).getType()): p.setHiddenAfter(self.LA(1)) if p != monitored: self.LA(1).setHiddenBefore(p) p = self.lastHiddenToken = self.LA(1) self.consume() return monitored ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### StringBuffer ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class StringBuffer: def __init__(self,string=None): if string: self.text = list(string) else: self.text = [] def setLength(self,sz): if not sz : self.text = [] return assert sz>0 if sz >= self.length(): return ### just reset to empty buffer self.text = self.text[0:sz] def length(self): return len(self.text) def append(self,c): self.text.append(c) ### return buffer as string. 
Arg 'a' is used as index ## into the buffer and 2nd argument shall be the length. ## If 2nd args is absent, we return chars till end of ## buffer starting with 'a'. def getString(self,a=None,length=None): if not a : a = 0 assert a>=0 if a>= len(self.text) : return "" if not length: ## no second argument L = self.text[a:] else: assert (a+length) <= len(self.text) b = a + length L = self.text[a:b] s = "" for x in L : s += x return s toString = getString ## alias def __str__(self): return str(self.text) ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### Reader ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ## When reading Japanese chars, it happens that a stream returns a ## 'char' of length 2. This looks like a bug in the appropriate ## codecs - but I'm rather unsure about this. Anyway, if this is ## the case, I'm going to split this string into a list of chars ## and put them on hold, ie. on a buffer. Next time when called ## we read from buffer until buffer is empty. ## wh: nov, 25th -> problem does not appear in Python 2.4.0.c1. class Reader(object): def __init__(self,stream): self.cin = stream self.buf = [] def read(self,num): assert num==1 if len(self.buf): return self.buf.pop() ## Read a char - this may return a string. ## Is this a bug in codecs/Python? c = self.cin.read(1) if not c or len(c)==1: return c L = list(c) L.reverse() for x in L: self.buf.append(x) ## read one char .. return self.read(1) ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### CharScanner ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class CharScanner(TokenStream): ## class members NO_CHAR = 0 EOF_CHAR = '' ### EOF shall be the empty string. def __init__(self, *argv, **kwargs): super(CharScanner, self).__init__() self.saveConsumedInput = True self.tokenClass = None self.caseSensitive = True self.caseSensitiveLiterals = True self.literals = None self.tabsize = 8 self._returnToken = None self.commitToPath = False self.traceDepth = 0 self.text = StringBuffer() self.hashString = hash(self) self.setTokenObjectClass(CommonToken) self.setInput(*argv) def __iter__(self): return CharScannerIterator(self) def setInput(self,*argv): ## case 1: ## if there's no arg we default to read from ## standard input if not argv: import sys self.setInput(sys.stdin) return ## get 1st argument arg1 = argv[0] ## case 2: ## if arg1 is a string, we assume it's a file name ## and open a stream using 2nd argument as open ## mode. If there's no 2nd argument we fall back to ## mode '+rb'. if is_string_type(arg1): f = open(arg1,"rb") self.setInput(f) self.setFilename(arg1) return ## case 3: ## if arg1 is a file we wrap it by a char buffer ( ## some additional checks?? No, can't do this in ## general). if isinstance(arg1,file): self.setInput(CharBuffer(arg1)) return ## case 4: ## if arg1 is of type SharedLexerInputState we use ## argument as is. if isinstance(arg1,LexerSharedInputState): self.inputState = arg1 return ## case 5: ## check whether argument type is of type input ## buffer. If so create a SharedLexerInputState and ## go ahead. if isinstance(arg1,InputBuffer): self.setInput(LexerSharedInputState(arg1)) return ## case 6: ## check whether argument type has a method read(int) ## If so create CharBuffer ... 
try: if arg1.read: rd = Reader(arg1) cb = CharBuffer(rd) ss = LexerSharedInputState(cb) self.inputState = ss return except: pass ## case 7: ## raise wrong argument exception raise TypeError(argv) def setTabSize(self,size) : self.tabsize = size def getTabSize(self) : return self.tabsize def setCaseSensitive(self,t) : self.caseSensitive = t def setCommitToPath(self,commit) : self.commitToPath = commit def setFilename(self,f) : self.inputState.filename = f def setLine(self,line) : self.inputState.line = line def setText(self,s) : self.resetText() self.text.append(s) def getCaseSensitive(self) : return self.caseSensitive def getCaseSensitiveLiterals(self) : return self.caseSensitiveLiterals def getColumn(self) : return self.inputState.column def setColumn(self,c) : self.inputState.column = c def getCommitToPath(self) : return self.commitToPath def getFilename(self) : return self.inputState.filename def getInputBuffer(self) : return self.inputState.input def getInputState(self) : return self.inputState def setInputState(self,state) : assert isinstance(state,LexerSharedInputState) self.inputState = state def getLine(self) : return self.inputState.line def getText(self) : return str(self.text) def getTokenObject(self) : return self._returnToken def LA(self,i) : c = self.inputState.input.LA(i) if not self.caseSensitive: ### E0006 c = c.__class__.lower(c) return c def makeToken(self,type) : try: ## dynamically load a class assert self.tokenClass tok = self.tokenClass() tok.setType(type) tok.setColumn(self.inputState.tokenStartColumn) tok.setLine(self.inputState.tokenStartLine) return tok except: self.panic("unable to create new token") return Token.badToken def mark(self) : return self.inputState.input.mark() def _match_bitset(self,b) : if b.member(self.LA(1)): self.consume() else: raise MismatchedCharException(self.LA(1), b, False, self) def _match_string(self,s) : for c in s: if self.LA(1) == c: self.consume() else: raise MismatchedCharException(self.LA(1), c, False, self) def match(self,item): if is_string_type(item): return self._match_string(item) else: return self._match_bitset(item) def matchNot(self,c) : if self.LA(1) != c: self.consume() else: raise MismatchedCharException(self.LA(1), c, True, self) def matchRange(self,c1,c2) : if self.LA(1) < c1 or self.LA(1) > c2 : raise MismatchedCharException(self.LA(1), c1, c2, False, self) else: self.consume() def newline(self) : self.inputState.line += 1 self.inputState.column = 1 def tab(self) : c = self.getColumn() nc = ( ((c-1)/self.tabsize) + 1) * self.tabsize + 1 self.setColumn(nc) def panic(self,s='') : print "CharScanner: panic: " + s sys.exit(1) def reportError(self,ex) : print ex def reportError(self,s) : if not self.getFilename(): print "error: " + str(s) else: print self.getFilename() + ": error: " + str(s) def reportWarning(self,s) : if not self.getFilename(): print "warning: " + str(s) else: print self.getFilename() + ": warning: " + str(s) def resetText(self) : self.text.setLength(0) self.inputState.tokenStartColumn = self.inputState.column self.inputState.tokenStartLine = self.inputState.line def rewind(self,pos) : self.inputState.input.rewind(pos) def setTokenObjectClass(self,cl): self.tokenClass = cl def testForLiteral(self,token): if not token: return assert isinstance(token,Token) _type = token.getType() ## special tokens can't be literals if _type in [SKIP,INVALID_TYPE,EOF_TYPE,NULL_TREE_LOOKAHEAD] : return _text = token.getText() if not _text: return assert is_string_type(_text) _type = self.testLiteralsTable(_text,_type) 
token.setType(_type) return _type def testLiteralsTable(self,*args): if is_string_type(args[0]): s = args[0] i = args[1] else: s = self.text.getString() i = args[0] ## check whether integer has been given if not isinstance(i,int): assert isinstance(i,int) ## check whether we have a dict assert isinstance(self.literals,dict) try: ## E0010 if not self.caseSensitiveLiterals: s = s.__class__.lower(s) i = self.literals[s] except: pass return i def toLower(self,c): return c.__class__.lower() def traceIndent(self): print ' ' * self.traceDepth def traceIn(self,rname): self.traceDepth += 1 self.traceIndent() print "> lexer %s c== %s" % (rname,self.LA(1)) def traceOut(self,rname): self.traceIndent() print "< lexer %s c== %s" % (rname,self.LA(1)) self.traceDepth -= 1 def uponEOF(self): pass def append(self,c): if self.saveConsumedInput : self.text.append(c) def commit(self): self.inputState.input.commit() def consume(self): if not self.inputState.guessing: c = self.LA(1) if self.caseSensitive: self.append(c) else: # use input.LA(), not LA(), to get original case # CharScanner.LA() would toLower it. c = self.inputState.input.LA(1) self.append(c) if c and c in "\t": self.tab() else: self.inputState.column += 1 self.inputState.input.consume() ## Consume chars until one matches the given char def consumeUntil_char(self,c): while self.LA(1) != EOF_CHAR and self.LA(1) != c: self.consume() ## Consume chars until one matches the given set def consumeUntil_bitset(self,bitset): while self.LA(1) != EOF_CHAR and not self.set.member(self.LA(1)): self.consume() ### If symbol seen is EOF then generate and set token, otherwise ### throw exception. def default(self,la1): if not la1 : self.uponEOF() self._returnToken = self.makeToken(EOF_TYPE) else: self.raise_NoViableAlt(la1) def filterdefault(self,la1,*args): if not la1: self.uponEOF() self._returnToken = self.makeToken(EOF_TYPE) return if not args: self.consume() raise TryAgain() else: ### apply filter object self.commit(); try: func=args[0] args=args[1:] apply(func,args) except RecognitionException, e: ## catastrophic failure self.reportError(e); self.consume(); raise TryAgain() def raise_NoViableAlt(self,la1=None): if not la1: la1 = self.LA(1) fname = self.getFilename() line = self.getLine() col = self.getColumn() raise NoViableAltForCharException(la1,fname,line,col) def set_return_token(self,_create,_token,_ttype,_offset): if _create and not _token and (not _ttype == SKIP): string = self.text.getString(_offset) _token = self.makeToken(_ttype) _token.setText(string) self._returnToken = _token return _token ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### CharScannerIterator ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class CharScannerIterator: def __init__(self,inst): if isinstance(inst,CharScanner): self.inst = inst return raise TypeError("CharScannerIterator requires CharScanner object") def next(self): assert self.inst item = self.inst.nextToken() if not item or item.isEOF(): raise StopIteration() return item ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### BitSet ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### I'm assuming here that a long is 64bits. It appears however, that ### a long is of any size. That means we can use a single long as the ### bitset (!), ie. Python would do almost all the work (TBD). 
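### Worked illustration (added remark, not in the original source),
### assuming the 64-bit words declared below (BITS=64, LOG_BITS=6,
### MOD_MASK=63): bit n is stored in word n >> LOG_BITS under the mask
### 1L << (n & MOD_MASK).  For example bit 97 (ord('a')) sits in
### data[1] with mask 1L << 33; set() grows self.data on demand so that
### word exists before the mask is or-ed in.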
class BitSet(object): BITS = 64 NIBBLE = 4 LOG_BITS = 6 MOD_MASK = BITS -1 def __init__(self,data=None): if not data: BitSet.__init__(self,[long(0)]) return if isinstance(data,int): BitSet.__init__(self,[long(data)]) return if isinstance(data,long): BitSet.__init__(self,[data]) return if not isinstance(data,list): raise TypeError("BitSet requires integer, long, or " + "list argument") for x in data: if not isinstance(x,long): raise TypeError(self,"List argument item is " + "not a long: %s" % (x)) self.data = data def __str__(self): bits = len(self.data) * BitSet.BITS s = "" for i in xrange(0,bits): if self.at(i): s += "1" else: s += "o" if not ((i+1) % 10): s += '|%s|' % (i+1) return s def __repr__(self): return str(self) def member(self,item): if not item: return False if isinstance(item,int): return self.at(item) if not is_string_type(item): raise TypeError(self,"char or unichar expected: %s" % (item)) ## char is a (unicode) string with at most lenght 1, ie. ## a char. if len(item) != 1: raise TypeError(self,"char expected: %s" % (item)) ### handle ASCII/UNICODE char num = ord(item) ### check whether position num is in bitset return self.at(num) def wordNumber(self,bit): return bit >> BitSet.LOG_BITS def bitMask(self,bit): pos = bit & BitSet.MOD_MASK ## bit mod BITS return (1L << pos) def set(self,bit,on=True): # grow bitset as required (use with care!) i = self.wordNumber(bit) mask = self.bitMask(bit) if i>=len(self.data): d = i - len(self.data) + 1 for x in xrange(0,d): self.data.append(0L) assert len(self.data) == i+1 if on: self.data[i] |= mask else: self.data[i] &= (~mask) ### make add an alias for set add = set def off(self,bit,off=True): self.set(bit,not off) def at(self,bit): i = self.wordNumber(bit) v = self.data[i] m = self.bitMask(bit) return v & m ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### some further funcs ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### def illegalarg_ex(func): raise ValueError( "%s is only valid if parser is built for debugging" % (func.func_name)) def runtime_ex(func): raise RuntimeException( "%s is only valid if parser is built for debugging" % (func.func_name)) ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### TokenBuffer ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class TokenBuffer(object): def __init__(self,stream): self.input = stream self.nMarkers = 0 self.markerOffset = 0 self.numToConsume = 0 self.queue = Queue() def reset(self) : self.nMarkers = 0 self.markerOffset = 0 self.numToConsume = 0 self.queue.reset() def consume(self) : self.numToConsume += 1 def fill(self, amount): self.syncConsume() while self.queue.length() < (amount + self.markerOffset): self.queue.append(self.input.nextToken()) def getInput(self): return self.input def LA(self,k) : self.fill(k) return self.queue.elementAt(self.markerOffset + k - 1).type def LT(self,k) : self.fill(k) return self.queue.elementAt(self.markerOffset + k - 1) def mark(self) : self.syncConsume() self.nMarkers += 1 return self.markerOffset def rewind(self,mark) : self.syncConsume() self.markerOffset = mark self.nMarkers -= 1 def syncConsume(self) : while self.numToConsume > 0: if self.nMarkers > 0: # guess mode -- leave leading characters and bump offset. 
self.markerOffset += 1 else: # normal mode -- remove first character self.queue.removeFirst() self.numToConsume -= 1 def __str__(self): return "(%s,%s,%s,%s,%s)" % ( self.input, self.nMarkers, self.markerOffset, self.numToConsume, self.queue) def __repr__(self): return str(self) ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### ParserSharedInputState ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class ParserSharedInputState(object): def __init__(self): self.input = None self.reset() def reset(self): self.guessing = 0 self.filename = None if self.input: self.input.reset() ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### Parser ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class Parser(object): def __init__(self, *args, **kwargs): self.tokenNames = None self.returnAST = None self.astFactory = None self.tokenTypeToASTClassMap = {} self.ignoreInvalidDebugCalls = False self.traceDepth = 0 if not args: self.inputState = ParserSharedInputState() return arg0 = args[0] assert isinstance(arg0,ParserSharedInputState) self.inputState = arg0 return def getTokenTypeToASTClassMap(self): return self.tokenTypeToASTClassMap def addMessageListener(self, l): if not self.ignoreInvalidDebugCalls: illegalarg_ex(addMessageListener) def addParserListener(self,l) : if (not self.ignoreInvalidDebugCalls) : illegalarg_ex(addParserListener) def addParserMatchListener(self, l) : if (not self.ignoreInvalidDebugCalls) : illegalarg_ex(addParserMatchListener) def addParserTokenListener(self, l) : if (not self.ignoreInvalidDebugCalls): illegalarg_ex(addParserTokenListener) def addSemanticPredicateListener(self, l) : if (not self.ignoreInvalidDebugCalls): illegalarg_ex(addSemanticPredicateListener) def addSyntacticPredicateListener(self, l) : if (not self.ignoreInvalidDebugCalls): illegalarg_ex(addSyntacticPredicateListener) def addTraceListener(self, l) : if (not self.ignoreInvalidDebugCalls): illegalarg_ex(addTraceListener) def consume(self): raise NotImplementedError() def _consumeUntil_type(self,tokenType): while self.LA(1) != EOF_TYPE and self.LA(1) != tokenType: self.consume() def _consumeUntil_bitset(self, set): while self.LA(1) != EOF_TYPE and not set.member(self.LA(1)): self.consume() def consumeUntil(self,arg): if isinstance(arg,int): self._consumeUntil_type(arg) else: self._consumeUntil_bitset(arg) def defaultDebuggingSetup(self): pass def getAST(self) : return self.returnAST def getASTFactory(self) : return self.astFactory def getFilename(self) : return self.inputState.filename def getInputState(self) : return self.inputState def setInputState(self, state) : self.inputState = state def getTokenName(self,num) : return self.tokenNames[num] def getTokenNames(self) : return self.tokenNames def isDebugMode(self) : return self.false def LA(self, i): raise NotImplementedError() def LT(self, i): raise NotImplementedError() def mark(self): return self.inputState.input.mark() def _match_int(self,t): if (self.LA(1) != t): raise MismatchedTokenException( self.tokenNames, self.LT(1), t, False, self.getFilename()) else: self.consume() def _match_set(self, b): if (not b.member(self.LA(1))): raise MismatchedTokenException( self.tokenNames,self.LT(1), b, False, self.getFilename()) else: self.consume() def match(self,set) : if isinstance(set,int): self._match_int(set) return if isinstance(set,BitSet): self._match_set(set) return raise TypeError("Parser.match requires integer ot BitSet argument") def matchNot(self,t): if 
self.LA(1) == t: raise MismatchedTokenException( tokenNames, self.LT(1), t, True, self.getFilename()) else: self.consume() def removeMessageListener(self, l) : if (not self.ignoreInvalidDebugCalls): runtime_ex(removeMessageListener) def removeParserListener(self, l) : if (not self.ignoreInvalidDebugCalls): runtime_ex(removeParserListener) def removeParserMatchListener(self, l) : if (not self.ignoreInvalidDebugCalls): runtime_ex(removeParserMatchListener) def removeParserTokenListener(self, l) : if (not self.ignoreInvalidDebugCalls): runtime_ex(removeParserTokenListener) def removeSemanticPredicateListener(self, l) : if (not self.ignoreInvalidDebugCalls): runtime_ex(removeSemanticPredicateListener) def removeSyntacticPredicateListener(self, l) : if (not self.ignoreInvalidDebugCalls): runtime_ex(removeSyntacticPredicateListener) def removeTraceListener(self, l) : if (not self.ignoreInvalidDebugCalls): runtime_ex(removeTraceListener) def reportError(self,x) : fmt = "syntax error:" f = self.getFilename() if f: fmt = ("%s:" % f) + fmt if isinstance(x,Token): line = x.getColumn() col = x.getLine() text = x.getText() fmt = fmt + 'unexpected symbol at line %s (column %s) : "%s"' print >>sys.stderr, fmt % (line,col,text) else: print >>sys.stderr, fmt,str(x) def reportWarning(self,s): f = self.getFilename() if f: print "%s:warning: %s" % (f,str(x)) else: print "warning: %s" % (str(x)) def rewind(self, pos) : self.inputState.input.rewind(pos) def setASTFactory(self, f) : self.astFactory = f def setASTNodeClass(self, cl) : self.astFactory.setASTNodeType(cl) def setASTNodeType(self, nodeType) : self.setASTNodeClass(nodeType) def setDebugMode(self, debugMode) : if (not self.ignoreInvalidDebugCalls): runtime_ex(setDebugMode) def setFilename(self, f) : self.inputState.filename = f def setIgnoreInvalidDebugCalls(self, value) : self.ignoreInvalidDebugCalls = value def setTokenBuffer(self, t) : self.inputState.input = t def traceIndent(self): print " " * self.traceDepth def traceIn(self,rname): self.traceDepth += 1 self.trace("> ", rname) def traceOut(self,rname): self.trace("< ", rname) self.traceDepth -= 1 ### wh: moved from ASTFactory to Parser def addASTChild(self,currentAST, child): if not child: return if not currentAST.root: currentAST.root = child elif not currentAST.child: currentAST.root.setFirstChild(child) else: currentAST.child.setNextSibling(child) currentAST.child = child currentAST.advanceChildToEnd() ### wh: moved from ASTFactory to Parser def makeASTRoot(self,currentAST,root) : if root: ### Add the current root as a child of new root root.addChild(currentAST.root) ### The new current child is the last sibling of the old root currentAST.child = currentAST.root currentAST.advanceChildToEnd() ### Set the new root currentAST.root = root ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### LLkParser ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class LLkParser(Parser): def __init__(self, *args, **kwargs): try: arg1 = args[0] except: arg1 = 1 if isinstance(arg1,int): super(LLkParser,self).__init__() self.k = arg1 return if isinstance(arg1,ParserSharedInputState): super(LLkParser,self).__init__(arg1) self.set_k(1,*args) return if isinstance(arg1,TokenBuffer): super(LLkParser,self).__init__() self.setTokenBuffer(arg1) self.set_k(1,*args) return if isinstance(arg1,TokenStream): super(LLkParser,self).__init__() tokenBuf = TokenBuffer(arg1) self.setTokenBuffer(tokenBuf) self.set_k(1,*args) return ### unknown argument raise TypeError("LLkParser 
requires integer, " + "ParserSharedInputStream or TokenStream argument") def consume(self): self.inputState.input.consume() def LA(self,i): return self.inputState.input.LA(i) def LT(self,i): return self.inputState.input.LT(i) def set_k(self,index,*args): try: self.k = args[index] except: self.k = 1 def trace(self,ee,rname): print type(self) self.traceIndent() guess = "" if self.inputState.guessing > 0: guess = " [guessing]" print(ee + rname + guess) for i in xrange(1,self.k+1): if i != 1: print(", ") if self.LT(i) : v = self.LT(i).getText() else: v = "null" print "LA(%s) == %s" % (i,v) print("\n") def traceIn(self,rname): self.traceDepth += 1; self.trace("> ", rname); def traceOut(self,rname): self.trace("< ", rname); self.traceDepth -= 1; ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### TreeParserSharedInputState ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class TreeParserSharedInputState(object): def __init__(self): self.guessing = 0 ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### TreeParser ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class TreeParser(object): def __init__(self, *args, **kwargs): self.inputState = TreeParserSharedInputState() self._retTree = None self.tokenNames = [] self.returnAST = None self.astFactory = ASTFactory() self.traceDepth = 0 def getAST(self): return self.returnAST def getASTFactory(self): return self.astFactory def getTokenName(self,num) : return self.tokenNames[num] def getTokenNames(self): return self.tokenNames def match(self,t,set) : assert isinstance(set,int) or isinstance(set,BitSet) if not t or t == ASTNULL: raise MismatchedTokenException(self.getTokenNames(), t,set, False) if isinstance(set,int) and t.getType() != set: raise MismatchedTokenException(self.getTokenNames(), t,set, False) if isinstance(set,BitSet) and not set.member(t.getType): raise MismatchedTokenException(self.getTokenNames(), t,set, False) def matchNot(self,t, ttype) : if not t or (t == ASTNULL) or (t.getType() == ttype): raise MismatchedTokenException(getTokenNames(), t, ttype, True) def reportError(self,ex): print >>sys.stderr,"error:",ex def reportWarning(self, s): print "warning:",s def setASTFactory(self,f): self.astFactory = f def setASTNodeType(self,nodeType): self.setASTNodeClass(nodeType) def setASTNodeClass(self,nodeType): self.astFactory.setASTNodeType(nodeType) def traceIndent(self): print " " * self.traceDepth def traceIn(self,rname,t): self.traceDepth += 1 self.traceIndent() print("> " + rname + "(" + ifelse(t,str(t),"null") + ")" + ifelse(self.inputState.guessing>0,"[guessing]","")) def traceOut(self,rname,t): self.traceIndent() print("< " + rname + "(" + ifelse(t,str(t),"null") + ")" + ifelse(self.inputState.guessing>0,"[guessing]","")) self.traceDepth -= 1 ### wh: moved from ASTFactory to TreeParser def addASTChild(self,currentAST, child): if not child: return if not currentAST.root: currentAST.root = child elif not currentAST.child: currentAST.root.setFirstChild(child) else: currentAST.child.setNextSibling(child) currentAST.child = child currentAST.advanceChildToEnd() ### wh: moved from ASTFactory to TreeParser def makeASTRoot(self,currentAST,root): if root: ### Add the current root as a child of new root root.addChild(currentAST.root) ### The new current child is the last sibling of the old root currentAST.child = currentAST.root currentAST.advanceChildToEnd() ### Set the new root currentAST.root = root 
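if __name__ == '__main__':
    ### Added demonstration sketch - not part of the original runtime.
    ### Generated lexers and parsers pass BitSet instances to member()
    ### to test token types and characters against their match sets;
    ### the bits are kept in a list of Python longs (see BitSet above).
    b = BitSet()
    b.add(EOF_TYPE)        ### mark token type 1
    b.add(ord('a'))        ### mark the character 'a' (bit 97)
    print "EOF_TYPE member:", b.member(EOF_TYPE) != 0
    print "'a' member     :", b.member('a') != 0
    print "'b' member     :", b.member('b') != 0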
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
###                     funcs to work on trees                     ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###

def rightmost(ast):
    if ast:
        while(ast.right):
            ast = ast.right
    return ast

def cmptree(s, t, partial):
    while(s and t):
        ### as a quick optimization, check roots first.
        if not s.equals(t):
            return False
        ### if roots match, do full list match test on children.
        if not cmptree(s.getFirstChild(), t.getFirstChild(), partial):
            return False
        s = s.getNextSibling()
        t = t.getNextSibling()
    r = ifelse(partial, not t, not s and not t)
    return r

###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
###                              AST                               ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###

class AST(object):
    def __init__(self):
        pass

    def addChild(self, c):
        pass

    def equals(self, t):
        return False

    def equalsList(self, t):
        return False

    def equalsListPartial(self, t):
        return False

    def equalsTree(self, t):
        return False

    def equalsTreePartial(self, t):
        return False

    def findAll(self, tree):
        return None

    def findAllPartial(self, subtree):
        return None

    def getFirstChild(self):
        return self

    def getNextSibling(self):
        return self

    def getText(self):
        return ""

    def getType(self):
        return INVALID_TYPE

    def getLine(self):
        return 0

    def getColumn(self):
        return 0

    def getNumberOfChildren(self):
        return 0

    def initialize(self, *args):
        pass

    def setFirstChild(self, c):
        pass

    def setNextSibling(self, n):
        pass

    def setText(self, text):
        pass

    def setType(self, ttype):
        pass

    def toString(self):
        return self.getText()
    __str__ = toString

    def toStringList(self):
        return self.getText()

    def toStringTree(self):
        return self.getText()

###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
###                          ASTNULLType                           ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###

### There is only one instance of this class
class ASTNULLType(AST):
    def __init__(self):
        AST.__init__(self)

    def getText(self):
        return ""

    def getType(self):
        return NULL_TREE_LOOKAHEAD

###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###
###                            BaseAST                             ###
###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx###

class BaseAST(AST):

    verboseStringConversion = False
    tokenNames = None

    def __init__(self):
        self.down = None   ## kid
        self.right = None  ## sibling

    def addChild(self, node):
        if node:
            t = rightmost(self.down)
            if t:
                t.right = node
            else:
                assert not self.down
                self.down = node

    def getNumberOfChildren(self):
        t = self.down
        n = 0
        while t:
            n += 1
            t = t.right
        return n

    def doWorkForFindAll(self, v, target, partialMatch):
        sibling = self
        while sibling:
            c1 = partialMatch and sibling.equalsTreePartial(target)
            if c1:
                v.append(sibling)
            else:
                c2 = not partialMatch and sibling.equalsTree(target)
                if c2:
                    v.append(sibling)
            ### regardless of match or not, check any children for matches
            if sibling.getFirstChild():
                sibling.getFirstChild().doWorkForFindAll(v, target, partialMatch)
            sibling = sibling.getNextSibling()

    ### Is node t equal to 'self' in terms of token type and text?
    def equals(self, t):
        if not t:
            return False
        return self.getText() == t.getText() and self.getType() == t.getType()

    ### Is t an exact structural and equals() match of this tree. The
    ### 'self' reference is considered the start of a sibling list.
    ###
    def equalsList(self, t):
        return cmptree(self, t, partial=False)

    ### Is 't' a subtree of this list?
    ### The siblings of the root are NOT ignored.
### def equalsListPartial(self,t): return cmptree(self,t,partial=True) ### Is tree rooted at 'self' equal to 't'? The siblings ### of 'self' are ignored. ### def equalsTree(self, t): return self.equals(t) and \ cmptree(self.getFirstChild(), t.getFirstChild(), partial=False) ### Is 't' a subtree of the tree rooted at 'self'? The siblings ### of 'self' are ignored. ### def equalsTreePartial(self, t): if not t: return True return self.equals(t) and cmptree( self.getFirstChild(), t.getFirstChild(), partial=True) ### Walk the tree looking for all exact subtree matches. Return ### an ASTEnumerator that lets the caller walk the list ### of subtree roots found herein. def findAll(self,target): roots = [] ### the empty tree cannot result in an enumeration if not target: return None # find all matches recursively self.doWorkForFindAll(roots, target, False) return roots ### Walk the tree looking for all subtrees. Return ### an ASTEnumerator that lets the caller walk the list ### of subtree roots found herein. def findAllPartial(self,sub): roots = [] ### the empty tree cannot result in an enumeration if not sub: return None self.doWorkForFindAll(roots, sub, True) ### find all matches recursively return roots ### Get the first child of this node None if not children def getFirstChild(self): return self.down ### Get the next sibling in line after this one def getNextSibling(self): return self.right ### Get the token text for this node def getText(self): return "" ### Get the token type for this node def getType(self): return 0 def getLine(self): return 0 def getColumn(self): return 0 ### Remove all children */ def removeChildren(self): self.down = None def setFirstChild(self,c): self.down = c def setNextSibling(self, n): self.right = n ### Set the token text for this node def setText(self, text): pass ### Set the token type for this node def setType(self, ttype): pass ### static def setVerboseStringConversion(verbose,names): verboseStringConversion = verbose tokenNames = names setVerboseStringConversion = staticmethod(setVerboseStringConversion) ### Return an array of strings that maps token ID to it's text. 
## @since 2.7.3 def getTokenNames(): return tokenNames def toString(self): return self.getText() ### return tree as lisp string - sibling included def toStringList(self): ts = self.toStringTree() sib = self.getNextSibling() if sib: ts += sib.toStringList() return ts __str__ = toStringList ### return tree as string - siblings ignored def toStringTree(self): ts = "" kid = self.getFirstChild() if kid: ts += " (" ts += " " + self.toString() if kid: ts += kid.toStringList() ts += " )" return ts ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### CommonAST ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### Common AST node implementation class CommonAST(BaseAST): def __init__(self,token=None): super(CommonAST,self).__init__() self.ttype = INVALID_TYPE self.text = "" self.line = 0 self.column= 0 self.initialize(token) #assert self.text ### Get the token text for this node def getText(self): return self.text ### Get the token type for this node def getType(self): return self.ttype ### Get the line for this node def getLine(self): return self.line ### Get the column for this node def getColumn(self): return self.column def initialize(self,*args): if not args: return arg0 = args[0] if isinstance(arg0,int): arg1 = args[1] self.setType(arg0) self.setText(arg1) return if isinstance(arg0,AST) or isinstance(arg0,Token): self.setText(arg0.getText()) self.setType(arg0.getType()) self.line = arg0.getLine() self.column = arg0.getColumn() return ### Set the token text for this node def setText(self,text_): assert is_string_type(text_) self.text = text_ ### Set the token type for this node def setType(self,ttype_): assert isinstance(ttype_,int) self.ttype = ttype_ ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### CommonASTWithHiddenTokens ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class CommonASTWithHiddenTokens(CommonAST): def __init__(self,*args): CommonAST.__init__(self,*args) self.hiddenBefore = None self.hiddenAfter = None def getHiddenAfter(self): return self.hiddenAfter def getHiddenBefore(self): return self.hiddenBefore def initialize(self,*args): CommonAST.initialize(self,*args) if args and isinstance(args[0],Token): assert isinstance(args[0],CommonHiddenStreamToken) self.hiddenBefore = args[0].getHiddenBefore() self.hiddenAfter = args[0].getHiddenAfter() ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### ASTPair ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class ASTPair(object): def __init__(self): self.root = None ### current root of tree self.child = None ### current child to which siblings are added ### Make sure that child is the last sibling */ def advanceChildToEnd(self): if self.child: while self.child.getNextSibling(): self.child = self.child.getNextSibling() ### Copy an ASTPair. 
Don't call it clone() because we want type-safety */ def copy(self): tmp = ASTPair() tmp.root = self.root tmp.child = self.child return tmp def toString(self): r = ifelse(not root,"null",self.root.getText()) c = ifelse(not child,"null",self.child.getText()) return "[%s,%s]" % (r,c) __str__ = toString __repr__ = toString ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### ASTFactory ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class ASTFactory(object): def __init__(self,table=None): self._class = None self._classmap = ifelse(table,table,None) def create(self,*args): if not args: return self.create(INVALID_TYPE) arg0 = args[0] arg1 = None arg2 = None try: arg1 = args[1] arg2 = args[2] except: pass # ctor(int) if isinstance(arg0,int) and not arg2: ### get class for 'self' type c = self.getASTNodeType(arg0) t = self.create(c) if t: t.initialize(arg0, ifelse(arg1,arg1,"")) return t # ctor(int,something) if isinstance(arg0,int) and arg2: t = self.create(arg2) if t: t.initialize(arg0,arg1) return t # ctor(AST) if isinstance(arg0,AST): t = self.create(arg0.getType()) if t: t.initialize(arg0) return t # ctor(token) if isinstance(arg0,Token) and not arg1: ttype = arg0.getType() assert isinstance(ttype,int) t = self.create(ttype) if t: t.initialize(arg0) return t # ctor(token,class) if isinstance(arg0,Token) and arg1: assert isinstance(arg1,type) assert issubclass(arg1,AST) # this creates instance of 'arg1' using 'arg0' as # argument. Wow, that's magic! t = arg1(arg0) assert t and isinstance(t,AST) return t # ctor(class) if isinstance(arg0,type): ### next statement creates instance of type (!) t = arg0() assert isinstance(t,AST) return t def setASTNodeClass(self,className=None): if not className: return assert isinstance(className,type) assert issubclass(className,AST) self._class = className ### kind of misnomer - use setASTNodeClass instead. setASTNodeType = setASTNodeClass def getASTNodeClass(self): return self._class def getTokenTypeToASTClassMap(self): return self._classmap def setTokenTypeToASTClassMap(self,amap): self._classmap = amap def error(self, e): import sys print >> sys.stderr, e def setTokenTypeASTNodeType(self, tokenType, className): """ Specify a mapping between a token type and a (AST) class. """ if not self._classmap: self._classmap = {} if not className: try: del self._classmap[tokenType] except: pass else: ### here we should also perform actions to ensure that ### a. class can be loaded ### b. class is a subclass of AST ### assert isinstance(className,type) assert issubclass(className,AST) ## a & b ### enter the class self._classmap[tokenType] = className def getASTNodeType(self,tokenType): """ For a given token type return the AST node type. First we lookup a mapping table, second we try _class and finally we resolve to "antlr.CommonAST". """ # first if self._classmap: try: c = self._classmap[tokenType] if c: return c except: pass # second if self._class: return self._class # default return CommonAST ### methods that have been moved to file scope - just listed ### here to be somewhat consistent with original API def dup(self,t): return antlr.dup(t,self) def dupList(self,t): return antlr.dupList(t,self) def dupTree(self,t): return antlr.dupTree(t,self) ### methods moved to other classes ### 1. makeASTRoot -> Parser ### 2. 
addASTChild -> Parser ### non-standard: create alias for longish method name maptype = setTokenTypeASTNodeType ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### ASTVisitor ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### class ASTVisitor(object): def __init__(self,*args): pass def visit(self,ast): pass ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ### static methods and variables ### ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx### ASTNULL = ASTNULLType() ### wh: moved from ASTFactory as there's nothing ASTFactory-specific ### in this method. def make(*nodes): if not nodes: return None for i in xrange(0,len(nodes)): node = nodes[i] if node: assert isinstance(node,AST) root = nodes[0] tail = None if root: root.setFirstChild(None) for i in xrange(1,len(nodes)): if not nodes[i]: continue if not root: root = tail = nodes[i] elif not tail: root.setFirstChild(nodes[i]) tail = root.getFirstChild() else: tail.setNextSibling(nodes[i]) tail = tail.getNextSibling() ### Chase tail to last sibling while tail.getNextSibling(): tail = tail.getNextSibling() return root def dup(t,factory): if not t: return None if factory: dup_t = factory.create(t.__class__) else: raise TypeError("dup function requires ASTFactory argument") dup_t.initialize(t) return dup_t def dupList(t,factory): result = dupTree(t,factory) nt = result while t: ## for each sibling of the root t = t.getNextSibling() nt.setNextSibling(dupTree(t,factory)) nt = nt.getNextSibling() return result def dupTree(t,factory): result = dup(t,factory) if t: result.setFirstChild(dupList(t.getFirstChild(),factory)) return result ###xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ### $Id: antlr.py,v 1.1.1.1 2005/02/02 10:24:36 geronimo Exp $ # Local Variables: *** # mode: python *** # py-indent-offset: 4 *** # End: *** antlr-2.7.7/lib/csharp/0000755000175000017500000000000010522211615014626 5ustar twernertwernerantlr-2.7.7/lib/csharp/Makefile.in0000644000175000017500000000037010522211615016673 0ustar twernertwerner############################################################################### # $Id:$ ############################################################################### ## do not change this value subdir=lib/csharp @stdvars@ @stdmake@ @stddeps@ antlr-2.7.7/lib/csharp/antlr.net-runtime.sln0000644000175000017500000000342310522211615020734 0ustar twernertwernerMicrosoft Visual Studio Solution File, Format Version 7.00 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "JavaParser", "..\..\examples\csharp\java\JavaParser.csproj", "{A2DB8AF5-4C4B-4F6D-94D1-A6610E1D21B6}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "antlr.runtime", "antlr.runtime\antlr.runtime.csproj", "{CB7CC882-ED47-46C0-AAAE-7A437F22F1C6}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "antlr.astframe", "antlr.astframe\antlr.astframe.csproj", "{AAFA5AC9-5967-443D-A564-71236163239D}" EndProject Global GlobalSection(SolutionConfiguration) = preSolution ConfigName.0 = Debug ConfigName.1 = Release EndGlobalSection GlobalSection(ProjectDependencies) = postSolution EndGlobalSection GlobalSection(ProjectConfiguration) = postSolution {A2DB8AF5-4C4B-4F6D-94D1-A6610E1D21B6}.Debug.ActiveCfg = Debug|.NET {A2DB8AF5-4C4B-4F6D-94D1-A6610E1D21B6}.Debug.Build.0 = Debug|.NET {A2DB8AF5-4C4B-4F6D-94D1-A6610E1D21B6}.Release.ActiveCfg = Release|.NET {A2DB8AF5-4C4B-4F6D-94D1-A6610E1D21B6}.Release.Build.0 = Release|.NET 
{CB7CC882-ED47-46C0-AAAE-7A437F22F1C6}.Debug.ActiveCfg = Debug|.NET {CB7CC882-ED47-46C0-AAAE-7A437F22F1C6}.Debug.Build.0 = Debug|.NET {CB7CC882-ED47-46C0-AAAE-7A437F22F1C6}.Release.ActiveCfg = Release|.NET {CB7CC882-ED47-46C0-AAAE-7A437F22F1C6}.Release.Build.0 = Release|.NET {AAFA5AC9-5967-443D-A564-71236163239D}.Debug.ActiveCfg = Debug|.NET {AAFA5AC9-5967-443D-A564-71236163239D}.Debug.Build.0 = Debug|.NET {AAFA5AC9-5967-443D-A564-71236163239D}.Release.ActiveCfg = Release|.NET {AAFA5AC9-5967-443D-A564-71236163239D}.Release.Build.0 = Release|.NET EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution EndGlobalSection GlobalSection(ExtensibilityAddIns) = postSolution EndGlobalSection EndGlobal antlr-2.7.7/lib/csharp/antlr.runtime.build0000755000175000017500000002140510522211615020456 0ustar twernertwerner antlr-2.7.7/lib/csharp/antlr.astframe/0000755000175000017500000000000010522211615017547 5ustar twernertwernerantlr-2.7.7/lib/csharp/antlr.astframe/Makefile.in0000644000175000017500000000377510522211615021630 0ustar twernertwerner##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stdvars@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ## do not change this value subdir=lib/csharp/antlr.astframe all :: lib lib: @astframe_net@ ## Get all files in directory. antlr_cs_FILES = $(shell @FIND@ $(_srcdir) -name \*.cs) ## Next rule is about to compile antlr.astframe.dll. The whole ## rule looks bit complicated. ## - First we remove the target. Later we check then wether the ## target has really been created. Due to experience we don't ## trust our tool's exit code. ## - Our target depends on antlr.runtime.dll and so we need to ## add (+) it via appropriate environment variable CSHARPCFLAGS. ## - Then we check whether target really has been made. ## - Finally we make sure to update ASTFRAME_NET. ASTFRAME_NET ## may point anywhere in the filesystem, especially also in this ## directory. We can't therefore remove ASTFRAME_NET as astframe_ ## net and ASTFRAME_NET can be identical. To avoid this we make ## a copy of astframe_net which dies not clash with ASTFRAME_NET ## by adding the process id to the filename. Finally we copy ## and remove temporaries. @astframe_net@ : $(antlr_cs_FILES) @ANTLR_NET@ @-@RMF@ $@ @ @CSHARP_COMPILE_CMD@ $@ $(antlr_cs_FILES) @test -f $@ || exit 1 @ @CP@ $@ $@.$$$$ && @RMF@ @ASTFRAME_NET@ ; \ @CP@ $@.$$$$ @ASTFRAME_NET@ ; \ @CP@ @ASTFRAME_NET@ $@ ; \ @RMF@ $@.$$$$ clean :: @RMF@ *.obj *.o *.a *.lib *.so *.dll *~ @astframe_net@ @ASTFRAME_NET@ distclean :: clean @RMF@ Makefile ## install our target .. install :: @ASTFRAME_NET@ @$(MKDIR) -p "$(libdir)" @@ECHO@ "install C# core files .. 
" @for f in @ASTFRAME_NET@ ; do \ @ECHO@ "install $${f}" ; \ if test -f "$${f}" ; then \ $(INSTALL) -m 444 "$${f}" "$(libdir)" ; \ $(INSTALL) -m 444 "$${f}" "$(datadir)/$(versioneddir)" ; \ fi ;\ done ## dependencies @astframe_net@ : Makefile @astframe_net@ : @abs_this_builddir@/scripts/csc.sh ## other dependencies to be listed below @stddeps@ antlr-2.7.7/lib/csharp/antlr.astframe/antlr.astframe.csproj0000644000175000017500000001043610522211615023716 0ustar twernertwerner antlr-2.7.7/lib/csharp/antlr.astframe/antlr.debug.misc/0000755000175000017500000000000010522211615022706 5ustar twernertwernerantlr-2.7.7/lib/csharp/antlr.astframe/antlr.debug.misc/ASTFrame.resx0000644000175000017500000001230610522211615025215 0ustar twernertwerner text/microsoft-resx 1.3 System.Resources.ResXResourceReader, System.Windows.Forms, Version=1.0.3300.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 System.Resources.ResXResourceWriter, System.Windows.Forms, Version=1.0.3300.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 ASTFrame antlr-2.7.7/lib/csharp/antlr.astframe/antlr.debug.misc/ASTFrame.cs0000644000175000017500000000533410522211615024644 0ustar twernertwernerusing System; using System.Drawing; //using System.Collections; //using System.ComponentModel; using System.Windows.Forms; //using antlr; using AST = antlr.collections.AST; namespace antlr.debug.misc { /// /// Summary description for myASTFrame. /// public class ASTFrame : System.Windows.Forms.Form { // The initial width and height of the frame private const int WIDTH = 200; private const int HEIGHT = 300; /// /// Required designer variable. /// private System.ComponentModel.Container components = null; private ASTFrame() { // // Required for Windows Form Designer support // InitializeComponent(); // // TODO: Add any constructor code after InitializeComponent call // this.Size = new System.Drawing.Size(WIDTH,HEIGHT); Application.ApplicationExit += new EventHandler(Form_OnExit); } public ASTFrame(string title, AST rootAST) : this() { this.Text = title; JTreeASTPanel treePanel = new JTreeASTPanel(new TreeViewEventHandler(tree_AfterSelect), rootAST); this.Controls.Add(treePanel); treePanel.Location= new Point(5, 5); treePanel.Dock=DockStyle.Fill; treePanel.Anchor=AnchorStyles.Top|AnchorStyles.Left; } private void Form_OnExit(object sender, EventArgs e) { this.Visible = false; this.Dispose(); } private void tree_AfterSelect(object sender, TreeViewEventArgs e) { //System.Console.Out.WriteLine("Selected: " + e.Node.Text); string path = e.Node.FullPath; path = path.Replace(e.Node.TreeView.PathSeparator, "->"); //System.Console.Out.WriteLine(e.Node.FullPath); } /// /// Clean up any resources being used. /// protected override void Dispose( bool disposing ) { if( disposing ) { if(components != null) { components.Dispose(); } } base.Dispose( disposing ); } public static void Main(string[] args) { // Create the tree nodes ASTFactory factory = new ASTFactory(); CommonAST r = (CommonAST) factory.create(0, "ROOT"); r.addChild((CommonAST) factory.create(0, "C1")); r.addChild((CommonAST) factory.create(0, "C2")); r.addChild((CommonAST) factory.create(0, "C3")); ASTFrame frame = new ASTFrame("AST JTree Example", r); Application.Run(frame); } #region Windows Form Designer generated code /// /// Required method for Designer support - do not modify /// the contents of this method with the code editor. 
/// private void InitializeComponent() { // // ASTFrame // this.AutoScaleBaseSize = new System.Drawing.Size(5, 13); this.ClientSize = new System.Drawing.Size(292, 273); this.Name = "ASTFrame"; this.Text = "ASTFrame"; } #endregion } } antlr-2.7.7/lib/csharp/antlr.astframe/antlr.debug.misc/JTreeASTPanel.resx0000644000175000017500000001231310522211615026152 0ustar twernertwerner text/microsoft-resx 1.3 System.Resources.ResXResourceReader, System.Windows.Forms, Version=1.0.3300.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 System.Resources.ResXResourceWriter, System.Windows.Forms, Version=1.0.3300.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 JTreeASTPanel antlr-2.7.7/lib/csharp/antlr.astframe/antlr.debug.misc/JTreeASTPanel.cs0000644000175000017500000000566410522211615025611 0ustar twernertwernerusing System; //using System.Collections; //using System.ComponentModel; //using System.Drawing; //using System.Data; using System.Windows.Forms; using AST = antlr.collections.AST; namespace antlr.debug.misc { /// /// Summary description for myJTreeASTPanel. /// public class JTreeASTPanel : System.Windows.Forms.UserControl { private System.Windows.Forms.TreeView tree; /// /// Required designer variable. /// private System.ComponentModel.Container components = null; private JTreeASTPanel() { // This call is required by the Windows.Forms Form Designer. InitializeComponent(); // TODO: Add any initialization after the InitForm call } public JTreeASTPanel(TreeViewEventHandler afterSelectHandler, AST rootAST) : this() { tree.AfterSelect += afterSelectHandler; tree.BeforeExpand += new TreeViewCancelEventHandler(ASTTreeNode.tree_BeforeExpand); tree.Nodes.Add(new ASTTreeNode(rootAST)); } /// /// Clean up any resources being used. /// protected override void Dispose( bool disposing ) { if( disposing ) { if(components != null) { components.Dispose(); } } base.Dispose( disposing ); } #region Component Designer generated code /// /// Required method for Designer support - do not modify /// the contents of this method with the code editor. 
/// private void InitializeComponent() { this.tree = new System.Windows.Forms.TreeView(); this.SuspendLayout(); // // tree // this.tree.Dock = System.Windows.Forms.DockStyle.Fill; this.tree.ImageIndex = -1; this.tree.ImeMode = System.Windows.Forms.ImeMode.NoControl; this.tree.Location = new System.Drawing.Point(5, 5); this.tree.Name = "tree"; this.tree.SelectedImageIndex = -1; this.tree.Size = new System.Drawing.Size(140, 140); this.tree.TabIndex = 0; // // JTreeASTPanel // this.Controls.AddRange(new System.Windows.Forms.Control[] { this.tree}); this.DockPadding.All = 5; this.Name = "JTreeASTPanel"; this.ResumeLayout(false); } #endregion } internal class ASTTreeNode : TreeNode { private AST ASTNode_; internal bool IsAlreadyExpanded = false; public AST ASTNode { get { return ASTNode_; } set { ASTNode_ = value; } } public ASTTreeNode(AST a) { ASTNode_ = a; this.Text = a.ToString(); this.Nodes.Add("Loading....."); } internal static void tree_BeforeExpand(object sender, TreeViewCancelEventArgs e) { ASTTreeNode thisNode = (ASTTreeNode)e.Node; AST parentAST = thisNode.ASTNode; AST childAST; if (!thisNode.IsAlreadyExpanded) { thisNode.Nodes.Clear(); childAST = parentAST.getFirstChild(); while (null != childAST) { thisNode.Nodes.Add(new ASTTreeNode(childAST)); childAST = childAST.getNextSibling(); } thisNode.IsAlreadyExpanded = true; } } } } antlr-2.7.7/lib/csharp/antlr.astframe/AssemblyInfo.cs0000644000175000017500000000227110522211615022473 0ustar twernertwernerusing System; using System.Reflection; using System.Runtime.CompilerServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. // TODO: Review the values of the assembly attributes [assembly: AssemblyTitle("antlr.astframe")] [assembly: AssemblyDescription("ANTLR ASTFrame for .NET")] [assembly: AssemblyCompany("www.antlr.org")] [assembly: AssemblyProduct("")] [assembly: AssemblyCopyright("")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] [assembly: AssemblyVersion("2.7.7.01")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Revision // Build Number // // You can specify all the values or you can default the Revision and Build Numbers // by using the '*' as shown below: [assembly: CLSCompliantAttribute(true)] #if STRONGNAME #pragma warning disable 1699 [assembly: AssemblyDelaySign(false)] [assembly: AssemblyKeyFile("org.antlr.snk")] #pragma warning restore 1699 #endif #if APTC [assembly: System.Security.AllowPartiallyTrustedCallers] #endifantlr-2.7.7/lib/csharp/antlr.runtime/0000755000175000017500000000000010522211615017430 5ustar twernertwernerantlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/0000755000175000017500000000000010522211615021635 5ustar twernertwernerantlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/Listener.cs0000755000175000017500000000023010522211615023747 0ustar twernertwernernamespace antlr.debug { using System; public interface Listener { void doneParsing (object source, TraceEventArgs e); void refresh (); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/DebuggingCharScanner.cs0000755000175000017500000001734310522211615026202 0ustar twernertwernernamespace antlr.debug { using System; using System.Threading; using antlr; using BitSet = antlr.collections.impl.BitSet; public abstract class DebuggingCharScanner : CharScanner, DebuggingParser { private void InitBlock() { eventSupport = new 
ScannerEventSupport(this); } public virtual void setDebugMode(bool mode) { _notDebugMode = !mode; } private ScannerEventSupport eventSupport; private bool _notDebugMode = false; protected internal string[] ruleNames; protected internal string[] semPredNames; public DebuggingCharScanner(InputBuffer cb) : base(cb) { InitBlock(); } public DebuggingCharScanner(LexerSharedInputState state) : base(state) { InitBlock(); } public virtual void addMessageListener(MessageListener l) { eventSupport.addMessageListener(l); } public virtual void addNewLineListener(NewLineListener l) { eventSupport.addNewLineListener(l); } public virtual void addParserListener(ParserListener l) { eventSupport.addParserListener(l); } public virtual void addParserMatchListener(ParserMatchListener l) { eventSupport.addParserMatchListener(l); } public virtual void addParserTokenListener(ParserTokenListener l) { eventSupport.addParserTokenListener(l); } public virtual void addSemanticPredicateListener(SemanticPredicateListener l) { eventSupport.addSemanticPredicateListener(l); } public virtual void addSyntacticPredicateListener(SyntacticPredicateListener l) { eventSupport.addSyntacticPredicateListener(l); } public virtual void addTraceListener(TraceListener l) { eventSupport.addTraceListener(l); } public override void consume() { int la_1 = - 99; try { la_1 = LA(1); } catch (CharStreamException) { } base.consume(); eventSupport.fireConsume(la_1); } protected internal virtual void fireEnterRule(int num, int data) { if (isDebugMode()) eventSupport.fireEnterRule(num, inputState.guessing, data); } protected internal virtual void fireExitRule(int num, int ttype) { if (isDebugMode()) eventSupport.fireExitRule(num, inputState.guessing, ttype); } protected internal virtual bool fireSemanticPredicateEvaluated(int type, int num, bool condition) { if (isDebugMode()) return eventSupport.fireSemanticPredicateEvaluated(type, num, condition, inputState.guessing); else return condition; } protected internal virtual void fireSyntacticPredicateFailed() { if (isDebugMode()) eventSupport.fireSyntacticPredicateFailed(inputState.guessing); } protected internal virtual void fireSyntacticPredicateStarted() { if (isDebugMode()) eventSupport.fireSyntacticPredicateStarted(inputState.guessing); } protected internal virtual void fireSyntacticPredicateSucceeded() { if (isDebugMode()) eventSupport.fireSyntacticPredicateSucceeded(inputState.guessing); } public virtual string getRuleName(int num) { return ruleNames[num]; } public virtual string getSemPredName(int num) { return semPredNames[num]; } public virtual void goToSleep() { lock(this) { try { Monitor.Wait(this); } catch (System.Threading.ThreadInterruptedException) { } } } public virtual bool isDebugMode() { return !_notDebugMode; } public override char LA(int i) { char la = base.LA(i); eventSupport.fireLA(i, la); return la; } protected internal override IToken makeToken(int t) { // do something with char buffer??? 
// try { // IToken tok = (Token)tokenObjectClass.newInstance(); // tok.setType(t); // // tok.setText(getText()); done in generated lexer now // tok.setLine(line); // return tok; // } // catch (InstantiationException ie) { // panic("can't instantiate a Token"); // } // catch (IllegalAccessException iae) { // panic("Token class is not accessible"); // } return base.makeToken(t); } public override void match(int c) { char la_1 = LA(1); try { base.match(c); eventSupport.fireMatch(Convert.ToChar(c), inputState.guessing); } catch (MismatchedCharException e) { if (inputState.guessing == 0) eventSupport.fireMismatch(la_1, Convert.ToChar(c), inputState.guessing); throw e; } } public override void match(BitSet b) { string text = this.text.ToString(); char la_1 = LA(1); try { base.match(b); eventSupport.fireMatch(la_1, b, text, inputState.guessing); } catch (MismatchedCharException e) { if (inputState.guessing == 0) eventSupport.fireMismatch(la_1, b, text, inputState.guessing); throw e; } } public override void match(string s) { System.Text.StringBuilder la_s = new System.Text.StringBuilder(""); int len = s.Length; // peek at the next len worth of characters try { for (int i = 1; i <= len; i++) { la_s.Append(base.LA(i)); } } catch (System.Exception) { } try { base.match(s); eventSupport.fireMatch(s, inputState.guessing); } catch (MismatchedCharException e) { if (inputState.guessing == 0) eventSupport.fireMismatch(la_s.ToString(), s, inputState.guessing); throw e; } } public override void matchNot(int c) { char la_1 = LA(1); try { base.matchNot(c); eventSupport.fireMatchNot(la_1, Convert.ToChar(c), inputState.guessing); } catch (MismatchedCharException e) { if (inputState.guessing == 0) eventSupport.fireMismatchNot(la_1, Convert.ToChar(c), inputState.guessing); throw e; } } public override void matchRange(int c1, int c2) { char la_1 = LA(1); try { base.matchRange(c1, c2); eventSupport.fireMatch(la_1, "" + c1 + c2, inputState.guessing); } catch (MismatchedCharException e) { if (inputState.guessing == 0) eventSupport.fireMismatch(la_1, "" + c1 + c2, inputState.guessing); throw e; } } public override void newline() { base.newline(); eventSupport.fireNewLine(getLine()); } public virtual void removeMessageListener(MessageListener l) { eventSupport.removeMessageListener(l); } public virtual void removeNewLineListener(NewLineListener l) { eventSupport.removeNewLineListener(l); } public virtual void removeParserListener(ParserListener l) { eventSupport.removeParserListener(l); } public virtual void removeParserMatchListener(ParserMatchListener l) { eventSupport.removeParserMatchListener(l); } public virtual void removeParserTokenListener(ParserTokenListener l) { eventSupport.removeParserTokenListener(l); } public virtual void removeSemanticPredicateListener(SemanticPredicateListener l) { eventSupport.removeSemanticPredicateListener(l); } public virtual void removeSyntacticPredicateListener(SyntacticPredicateListener l) { eventSupport.removeSyntacticPredicateListener(l); } public virtual void removeTraceListener(TraceListener l) { eventSupport.removeTraceListener(l); } /// Report exception errors caught in nextToken() /// public virtual void reportError(MismatchedCharException e) { eventSupport.fireReportError(e); base.reportError(e); } /// Parser error-reporting function can be overridden in subclass /// public override void reportError(string s) { eventSupport.fireReportError(s); base.reportError(s); } /// Parser warning-reporting function can be overridden in subclass /// public override void 
reportWarning(string s) { eventSupport.fireReportWarning(s); base.reportWarning(s); } public virtual void setupDebugging() { } public virtual void wakeUp() { lock(this) { Monitor.Pulse(this); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/MessageListener.cs0000755000175000017500000000032110522211615025255 0ustar twernertwernernamespace antlr.debug { using System; public interface MessageListener : Listener { void reportError (object source, MessageEventArgs e); void reportWarning (object source, MessageEventArgs e); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParseTreeDebugParser.cs0000644000175000017500000001056710522211615026213 0ustar twernertwernernamespace antlr.debug { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // using System; using Stack = System.Collections.Stack; using antlr; using BitSet = antlr.collections.impl.BitSet; /// /// Specifies the behaviour required (i.e. parser modifications) /// specifically to support parse tree debugging and derivation. /// /// /// /// Override the standard matching and rule entry/exit routines /// to build parse trees. This class is useful for 2.7.3 where /// you can specify a superclass like /// /// /// class TinyCParser extends Parser(ParseTreeDebugParser); /// /// public class ParseTreeDebugParser : LLkParser { /// /// Each new rule invocation must have it's own subtree. Tokens are /// added to the current root so we must have a stack of subtree roots. /// protected Stack currentParseTreeRoot = new Stack(); /// /// Track most recently created parse subtree so that when parsing /// is finished, we can get to the root. /// protected ParseTreeRule mostRecentParseTreeRoot = null; /// /// For every rule replacement with a production, we bump up count. /// protected int numberOfDerivationSteps = 1; // n replacements plus step 0 public ParseTreeDebugParser(int k_) : base(k_) { } public ParseTreeDebugParser(ParserSharedInputState state, int k_) : base(state, k_) { } public ParseTreeDebugParser(TokenBuffer tokenBuf, int k_) : base(tokenBuf, k_) { } public ParseTreeDebugParser(TokenStream lexer, int k_) : base(lexer,k_) { } public ParseTree getParseTree() { return mostRecentParseTreeRoot; } public int getNumberOfDerivationSteps() { return numberOfDerivationSteps; } public override void match(int i) // throws MismatchedTokenException, TokenStreamException { addCurrentTokenToParseTree(); base.match(i); } public override void match(BitSet bitSet) // throws MismatchedTokenException, TokenStreamException { addCurrentTokenToParseTree(); base.match(bitSet); } public override void matchNot(int i) // throws MismatchedTokenException, TokenStreamException { addCurrentTokenToParseTree(); base.matchNot(i); } /// /// Adds LT(1) to the current parse subtree. /// /// /// /// Note that the match() routines add the node before checking for /// correct match. This means that, upon mismatched token, there /// will a token node in the tree corresponding to where that token /// was expected. For no viable alternative errors, no node will /// be in the tree as nothing was matched() (the lookahead failed /// to predict an alternative). 
/// /// protected void addCurrentTokenToParseTree() // throws TokenStreamException { if (inputState.guessing > 0) { return; } ParseTreeRule root = (ParseTreeRule) currentParseTreeRoot.Peek(); ParseTreeToken tokenNode = null; if ( LA(1) == Token.EOF_TYPE ) { tokenNode = new ParseTreeToken(new antlr.CommonToken("EOF")); } else { tokenNode = new ParseTreeToken(LT(1)); } root.addChild(tokenNode); } /// /// Create a rule node, add to current tree, and make it current root /// /// public override void traceIn(string s) // throws TokenStreamException { if (inputState.guessing > 0) { return; } ParseTreeRule subRoot = new ParseTreeRule(s); if ( currentParseTreeRoot.Count > 0 ) { ParseTreeRule oldRoot = (ParseTreeRule) currentParseTreeRoot.Peek(); oldRoot.addChild(subRoot); } currentParseTreeRoot.Push(subRoot); numberOfDerivationSteps++; } /// /// Pop current root; back to adding to old root /// /// public override void traceOut(string s) // throws TokenStreamException { if (inputState.guessing > 0) { return; } mostRecentParseTreeRoot = (ParseTreeRule) currentParseTreeRoot.Pop(); } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ANTLREventArgs.cs0000755000175000017500000000070310522211615024666 0ustar twernertwernernamespace antlr.debug { using System; public abstract class ANTLREventArgs : EventArgs { public ANTLREventArgs() { } public ANTLREventArgs(int type) { this.Type = type; } public virtual int Type { get { return this.type_; } set { this.type_ = value; } } internal void setValues(int type) { this.Type = type; } /// /// Event type. /// private int type_; } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/IParserDebugSubject.cs0000755000175000017500000000056710522211615026033 0ustar twernertwernernamespace antlr.debug { using System; public interface IParserDebugSubject : IDebugSubject { event MatchEventHandler MatchedToken; event MatchEventHandler MatchedNotToken; event MatchEventHandler MisMatchedToken; event MatchEventHandler MisMatchedNotToken; event TokenEventHandler ConsumedToken; event TokenEventHandler TokenLA; } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/MessageListenerBase.cs0000755000175000017500000000234210522211615026055 0ustar twernertwernernamespace antlr.debug { using System; /// /// Provides an abstract base for implementing subclasses. /// /// /// /// This abstract class is provided to make it easier to create s. /// You should extend this base class rather than creating your own. /// /// public class MessageListenerBase : MessageListener { /// /// Handle the "Done" event. /// /// Event source object /// Event data object public virtual void doneParsing(object source, TraceEventArgs e) { } public virtual void refresh() { } /// /// Handle the "ReportError" event. /// /// Event source object /// Event data object public virtual void reportError(object source, MessageEventArgs e) { } /// /// Handle the "ReportWarning" event. /// /// Event source object /// Event data object public virtual void reportWarning(object source, MessageEventArgs e) { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/InputBufferReporter.cs0000755000175000017500000000156110522211615026146 0ustar twernertwernernamespace antlr.debug { using System; public class InputBufferReporter : InputBufferListenerBase, InputBufferListener { public virtual void inputBufferChanged(object source, InputBufferEventArgs e) { System.Console.Out.WriteLine(e); } /// charBufferConsume method comment. 
/// public override void inputBufferConsume(object source, InputBufferEventArgs e) { System.Console.Out.WriteLine(e); } /// charBufferLA method comment. /// public override void inputBufferLA(object source, InputBufferEventArgs e) { System.Console.Out.WriteLine(e); } public override void inputBufferMark(object source, InputBufferEventArgs e) { System.Console.Out.WriteLine(e); } public override void inputBufferRewind(object source, InputBufferEventArgs e) { System.Console.Out.WriteLine(e); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserListener.cs0000755000175000017500000000032310522211615025127 0ustar twernertwernernamespace antlr.debug { using System; public interface ParserListener : SemanticPredicateListener, ParserMatchListener, MessageListener, ParserTokenListener, TraceListener, SyntacticPredicateListener { } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/IDebugSubject.cs0000755000175000017500000000116710522211615024653 0ustar twernertwernernamespace antlr.debug { using System; //using EventHandlerList = System.ComponentModel.EventHandlerList; public interface IDebugSubject { /* EventHandlerList Events { get; } */ event TraceEventHandler EnterRule; event TraceEventHandler ExitRule; event TraceEventHandler Done; event MessageEventHandler ErrorReported; event MessageEventHandler WarningReported; event SemanticPredicateEventHandler SemPredEvaluated; event SyntacticPredicateEventHandler SynPredStarted; event SyntacticPredicateEventHandler SynPredFailed; event SyntacticPredicateEventHandler SynPredSucceeded; } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/SemanticPredicateListener.cs0000755000175000017500000000027010522211615027260 0ustar twernertwernernamespace antlr.debug { using System; public interface SemanticPredicateListener : Listener { void semanticPredicateEvaluated(object source, SemanticPredicateEventArgs e); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/MessageEventArgs.cs0000755000175000017500000000135210522211615025373 0ustar twernertwernernamespace antlr.debug { using System; public class MessageEventArgs : ANTLREventArgs { public MessageEventArgs() { } public MessageEventArgs(int type, string text) { setValues(type, text); } public virtual string Text { get { return text_; } set { this.text_ = value; } } private string text_; public static int WARNING = 0; public static int ERROR = 1; /// This should NOT be called from anyone other than ParserEventSupport! 
/// internal void setValues(int type, string text) { setValues(type); this.Text = text; } public override string ToString() { return "ParserMessageEvent [" + (Type == WARNING?"warning,":"error,") + Text + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/DebuggingInputBuffer.cs0000755000175000017500000000333210522211615026235 0ustar twernertwernernamespace antlr.debug { using System; using ArrayList = System.Collections.ArrayList; public class DebuggingInputBuffer : InputBuffer { public virtual ArrayList InputBufferListeners { get { return inputBufferEventSupport.InputBufferListeners; } } public virtual bool DebugMode { set { debugMode = value; } } private InputBuffer buffer; private InputBufferEventSupport inputBufferEventSupport; private bool debugMode = true; public DebuggingInputBuffer(InputBuffer buffer) { this.buffer = buffer; inputBufferEventSupport = new InputBufferEventSupport(this); } public virtual void addInputBufferListener(InputBufferListener l) { inputBufferEventSupport.addInputBufferListener(l); } public override char consume() { char la = ' '; try { la = buffer.LA(1); } catch (CharStreamException) { } // vaporize it... buffer.consume(); if (debugMode) inputBufferEventSupport.fireConsume(la); return la; } public override void fill(int a) { buffer.fill(a); } public virtual bool isDebugMode() { return debugMode; } public override bool isMarked() { return buffer.isMarked(); } public override char LA(int i) { char la = buffer.LA(i); if (debugMode) inputBufferEventSupport.fireLA(la, i); return la; } public override int mark() { int m = buffer.mark(); inputBufferEventSupport.fireMark(m); return m; } public virtual void removeInputBufferListener(InputBufferListener l) { if (inputBufferEventSupport != null) inputBufferEventSupport.removeInputBufferListener(l); } public override void rewind(int mark) { buffer.rewind(mark); inputBufferEventSupport.fireRewind(mark); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserEventSupport.cs0000755000175000017500000004233410522211615026030 0ustar twernertwernernamespace antlr.debug { using System; using System.Reflection; using Hashtable = System.Collections.Hashtable; using DictionaryEntry = System.Collections.DictionaryEntry; using ArrayList = System.Collections.ArrayList; using antlr.collections.impl; public delegate void MessageEventHandler(object sender, MessageEventArgs e); public delegate void NewLineEventHandler(object sender, NewLineEventArgs e); public delegate void MatchEventHandler(object sender, MatchEventArgs e); public delegate void TokenEventHandler(object sender, TokenEventArgs e); public delegate void SemanticPredicateEventHandler(object sender, SemanticPredicateEventArgs e); public delegate void SyntacticPredicateEventHandler(object sender, SyntacticPredicateEventArgs e); public delegate void TraceEventHandler(object sender, TraceEventArgs e); /// A class to assist in firing parser events /// NOTE: I intentionally _did_not_ synchronize the event firing and /// add/remove listener methods. This is because the add/remove should /// _only_ be called by the parser at its start/end, and the _same_thread_ /// should be performing the parsing. This should help performance a tad... 
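/// 
/// Illustrative wiring sketch; "myParser" and "myListener" are placeholders for
/// an antlr.Parser instance and a user-supplied ParserListener implementation:
/// 
///     ParserEventSupport support = new ParserEventSupport(myParser);
///     support.addParserListener(myListener);   // subscribes to all parser events
///     // the generated parser then drives support.fireEnterRule(...),
///     // support.fireMatch(...), support.fireExitRule(...) etc. as it runs
/// 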
/// public class ParserEventSupport { private object source; private Hashtable listeners; private MatchEventArgs matchEvent; private MessageEventArgs messageEvent; private TokenEventArgs tokenEvent; private SemanticPredicateEventArgs semPredEvent; private SyntacticPredicateEventArgs synPredEvent; private TraceEventArgs traceEvent; private NewLineEventArgs newLineEvent; private ParserController controller; private int ruleDepth = 0; public ParserEventSupport(object source) { matchEvent = new MatchEventArgs(); messageEvent = new MessageEventArgs(); tokenEvent = new TokenEventArgs(); traceEvent = new TraceEventArgs(); semPredEvent = new SemanticPredicateEventArgs(); synPredEvent = new SyntacticPredicateEventArgs(); newLineEvent = new NewLineEventArgs(); listeners = new Hashtable(); this.source = source; } public virtual void checkController() { if (controller != null) controller.checkBreak(); } public virtual void addDoneListener(Listener l) { ((Parser)source).Done += new TraceEventHandler(l.doneParsing); listeners[l] = l; } public virtual void addMessageListener(MessageListener l) { ((Parser)source).ErrorReported += new MessageEventHandler(l.reportError); ((Parser)source).WarningReported += new MessageEventHandler(l.reportWarning); //messageListeners.Add(l); addDoneListener(l); } public virtual void addParserListener(ParserListener l) { if (l is ParserController) { ((ParserController) l).ParserEventSupport = this; controller = (ParserController) l; } addParserMatchListener(l); addParserTokenListener(l); addMessageListener(l); addTraceListener(l); addSemanticPredicateListener(l); addSyntacticPredicateListener(l); } public virtual void addParserMatchListener(ParserMatchListener l) { ((Parser)source).MatchedToken += new MatchEventHandler(l.parserMatch); ((Parser)source).MatchedNotToken += new MatchEventHandler(l.parserMatchNot); ((Parser)source).MisMatchedToken += new MatchEventHandler(l.parserMismatch); ((Parser)source).MisMatchedNotToken += new MatchEventHandler(l.parserMismatchNot); //matchListeners.Add(l); addDoneListener(l); } public virtual void addParserTokenListener(ParserTokenListener l) { ((Parser)source).ConsumedToken += new TokenEventHandler(l.parserConsume); ((Parser)source).TokenLA += new TokenEventHandler(l.parserLA); //tokenListeners.Add(l); addDoneListener(l); } public virtual void addSemanticPredicateListener(SemanticPredicateListener l) { ((Parser)source).SemPredEvaluated += new SemanticPredicateEventHandler(l.semanticPredicateEvaluated); //semPredListeners.Add(l); addDoneListener(l); } public virtual void addSyntacticPredicateListener(SyntacticPredicateListener l) { ((Parser)source).SynPredStarted += new SyntacticPredicateEventHandler(l.syntacticPredicateStarted); ((Parser)source).SynPredFailed += new SyntacticPredicateEventHandler(l.syntacticPredicateFailed); ((Parser)source).SynPredSucceeded += new SyntacticPredicateEventHandler(l.syntacticPredicateSucceeded); //synPredListeners.Add(l); addDoneListener(l); } public virtual void addTraceListener(TraceListener l) { ((Parser)source).EnterRule += new TraceEventHandler(l.enterRule); ((Parser)source).ExitRule += new TraceEventHandler(l.exitRule); //traceListeners.Add(l); addDoneListener(l); } public virtual void fireConsume(int c) { TokenEventHandler eventDelegate = (TokenEventHandler)((Parser)source).Events[Parser.LAEventKey]; if (eventDelegate != null) { tokenEvent.setValues(TokenEventArgs.CONSUME, 1, c); eventDelegate(source, tokenEvent); } checkController(); } public virtual void fireDoneParsing() { TraceEventHandler 
eventDelegate = (TraceEventHandler)((Parser)source).Events[Parser.DoneEventKey]; if (eventDelegate != null) { traceEvent.setValues(TraceEventArgs.DONE_PARSING, 0, 0, 0); eventDelegate(source, traceEvent); } checkController(); } public virtual void fireEnterRule(int ruleNum, int guessing, int data) { ruleDepth++; TraceEventHandler eventDelegate = (TraceEventHandler)((Parser)source).Events[Parser.EnterRuleEventKey]; if (eventDelegate != null) { traceEvent.setValues(TraceEventArgs.ENTER, ruleNum, guessing, data); eventDelegate(source, traceEvent); } checkController(); } public virtual void fireExitRule(int ruleNum, int guessing, int data) { TraceEventHandler eventDelegate = (TraceEventHandler)((Parser)source).Events[Parser.ExitRuleEventKey]; if (eventDelegate != null) { traceEvent.setValues(TraceEventArgs.EXIT, ruleNum, guessing, data); eventDelegate(source, traceEvent); } checkController(); ruleDepth--; if (ruleDepth == 0) fireDoneParsing(); } public virtual void fireLA(int k, int la) { TokenEventHandler eventDelegate = (TokenEventHandler)((Parser)source).Events[Parser.LAEventKey]; if (eventDelegate != null) { tokenEvent.setValues(TokenEventArgs.LA, k, la); eventDelegate(source, tokenEvent); } checkController(); } public virtual void fireMatch(char c, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR, c, c, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(char c, BitSet b, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR_BITSET, c, b, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(char c, string target, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR_RANGE, c, target, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(int c, BitSet b, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.BITSET, c, b, text, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(int n, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.TOKEN, n, n, text, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(string s, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.STRING, 0, s, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatchNot(char c, char n, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MatchNotEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR, c, n, null, guessing, true, true); eventDelegate(source, matchEvent); } 
checkController(); } public virtual void fireMatchNot(int c, int n, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MatchNotEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.TOKEN, c, n, text, guessing, true, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(char c, char n, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR, c, n, null, guessing, false, false); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(char c, BitSet b, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR_BITSET, c, b, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(char c, string target, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR_RANGE, c, target, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(int i, int n, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.TOKEN, i, n, text, guessing, false, false); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(int i, BitSet b, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.BITSET, i, b, text, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(string s, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.STRING, 0, text, s, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatchNot(char v, char c, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MisMatchNotEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR, v, c, null, guessing, true, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatchNot(int i, int n, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((Parser)source).Events[Parser.MisMatchNotEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.TOKEN, i, n, text, guessing, true, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireReportError(System.Exception e) { MessageEventHandler eventDelegate = (MessageEventHandler)((Parser)source).Events[Parser.ReportErrorEventKey]; if (eventDelegate != null) { messageEvent.setValues(MessageEventArgs.ERROR, e.ToString()); eventDelegate(source, messageEvent); } checkController(); } public virtual void fireReportError(string s) { MessageEventHandler eventDelegate = 
(MessageEventHandler)((Parser)source).Events[Parser.ReportErrorEventKey]; if (eventDelegate != null) { messageEvent.setValues(MessageEventArgs.ERROR, s); eventDelegate(source, messageEvent); } checkController(); } public virtual void fireReportWarning(string s) { MessageEventHandler eventDelegate = (MessageEventHandler)((Parser)source).Events[Parser.ReportWarningEventKey]; if (eventDelegate != null) { messageEvent.setValues(MessageEventArgs.WARNING, s); eventDelegate(source, messageEvent); } checkController(); } public virtual bool fireSemanticPredicateEvaluated(int type, int condition, bool result, int guessing) { SemanticPredicateEventHandler eventDelegate = (SemanticPredicateEventHandler)((Parser)source).Events[Parser.SemPredEvaluatedEventKey]; if (eventDelegate != null) { semPredEvent.setValues(type, condition, result, guessing); eventDelegate(source, semPredEvent); } checkController(); return result; } public virtual void fireSyntacticPredicateFailed(int guessing) { SyntacticPredicateEventHandler eventDelegate = (SyntacticPredicateEventHandler)((Parser)source).Events[Parser.SynPredFailedEventKey]; if (eventDelegate != null) { synPredEvent.setValues(0, guessing); eventDelegate(source, synPredEvent); } checkController(); } public virtual void fireSyntacticPredicateStarted(int guessing) { SyntacticPredicateEventHandler eventDelegate = (SyntacticPredicateEventHandler)((Parser)source).Events[Parser.SynPredStartedEventKey]; if (eventDelegate != null) { synPredEvent.setValues(0, guessing); eventDelegate(source, synPredEvent); } checkController(); } public virtual void fireSyntacticPredicateSucceeded(int guessing) { SyntacticPredicateEventHandler eventDelegate = (SyntacticPredicateEventHandler)((Parser)source).Events[Parser.SynPredSucceededEventKey]; if (eventDelegate != null) { synPredEvent.setValues(0, guessing); eventDelegate(source, synPredEvent); } checkController(); } public virtual void refreshListeners() { Hashtable clonedTable; lock(listeners.SyncRoot) { clonedTable = (Hashtable)listeners.Clone(); } foreach (DictionaryEntry entry in clonedTable) { if (entry.Value != null) { ((Listener) entry.Value).refresh(); } } } public virtual void removeDoneListener(Listener l) { ((Parser)source).Done -= new TraceEventHandler(l.doneParsing); listeners.Remove(l); } public virtual void removeMessageListener(MessageListener l) { ((Parser)source).ErrorReported -= new MessageEventHandler(l.reportError); ((Parser)source).WarningReported -= new MessageEventHandler(l.reportWarning); //messageListeners.Remove(l); removeDoneListener(l); } public virtual void removeParserListener(ParserListener l) { removeParserMatchListener(l); removeMessageListener(l); removeParserTokenListener(l); removeTraceListener(l); removeSemanticPredicateListener(l); removeSyntacticPredicateListener(l); } public virtual void removeParserMatchListener(ParserMatchListener l) { ((Parser)source).MatchedToken -= new MatchEventHandler(l.parserMatch); ((Parser)source).MatchedNotToken -= new MatchEventHandler(l.parserMatchNot); ((Parser)source).MisMatchedToken -= new MatchEventHandler(l.parserMismatch); ((Parser)source).MisMatchedNotToken -= new MatchEventHandler(l.parserMismatchNot); //matchListeners.Remove(l); removeDoneListener(l); } public virtual void removeParserTokenListener(ParserTokenListener l) { ((Parser)source).ConsumedToken -= new TokenEventHandler(l.parserConsume); ((Parser)source).TokenLA -= new TokenEventHandler(l.parserLA); //tokenListeners.Remove(l); removeDoneListener(l); } public virtual void 
removeSemanticPredicateListener(SemanticPredicateListener l) { ((Parser)source).SemPredEvaluated -= new SemanticPredicateEventHandler(l.semanticPredicateEvaluated); //semPredListeners.Remove(l); removeDoneListener(l); } public virtual void removeSyntacticPredicateListener(SyntacticPredicateListener l) { ((Parser)source).SynPredStarted -= new SyntacticPredicateEventHandler(l.syntacticPredicateStarted); ((Parser)source).SynPredFailed -= new SyntacticPredicateEventHandler(l.syntacticPredicateFailed); ((Parser)source).SynPredSucceeded -= new SyntacticPredicateEventHandler(l.syntacticPredicateSucceeded); //synPredListeners.Remove(l); removeDoneListener(l); } public virtual void removeTraceListener(TraceListener l) { ((Parser)source).EnterRule -= new TraceEventHandler(l.enterRule); ((Parser)source).ExitRule -= new TraceEventHandler(l.exitRule); //traceListeners.Remove(l); removeDoneListener(l); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/InputBufferEventArgs.cs0000755000175000017500000000205110522211615026235 0ustar twernertwernernamespace antlr.debug { using System; public class InputBufferEventArgs : ANTLREventArgs { public InputBufferEventArgs() { } public InputBufferEventArgs(int type, char c, int lookaheadAmount) { setValues(type, c, lookaheadAmount); } public virtual char Char { get { return this.c_; } set { this.c_ = value; } } public virtual int LookaheadAmount { get { return this.lookaheadAmount_; } set { this.lookaheadAmount_ = value; } } internal char c_; internal int lookaheadAmount_; // amount of lookahead public const int CONSUME = 0; public const int LA = 1; public const int MARK = 2; public const int REWIND = 3; /// This should NOT be called from anyone other than ParserEventSupport! /// internal void setValues(int type, char c, int la) { setValues(type); this.Char = c; this.LookaheadAmount = la; } public override string ToString() { return "CharBufferEvent [" + (Type == CONSUME?"CONSUME, ":"LA, ") + Char + "," + LookaheadAmount + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserController.cs0000755000175000017500000000027210522211615025470 0ustar twernertwernernamespace antlr.debug { using System; public interface ParserController : ParserListener { ParserEventSupport ParserEventSupport { set; } void checkBreak(); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/NewLineEventArgs.cs0000755000175000017500000000077010522211615025353 0ustar twernertwernernamespace antlr.debug { using System; public class NewLineEventArgs : ANTLREventArgs { public NewLineEventArgs() { } public NewLineEventArgs(int line) { Line = line; } public virtual int Line { get { return this.line_; } set { this.line_ = value; } } private int line_; /// This should NOT be called from anyone other than ParserEventSupport! /// public override string ToString() { return "NewLineEvent [" + line_ + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserTokenEventArgs.cs0000755000175000017500000000170510522211615026246 0ustar twernertwernernamespace antlr.debug { using System; public class TokenEventArgs : ANTLREventArgs { public TokenEventArgs() { } public TokenEventArgs(int type, int amount, int val) { setValues(type, amount, val); } public virtual int Amount { get { return amount; } set { this.amount = value; } } public virtual int Value { get { return this.value_; } set { this.value_ = value; } } private int value_; private int amount; public static int LA = 0; public static int CONSUME = 1; /// This should NOT be called from anyone other than ParserEventSupport! 
/// internal void setValues(int type, int amount, int val) { base.setValues(type); this.Amount = amount; this.Value = val; } public override string ToString() { if (Type == LA) return "ParserTokenEvent [LA," + Amount + "," + Value + "]"; else return "ParserTokenEvent [consume,1," + Value + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/SemanticPredicateListenerBase.cs0000755000175000017500000000204210522211615030052 0ustar twernertwernernamespace antlr.debug { using System; /// /// Provides an abstract base for implementing subclasses. /// /// /// /// This abstract class is provided to make it easier to create s. /// You should extend this base class rather than creating your own. /// /// public class SemanticPredicateListenerBase : SemanticPredicateListener { /// /// Handle the "Done" event. /// /// Event source object /// Event data object public virtual void doneParsing(object source, TraceEventArgs e) { } public virtual void refresh() { } /// /// Handle the "SemPreEvaluated" event. /// /// Event source object /// Event data object public virtual void semanticPredicateEvaluated(object source, SemanticPredicateEventArgs e) { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/DebuggingParser.cs0000755000175000017500000000032210522211615025234 0ustar twernertwernernamespace antlr.debug { using System; /// This type was created in VisualAge. /// public interface DebuggingParser { string getRuleName(int n); string getSemPredName(int n); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ICharScannerDebugSubject.cs0000755000175000017500000000064110522211615026757 0ustar twernertwernernamespace antlr.debug { using System; public interface ICharScannerDebugSubject : IDebugSubject { event NewLineEventHandler HitNewLine; event MatchEventHandler MatchedChar; event MatchEventHandler MatchedNotChar; event MatchEventHandler MisMatchedChar; event MatchEventHandler MisMatchedNotChar; event TokenEventHandler ConsumedChar; event TokenEventHandler CharLA; } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/GuessingEventArgs.cs0000755000175000017500000000105210522211615025570 0ustar twernertwernernamespace antlr.debug { using System; public abstract class GuessingEventArgs : ANTLREventArgs { public GuessingEventArgs() { } public GuessingEventArgs(int type) : base(type) { } public virtual int Guessing { get { return guessing_; } set { this.guessing_ = value; } } private int guessing_; /// This should NOT be called from anyone other than ParserEventSupport! /// public virtual void setValues(int type, int guessing) { setValues(type); this.Guessing = guessing; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/TraceListenerBase.cs0000755000175000017500000000231210522211615025524 0ustar twernertwernernamespace antlr.debug { using System; /// /// Provides an abstract base for implementing subclasses. /// /// /// /// This abstract class is provided to make it easier to create s. /// You should extend this base class rather than creating your own. /// /// public abstract class TraceListenerBase : TraceListener { /// /// Handle the "Done" event. 
/// /// Event source object /// Event data object public virtual void doneParsing(object source, TraceEventArgs e) { } /// /// Handle the "EnterRule" event /// /// Event source object /// Event data object public virtual void enterRule(object source, TraceEventArgs e) { } /// /// Handle the "ExitRule" event /// /// Event source object /// Event data object public virtual void exitRule(object source, TraceEventArgs e) { } public virtual void refresh() { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserMatchEventArgs.cs0000755000175000017500000000402010522211615026213 0ustar twernertwernernamespace antlr.debug { using System; public class MatchEventArgs : GuessingEventArgs { public MatchEventArgs() { } public MatchEventArgs(int type, int val, object target, string text, int guessing, bool inverse, bool matched) { setValues(type, val, target, text, guessing, inverse, matched); } public virtual object Target { get { return this.target_; } set { this.target_ = value; } } public virtual string Text { get { return this.text_; } set { this.text_ = value; } } public virtual int Value { get { return this.val_; } set { this.val_ = value; } } internal bool Inverse { set { this.inverse_ = value; } } internal bool Matched { set { this.matched_ = value; } } // NOTE: for a mismatch on type STRING, the "text" is used as the lookahead // value. Normally "value" is this public enum ParserMatchEnums { TOKEN = 0, BITSET = 1, CHAR = 2, CHAR_BITSET = 3, STRING = 4, CHAR_RANGE = 5, } public static int TOKEN = 0; public static int BITSET = 1; public static int CHAR = 2; public static int CHAR_BITSET = 3; public static int STRING = 4; public static int CHAR_RANGE = 5; private bool inverse_; private bool matched_; private object target_; private int val_; private string text_; public virtual bool isInverse() { return inverse_; } public virtual bool isMatched() { return matched_; } /// This should NOT be called from anyone other than ParserEventSupport! 
/// internal void setValues(int type, int val, object target, string text, int guessing, bool inverse, bool matched) { base.setValues(type, guessing); this.Value = val; this.Target = target; this.Inverse = inverse; this.Matched = matched; this.Text = text; } public override string ToString() { return "ParserMatchEvent [" + (isMatched()?"ok,":"bad,") + (isInverse()?"NOT ":"") + (Type == TOKEN?"token,":"bitset,") + Value + "," + Target + "," + Guessing + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/LLkDebuggingParser.cs0000755000175000017500000002037010522211615025644 0ustar twernertwernernamespace antlr.debug { using System; using System.Threading; using antlr.collections.impl; public class LLkDebuggingParser : LLkParser, DebuggingParser { private void InitBlock() { parserEventSupport = new ParserEventSupport(this); } public override void setDebugMode(bool mode) { _notDebugMode = !mode; } protected internal ParserEventSupport parserEventSupport; private bool _notDebugMode = false; protected internal string[] ruleNames; protected internal string[] semPredNames; public LLkDebuggingParser(int k_):base(k_) { InitBlock(); } public LLkDebuggingParser(ParserSharedInputState state, int k_):base(state, k_) { InitBlock(); } public LLkDebuggingParser(TokenBuffer tokenBuf, int k_):base(tokenBuf, k_) { InitBlock(); } public LLkDebuggingParser(TokenStream lexer, int k_):base(lexer, k_) { InitBlock(); } public override void addMessageListener(MessageListener l) { parserEventSupport.addMessageListener(l); } public override void addParserListener(ParserListener l) { parserEventSupport.addParserListener(l); } public override void addParserMatchListener(ParserMatchListener l) { parserEventSupport.addParserMatchListener(l); } public override void addParserTokenListener(ParserTokenListener l) { parserEventSupport.addParserTokenListener(l); } public override void addSemanticPredicateListener(SemanticPredicateListener l) { parserEventSupport.addSemanticPredicateListener(l); } public override void addSyntacticPredicateListener(SyntacticPredicateListener l) { parserEventSupport.addSyntacticPredicateListener(l); } public override void addTraceListener(TraceListener l) { parserEventSupport.addTraceListener(l); } /// Get another token object from the token stream /// public override void consume() { int la_1 = - 99; la_1 = LA(1); base.consume(); parserEventSupport.fireConsume(la_1); } protected internal virtual void fireEnterRule(int num, int data) { if (isDebugMode()) parserEventSupport.fireEnterRule(num, inputState.guessing, data); } protected internal virtual void fireExitRule(int num, int data) { if (isDebugMode()) parserEventSupport.fireExitRule(num, inputState.guessing, data); } protected internal virtual bool fireSemanticPredicateEvaluated(int type, int num, bool condition) { if (isDebugMode()) return parserEventSupport.fireSemanticPredicateEvaluated(type, num, condition, inputState.guessing); else return condition; } protected internal virtual void fireSyntacticPredicateFailed() { if (isDebugMode()) parserEventSupport.fireSyntacticPredicateFailed(inputState.guessing); } protected internal virtual void fireSyntacticPredicateStarted() { if (isDebugMode()) parserEventSupport.fireSyntacticPredicateStarted(inputState.guessing); } protected internal virtual void fireSyntacticPredicateSucceeded() { if (isDebugMode()) parserEventSupport.fireSyntacticPredicateSucceeded(inputState.guessing); } public virtual string getRuleName(int num) { return ruleNames[num]; } public virtual string getSemPredName(int 
num) { return semPredNames[num]; } public virtual void goToSleep() { lock(this) { try { Monitor.Wait(this); } catch (System.Threading.ThreadInterruptedException) { } } } public override bool isDebugMode() { return !_notDebugMode; } public virtual bool isGuessing() { return inputState.guessing > 0; } /// Return the token type of the ith token of lookahead where i=1 /// is the current token being examined by the parser (i.e., it /// has not been matched yet). /// public override int LA(int i) { int la = base.LA(i); parserEventSupport.fireLA(i, la); return la; } /// Make sure current lookahead symbol matches token type t. /// Throw an exception upon mismatch, which is catch by either the /// error handler or by the syntactic predicate. /// public override void match(int t) { string text = LT(1).getText(); int la_1 = LA(1); try { base.match(t); parserEventSupport.fireMatch(t, text, inputState.guessing); } catch (MismatchedTokenException e) { if (inputState.guessing == 0) parserEventSupport.fireMismatch(la_1, t, text, inputState.guessing); throw e; } } /// Make sure current lookahead symbol matches the given set /// Throw an exception upon mismatch, which is catch by either the /// error handler or by the syntactic predicate. /// public override void match(BitSet b) { string text = LT(1).getText(); int la_1 = LA(1); try { base.match(b); parserEventSupport.fireMatch(la_1, b, text, inputState.guessing); } catch (MismatchedTokenException e) { if (inputState.guessing == 0) parserEventSupport.fireMismatch(la_1, b, text, inputState.guessing); throw e; } } public override void matchNot(int t) { string text = LT(1).getText(); int la_1 = LA(1); try { base.matchNot(t); parserEventSupport.fireMatchNot(la_1, t, text, inputState.guessing); } catch (MismatchedTokenException e) { if (inputState.guessing == 0) parserEventSupport.fireMismatchNot(la_1, t, text, inputState.guessing); throw e; } } public override void removeMessageListener(MessageListener l) { parserEventSupport.removeMessageListener(l); } public override void removeParserListener(ParserListener l) { parserEventSupport.removeParserListener(l); } public override void removeParserMatchListener(ParserMatchListener l) { parserEventSupport.removeParserMatchListener(l); } public override void removeParserTokenListener(ParserTokenListener l) { parserEventSupport.removeParserTokenListener(l); } public override void removeSemanticPredicateListener(SemanticPredicateListener l) { parserEventSupport.removeSemanticPredicateListener(l); } public override void removeSyntacticPredicateListener(SyntacticPredicateListener l) { parserEventSupport.removeSyntacticPredicateListener(l); } public override void removeTraceListener(TraceListener l) { parserEventSupport.removeTraceListener(l); } /// Parser error-reporting function can be overridden in subclass /// public override void reportError(RecognitionException ex) { parserEventSupport.fireReportError(ex); base.reportError(ex); } /// Parser error-reporting function can be overridden in subclass /// public override void reportError(string s) { parserEventSupport.fireReportError(s); base.reportError(s); } /// Parser warning-reporting function can be overridden in subclass /// public override void reportWarning(string s) { parserEventSupport.fireReportWarning(s); base.reportWarning(s); } public virtual void setupDebugging(TokenBuffer tokenBuf) { setupDebugging(null, tokenBuf); } public virtual void setupDebugging(TokenStream lexer) { setupDebugging(lexer, null); } /// User can override to do their own debugging /// 
protected internal virtual void setupDebugging(TokenStream lexer, TokenBuffer tokenBuf) { setDebugMode(true); // default parser debug setup is ParseView try { // try // { // System.Type.GetType("javax.swing.JButton"); // } // catch (System.Exception) // { // System.Console.Error.WriteLine("Swing is required to use ParseView, but is not present in your CLASSPATH"); // System.Environment.Exit(1); // } System.Type c = System.Type.GetType("antlr.parseview.ParseView"); System.Reflection.ConstructorInfo constructor = c.GetConstructor(new System.Type[]{typeof(LLkDebuggingParser), typeof(TokenStream), typeof(TokenBuffer)}); constructor.Invoke(new object[]{this, lexer, tokenBuf}); } catch (System.Exception e) { System.Console.Error.WriteLine("Error initializing ParseView: " + e); System.Console.Error.WriteLine("Please report this to Scott Stanchfield, thetick@magelang.com"); System.Environment.Exit(1); } } public virtual void wakeUp() { lock(this) { Monitor.Pulse(this); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/TraceEventArgs.cs0000755000175000017500000000176310522211615025053 0ustar twernertwernernamespace antlr.debug { using System; public class TraceEventArgs : GuessingEventArgs { public TraceEventArgs() { } public TraceEventArgs(int type, int ruleNum, int guessing, int data) { setValues(type, ruleNum, guessing, data); } public virtual int Data { get { return this.data_; } set { this.data_ = value; } } public virtual int RuleNum { get { return this.ruleNum_; } set { this.ruleNum_ = value; } } private int ruleNum_; private int data_; public static int ENTER = 0; public static int EXIT = 1; public static int DONE_PARSING = 2; /// This should NOT be called from anyone other than ParserEventSupport! /// internal void setValues(int type, int ruleNum, int guessing, int data) { base.setValues(type, guessing); RuleNum = ruleNum; Data = data; } public override string ToString() { return "ParserTraceEvent [" + (Type == ENTER?"enter,":"exit,") + RuleNum + "," + Guessing + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserReporter.cs0000755000175000017500000000333010522211615025145 0ustar twernertwernernamespace antlr.debug { using System; public class ParserReporter : Tracer, ParserListener { public virtual void parserConsume(object source, TokenEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void parserLA(object source, TokenEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void parserMatch(object source, MatchEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void parserMatchNot(object source, MatchEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void parserMismatch(object source, MatchEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void parserMismatchNot(object source, MatchEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void reportError(object source, MessageEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void reportWarning(object source, MessageEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void semanticPredicateEvaluated(object source, SemanticPredicateEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void syntacticPredicateFailed(object source, SyntacticPredicateEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void syntacticPredicateStarted(object source, 
SyntacticPredicateEventArgs e) { System.Console.Out.WriteLine(indentString + e); } public virtual void syntacticPredicateSucceeded(object source, SyntacticPredicateEventArgs e) { System.Console.Out.WriteLine(indentString + e); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ScannerEventSupport.cs0000755000175000017500000004210510522211615026161 0ustar twernertwernernamespace antlr.debug { using System; using System.Reflection; using Hashtable = System.Collections.Hashtable; using ArrayList = System.Collections.ArrayList; using antlr.collections.impl; /// A class to assist in firing parser events /// NOTE: I intentionally _did_not_ synchronize the event firing and /// add/remove listener methods. This is because the add/remove should /// _only_ be called by the parser at its start/end, and the _same_thread_ /// should be performing the parsing. This should help performance a tad... /// public class ScannerEventSupport { private object source; private Hashtable listeners; private MatchEventArgs matchEvent; private MessageEventArgs messageEvent; private TokenEventArgs tokenEvent; private SemanticPredicateEventArgs semPredEvent; private SyntacticPredicateEventArgs synPredEvent; private TraceEventArgs traceEvent; private NewLineEventArgs newLineEvent; //private ParserController controller; private int ruleDepth = 0; public ScannerEventSupport(object source) { matchEvent = new MatchEventArgs(); messageEvent = new MessageEventArgs(); tokenEvent = new TokenEventArgs(); traceEvent = new TraceEventArgs(); semPredEvent = new SemanticPredicateEventArgs(); synPredEvent = new SyntacticPredicateEventArgs(); newLineEvent = new NewLineEventArgs(); listeners = new Hashtable(); this.source = source; } public virtual void checkController() { //if (controller != null) // controller.checkBreak(); } public virtual void addDoneListener(Listener l) { ((CharScanner)source).Done += new TraceEventHandler(l.doneParsing); listeners[l] = l; } public virtual void addMessageListener(MessageListener l) { ((CharScanner)source).ErrorReported += new MessageEventHandler(l.reportError); ((CharScanner)source).WarningReported += new MessageEventHandler(l.reportWarning); addDoneListener(l); } public virtual void addNewLineListener(NewLineListener l) { ((CharScanner)source).HitNewLine += new NewLineEventHandler(l.hitNewLine); addDoneListener(l); } public virtual void addParserListener(ParserListener l) { if (l is ParserController) { //((ParserController) l).ParserEventSupport = this; //controller = (ParserController) l; } addParserMatchListener(l); addParserTokenListener(l); addMessageListener(l); addTraceListener(l); addSemanticPredicateListener(l); addSyntacticPredicateListener(l); } public virtual void addParserMatchListener(ParserMatchListener l) { ((CharScanner)source).MatchedChar += new MatchEventHandler(l.parserMatch); ((CharScanner)source).MatchedNotChar += new MatchEventHandler(l.parserMatchNot); ((CharScanner)source).MisMatchedChar += new MatchEventHandler(l.parserMismatch); ((CharScanner)source).MisMatchedNotChar += new MatchEventHandler(l.parserMismatchNot); addDoneListener(l); } public virtual void addParserTokenListener(ParserTokenListener l) { ((CharScanner)source).ConsumedChar += new TokenEventHandler(l.parserConsume); ((CharScanner)source).CharLA += new TokenEventHandler(l.parserLA); addDoneListener(l); } public virtual void addSemanticPredicateListener(SemanticPredicateListener l) { ((CharScanner)source).SemPredEvaluated += new SemanticPredicateEventHandler(l.semanticPredicateEvaluated); addDoneListener(l); 
} public virtual void addSyntacticPredicateListener(SyntacticPredicateListener l) { ((CharScanner)source).SynPredStarted += new SyntacticPredicateEventHandler(l.syntacticPredicateStarted); ((CharScanner)source).SynPredFailed += new SyntacticPredicateEventHandler(l.syntacticPredicateFailed); ((CharScanner)source).SynPredSucceeded += new SyntacticPredicateEventHandler(l.syntacticPredicateSucceeded); addDoneListener(l); } public virtual void addTraceListener(TraceListener l) { ((CharScanner)source).EnterRule += new TraceEventHandler(l.enterRule); ((CharScanner)source).ExitRule += new TraceEventHandler(l.exitRule); addDoneListener(l); } public virtual void fireConsume(int c) { TokenEventHandler eventDelegate = (TokenEventHandler)((CharScanner)source).Events[Parser.LAEventKey]; if (eventDelegate != null) { tokenEvent.setValues(TokenEventArgs.CONSUME, 1, c); eventDelegate(source, tokenEvent); } checkController(); } public virtual void fireDoneParsing() { TraceEventHandler eventDelegate = (TraceEventHandler)((CharScanner)source).Events[Parser.DoneEventKey]; if (eventDelegate != null) { traceEvent.setValues(TraceEventArgs.DONE_PARSING, 0, 0, 0); eventDelegate(source, traceEvent); } checkController(); } public virtual void fireEnterRule(int ruleNum, int guessing, int data) { ruleDepth++; TraceEventHandler eventDelegate = (TraceEventHandler)((CharScanner)source).Events[Parser.EnterRuleEventKey]; if (eventDelegate != null) { traceEvent.setValues(TraceEventArgs.ENTER, ruleNum, guessing, data); eventDelegate(source, traceEvent); } checkController(); } public virtual void fireExitRule(int ruleNum, int guessing, int data) { TraceEventHandler eventDelegate = (TraceEventHandler)((CharScanner)source).Events[Parser.ExitRuleEventKey]; if (eventDelegate != null) { traceEvent.setValues(TraceEventArgs.EXIT, ruleNum, guessing, data); eventDelegate(source, traceEvent); } checkController(); ruleDepth--; if (ruleDepth == 0) fireDoneParsing(); } public virtual void fireLA(int k, int la) { TokenEventHandler eventDelegate = (TokenEventHandler)((CharScanner)source).Events[Parser.LAEventKey]; if (eventDelegate != null) { tokenEvent.setValues(TokenEventArgs.LA, k, la); eventDelegate(source, tokenEvent); } checkController(); } public virtual void fireMatch(char c, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR, c, c, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(char c, BitSet b, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR_BITSET, c, b, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(char c, string target, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR_RANGE, c, target, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(int c, BitSet b, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.BITSET, c, b, text, guessing, false, true); 
eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(int n, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.TOKEN, n, n, text, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatch(string s, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.STRING, 0, s, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatchNot(char c, char n, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MatchNotEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR, c, n, null, guessing, true, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMatchNot(int c, int n, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MatchNotEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.TOKEN, c, n, text, guessing, true, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(char c, char n, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR, c, n, null, guessing, false, false); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(char c, BitSet b, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR_BITSET, c, b, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(char c, string target, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR_RANGE, c, target, null, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(int i, int n, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.TOKEN, i, n, text, guessing, false, false); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(int i, BitSet b, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.BITSET, i, b, text, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatch(string s, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MisMatchEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.STRING, 0, text, s, guessing, false, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatchNot(char v, char c, 
int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MisMatchNotEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.CHAR, v, c, null, guessing, true, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireMismatchNot(int i, int n, string text, int guessing) { MatchEventHandler eventDelegate = (MatchEventHandler)((CharScanner)source).Events[Parser.MisMatchNotEventKey]; if (eventDelegate != null) { matchEvent.setValues(MatchEventArgs.TOKEN, i, n, text, guessing, true, true); eventDelegate(source, matchEvent); } checkController(); } public virtual void fireNewLine(int line) { NewLineEventHandler eventDelegate = (NewLineEventHandler)((CharScanner)source).Events[Parser.NewLineEventKey]; if (eventDelegate != null) { newLineEvent.Line = line; eventDelegate(source, newLineEvent); } checkController(); } public virtual void fireReportError(System.Exception e) { MessageEventHandler eventDelegate = (MessageEventHandler)((CharScanner)source).Events[Parser.ReportErrorEventKey]; if (eventDelegate != null) { messageEvent.setValues(MessageEventArgs.ERROR, e.ToString()); eventDelegate(source, messageEvent); } checkController(); } public virtual void fireReportError(string s) { MessageEventHandler eventDelegate = (MessageEventHandler)((CharScanner)source).Events[Parser.ReportErrorEventKey]; if (eventDelegate != null) { messageEvent.setValues(MessageEventArgs.ERROR, s); eventDelegate(source, messageEvent); } checkController(); } public virtual void fireReportWarning(string s) { MessageEventHandler eventDelegate = (MessageEventHandler)((CharScanner)source).Events[Parser.ReportWarningEventKey]; if (eventDelegate != null) { messageEvent.setValues(MessageEventArgs.WARNING, s); eventDelegate(source, messageEvent); } checkController(); } public virtual bool fireSemanticPredicateEvaluated(int type, int condition, bool result, int guessing) { SemanticPredicateEventHandler eventDelegate = (SemanticPredicateEventHandler)((CharScanner)source).Events[Parser.SemPredEvaluatedEventKey]; if (eventDelegate != null) { semPredEvent.setValues(type, condition, result, guessing); eventDelegate(source, semPredEvent); } checkController(); return result; } public virtual void fireSyntacticPredicateFailed(int guessing) { SyntacticPredicateEventHandler eventDelegate = (SyntacticPredicateEventHandler)((CharScanner)source).Events[Parser.SynPredFailedEventKey]; if (eventDelegate != null) { synPredEvent.setValues(0, guessing); eventDelegate(source, synPredEvent); } checkController(); } public virtual void fireSyntacticPredicateStarted(int guessing) { SyntacticPredicateEventHandler eventDelegate = (SyntacticPredicateEventHandler)((CharScanner)source).Events[Parser.SynPredStartedEventKey]; if (eventDelegate != null) { synPredEvent.setValues(0, guessing); eventDelegate(source, synPredEvent); } checkController(); } public virtual void fireSyntacticPredicateSucceeded(int guessing) { SyntacticPredicateEventHandler eventDelegate = (SyntacticPredicateEventHandler)((CharScanner)source).Events[Parser.SynPredSucceededEventKey]; if (eventDelegate != null) { synPredEvent.setValues(0, guessing); eventDelegate(source, synPredEvent); } checkController(); } public virtual void refreshListeners() { Hashtable clonedTable; lock(listeners.SyncRoot) { clonedTable = (Hashtable)listeners.Clone(); } foreach (Listener l in clonedTable) { l.refresh(); } } public virtual void removeDoneListener(Listener l) { ((CharScanner)source).Done -= new 
TraceEventHandler(l.doneParsing); listeners.Remove(l); } public virtual void removeMessageListener(MessageListener l) { ((CharScanner)source).ErrorReported -= new MessageEventHandler(l.reportError); ((CharScanner)source).WarningReported -= new MessageEventHandler(l.reportWarning); removeDoneListener(l); } public virtual void removeNewLineListener(NewLineListener l) { ((CharScanner)source).HitNewLine -= new NewLineEventHandler(l.hitNewLine); removeDoneListener(l); } public virtual void removeParserListener(ParserListener l) { removeParserMatchListener(l); removeMessageListener(l); removeParserTokenListener(l); removeTraceListener(l); removeSemanticPredicateListener(l); removeSyntacticPredicateListener(l); } public virtual void removeParserMatchListener(ParserMatchListener l) { ((CharScanner)source).MatchedChar -= new MatchEventHandler(l.parserMatch); ((CharScanner)source).MatchedNotChar -= new MatchEventHandler(l.parserMatchNot); ((CharScanner)source).MisMatchedChar -= new MatchEventHandler(l.parserMismatch); ((CharScanner)source).MisMatchedNotChar -= new MatchEventHandler(l.parserMismatchNot); removeDoneListener(l); } public virtual void removeParserTokenListener(ParserTokenListener l) { ((CharScanner)source).ConsumedChar -= new TokenEventHandler(l.parserConsume); ((CharScanner)source).CharLA -= new TokenEventHandler(l.parserLA); removeDoneListener(l); } public virtual void removeSemanticPredicateListener(SemanticPredicateListener l) { ((CharScanner)source).SemPredEvaluated -= new SemanticPredicateEventHandler(l.semanticPredicateEvaluated); removeDoneListener(l); } public virtual void removeSyntacticPredicateListener(SyntacticPredicateListener l) { ((CharScanner)source).SynPredStarted -= new SyntacticPredicateEventHandler(l.syntacticPredicateStarted); ((CharScanner)source).SynPredFailed -= new SyntacticPredicateEventHandler(l.syntacticPredicateFailed); ((CharScanner)source).SynPredSucceeded -= new SyntacticPredicateEventHandler(l.syntacticPredicateSucceeded); removeDoneListener(l); } public virtual void removeTraceListener(TraceListener l) { ((CharScanner)source).EnterRule -= new TraceEventHandler(l.enterRule); ((CharScanner)source).ExitRule -= new TraceEventHandler(l.exitRule); removeDoneListener(l); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/InputBufferListener.cs0000755000175000017500000000055410522211615026132 0ustar twernertwernernamespace antlr.debug { using System; public interface InputBufferListener : Listener { void inputBufferConsume (object source, InputBufferEventArgs e); void inputBufferLA (object source, InputBufferEventArgs e); void inputBufferMark (object source, InputBufferEventArgs e); void inputBufferRewind (object source, InputBufferEventArgs e); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/SemanticPredicateEventArgs.cs0000755000175000017500000000172210522211615027374 0ustar twernertwernernamespace antlr.debug { using System; public class SemanticPredicateEventArgs : GuessingEventArgs { public SemanticPredicateEventArgs() { } public SemanticPredicateEventArgs(int type) : base(type) { } public virtual int Condition { get { return this.condition_; } set { this.condition_ = value; } } public virtual bool Result { get { return this.result_; } set { this.result_ = value; } } public const int VALIDATING = 0; public const int PREDICTING = 1; private int condition_; private bool result_; /// This should NOT be called from anyone other than ParserEventSupport! 
/// internal void setValues(int type, int condition, bool result, int guessing) { base.setValues(type, guessing); this.Condition = condition; this.Result = result; } public override string ToString() { return "SemanticPredicateEvent [" + Condition + "," + Result + "," + Guessing + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserMatchListener.cs0000755000175000017500000000051210522211615026104 0ustar twernertwernernamespace antlr.debug { using System; public interface ParserMatchListener : Listener { void parserMatch (object source, MatchEventArgs e); void parserMatchNot (object source, MatchEventArgs e); void parserMismatch (object source, MatchEventArgs e); void parserMismatchNot (object source, MatchEventArgs e); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserMatchListenerBase.cs0000755000175000017500000000335110522211615026703 0ustar twernertwernernamespace antlr.debug { using System; /// /// Provides an abstract base for implementing subclasses. /// /// /// /// This abstract class is provided to make it easier to create s. /// You should extend this base class rather than creating your own. /// /// public abstract class ParserMatchListenerBase : ParserMatchListener { /// /// Handle the "Done" event. /// /// Event source object /// Event data object public virtual void doneParsing(object source, TraceEventArgs e) { } /// /// Handle the "Match" event. /// /// Event source object /// Event data object public virtual void parserMatch(object source, MatchEventArgs e) { } /// /// Handle the "MatchNot" event. /// /// Event source object /// Event data object public virtual void parserMatchNot(object source, MatchEventArgs e) { } /// /// Handle the "MisMatch" event. /// /// Event source object /// Event data object public virtual void parserMismatch(object source, MatchEventArgs e) { } /// /// Handle the "MisMatchNot" event. /// /// Event source object /// Event data object public virtual void parserMismatchNot(object source, MatchEventArgs e) { } public virtual void refresh() { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserTokenListener.cs0000755000175000017500000000031710522211615026133 0ustar twernertwernernamespace antlr.debug { using System; public interface ParserTokenListener : Listener { void parserConsume (object source, TokenEventArgs e); void parserLA (object source, TokenEventArgs e); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserListenerBase.cs0000755000175000017500000001041710522211615025727 0ustar twernertwernernamespace antlr.debug { using System; /// /// Provides an abstract base for implementing subclasses. /// /// /// /// This abstract class is provided to make it easier to create s. /// You should extend this base class rather than creating your own. /// /// public class ParserListenerBase : ParserListener { /// /// Handle the "Done" event. /// /// Event source object /// Event data object public virtual void doneParsing(object source, TraceEventArgs e) { } /// /// Handle the "EnterRule" event /// /// Event source object /// Event data object public virtual void enterRule(object source, TraceEventArgs e) { } /// /// Handle the "ExitRule" event /// /// Event source object /// Event data object public virtual void exitRule(object source, TraceEventArgs e) { } /// /// Handle the "Consume" event. /// /// Event source object /// Event data object public virtual void parserConsume(object source, TokenEventArgs e) { } /// /// Handle the "ParserLA" event. 
/// /// Event source object /// Event data object public virtual void parserLA(object source, TokenEventArgs e) { } /// /// Handle the "Match" event. /// /// Event source object /// Event data object public virtual void parserMatch(object source, MatchEventArgs e) { } /// /// Handle the "MatchNot" event. /// /// Event source object /// Event data object public virtual void parserMatchNot(object source, MatchEventArgs e) { } /// /// Handle the "MisMatch" event. /// /// Event source object /// Event data object public virtual void parserMismatch(object source, MatchEventArgs e) { } /// /// Handle the "MisMatchNot" event. /// /// Event source object /// Event data object public virtual void parserMismatchNot(object source, MatchEventArgs e) { } /// /// Handle the "ReportError" event. /// /// Event source object /// Event data object public virtual void reportError(object source, MessageEventArgs e) { } /// /// Handle the "ReportWarning" event. /// /// Event source object /// Event data object public virtual void reportWarning(object source, MessageEventArgs e) { } /// /// Handle the "SemPreEvaluated" event. /// /// Event source object /// Event data object public virtual void semanticPredicateEvaluated(object source, SemanticPredicateEventArgs e) { } /// /// Handle the "SynPredFailed" event. /// /// Event source object /// Event data object public virtual void syntacticPredicateFailed(object source, SyntacticPredicateEventArgs e) { } /// /// Handle the "SynPredStarted" event. /// /// Event source object /// Event data object public virtual void syntacticPredicateStarted(object source, SyntacticPredicateEventArgs e) { } /// /// Handle the "SynPredSucceeded" event. /// /// Event source object /// Event data object public virtual void syntacticPredicateSucceeded(object source, SyntacticPredicateEventArgs e) { } public virtual void refresh() { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/SyntacticPredicateEventArgs.cs0000755000175000017500000000051410522211615027570 0ustar twernertwernernamespace antlr.debug { using System; public class SyntacticPredicateEventArgs : GuessingEventArgs { public SyntacticPredicateEventArgs() { } public SyntacticPredicateEventArgs(int type) : base(type) { } public override string ToString() { return "SyntacticPredicateEvent [" + Guessing + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/InputBufferListenerBase.cs0000755000175000017500000000340010522211615026716 0ustar twernertwernernamespace antlr.debug { using System; /// /// Provides an abstract base for implementing subclasses. /// /// /// /// This abstract class is provided to make it easier to create s. /// You should extend this base class rather than creating your own. /// /// public abstract class InputBufferListenerBase : InputBufferListener { /// /// Handle the "Done" event. /// /// Event source object /// Event data object public virtual void doneParsing(object source, TraceEventArgs e) { } /// /// Handle the "CharConsumed" event. /// /// Event source object /// Event data object public virtual void inputBufferConsume(object source, InputBufferEventArgs e) { } /// /// Handle the "CharLA" event. /// /// Event source object /// Event data object public virtual void inputBufferLA(object source, InputBufferEventArgs e) { } /// /// Handle the "Mark" event. /// /// Event source object /// Event data object public virtual void inputBufferMark(object source, InputBufferEventArgs e) { } /// /// Handle the "Rewind" event. 
/// /// Event source object /// Event data object public virtual void inputBufferRewind(object source, InputBufferEventArgs e) { } public virtual void refresh() { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/NewLineListener.cs0000755000175000017500000000022310522211615025233 0ustar twernertwernernamespace antlr.debug { using System; public interface NewLineListener : Listener { void hitNewLine(object source, NewLineEventArgs e); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/InputBufferEventSupport.cs0000755000175000017500000000611110522211615027016 0ustar twernertwernernamespace antlr.debug { using System; using ArrayList = System.Collections.ArrayList; public class InputBufferEventSupport { public virtual ArrayList InputBufferListeners { get { return inputBufferListeners; } } private object source; private ArrayList inputBufferListeners; private InputBufferEventArgs inputBufferEvent; protected internal const int CONSUME = 0; protected internal const int LA = 1; protected internal const int MARK = 2; protected internal const int REWIND = 3; public InputBufferEventSupport(object source) { inputBufferEvent = new InputBufferEventArgs(); this.source = source; } public virtual void addInputBufferListener(InputBufferListener l) { if (inputBufferListeners == null) inputBufferListeners = new ArrayList(); inputBufferListeners.Add(l); } public virtual void fireConsume(char c) { inputBufferEvent.setValues(InputBufferEventArgs.CONSUME, c, 0); fireEvents(CONSUME, inputBufferListeners); } public virtual void fireEvent(int type, Listener l) { switch (type) { case CONSUME: ((InputBufferListener) l).inputBufferConsume(source, inputBufferEvent); break; case LA: ((InputBufferListener) l).inputBufferLA(source, inputBufferEvent); break; case MARK: ((InputBufferListener) l).inputBufferMark(source, inputBufferEvent); break; case REWIND: ((InputBufferListener) l).inputBufferRewind(source, inputBufferEvent); break; default: throw new System.ArgumentException("bad type " + type + " for fireEvent()"); } } public virtual void fireEvents(int type, ArrayList listeners) { ArrayList targets = null; Listener l = null; lock(this) { if (listeners == null) return ; targets = (ArrayList) listeners.Clone(); } if (targets != null) for (int i = 0; i < targets.Count; i++) { l = (Listener) targets[i]; fireEvent(type, l); } } public virtual void fireLA(char c, int la) { inputBufferEvent.setValues(InputBufferEventArgs.LA, c, la); fireEvents(LA, inputBufferListeners); } public virtual void fireMark(int pos) { inputBufferEvent.setValues(InputBufferEventArgs.MARK, ' ', pos); fireEvents(MARK, inputBufferListeners); } public virtual void fireRewind(int pos) { inputBufferEvent.setValues(InputBufferEventArgs.REWIND, ' ', pos); fireEvents(REWIND, inputBufferListeners); } protected internal virtual void refresh(ArrayList listeners) { ArrayList v; lock(listeners) { v = (ArrayList) listeners.Clone(); } if (v != null) for (int i = 0; i < v.Count; i++) ((Listener) v[i]).refresh(); } public virtual void refreshListeners() { refresh(inputBufferListeners); } public virtual void removeInputBufferListener(InputBufferListener l) { if (inputBufferListeners != null) { ArrayList temp_arraylist; object temp_object; temp_arraylist = inputBufferListeners; temp_object = l; temp_arraylist.Contains(temp_object); temp_arraylist.Remove(temp_object); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/SyntacticPredicateListenerBase.cs0000755000175000017500000000314410522211615030254 0ustar twernertwernernamespace antlr.debug { using System; /// 
/// Provides an abstract base for implementing subclasses. /// /// /// /// This abstract class is provided to make it easier to create s. /// You should extend this base class rather than creating your own. /// /// public abstract class SyntacticPredicateListenerBase : SyntacticPredicateListener { /// /// Handle the "Done" event. /// /// Event source object /// Event data object public virtual void doneParsing(object source, TraceEventArgs e) { } public virtual void refresh() { } /// /// Handle the "SynPredFailed" event. /// /// Event source object /// Event data object public virtual void syntacticPredicateFailed(object source, SyntacticPredicateEventArgs e) { } /// /// Handle the "SynPredStarted" event. /// /// Event source object /// Event data object public virtual void syntacticPredicateStarted(object source, SyntacticPredicateEventArgs e) { } /// /// Handle the "SynPredSucceeded" event. /// /// Event source object /// Event data object public virtual void syntacticPredicateSucceeded(object source, SyntacticPredicateEventArgs e) { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/SyntacticPredicateListener.cs0000755000175000017500000000054110522211615027457 0ustar twernertwernernamespace antlr.debug { using System; public interface SyntacticPredicateListener : Listener { void syntacticPredicateFailed (object source, SyntacticPredicateEventArgs e); void syntacticPredicateStarted (object source, SyntacticPredicateEventArgs e); void syntacticPredicateSucceeded (object source, SyntacticPredicateEventArgs e); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/ParserTokenListenerBase.cs0000755000175000017500000000234710522211615026733 0ustar twernertwernernamespace antlr.debug { using System; /// /// Provides an abstract base for implementing subclasses. /// /// /// /// This abstract class is provided to make it easier to create s. /// You should extend this base class rather than creating your own. /// /// public abstract class ParserTokenListenerBase : ParserTokenListener { /// /// Handle the "Done" event. /// /// Event source object /// Event data object public virtual void doneParsing(object source, TraceEventArgs e) { } public virtual void refresh() { } /// /// Handle the "Consume" event. /// /// Event source object /// Event data object public virtual void parserConsume(object source, TokenEventArgs e) { } /// /// Handle the "ParserLA" event. 
/// /// Event source object /// Event data object public virtual void parserLA(object source, TokenEventArgs e) { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/Tracer.cs0000755000175000017500000000127010522211615023407 0ustar twernertwernernamespace antlr.debug { using System; public class Tracer : TraceListenerBase, TraceListener { protected string indentString = ""; // TBD: should be StringBuffer protected internal virtual void dedent() { if (indentString.Length < 2) indentString = ""; else indentString = indentString.Substring(2); } public override void enterRule(object source, TraceEventArgs e) { System.Console.Out.WriteLine(indentString + e); indent(); } public override void exitRule(object source, TraceEventArgs e) { dedent(); System.Console.Out.WriteLine(indentString + e); } protected internal virtual void indent() { indentString += " "; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.debug/TraceListener.cs0000755000175000017500000000030410522211615024730 0ustar twernertwernernamespace antlr.debug { using System; public interface TraceListener : Listener { void enterRule (object source, TraceEventArgs e); void exitRule (object source, TraceEventArgs e); } }antlr-2.7.7/lib/csharp/antlr.runtime/Makefile.in0000755000175000017500000002062210522211615021502 0ustar twernertwerner##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stdvars@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ## do not change this value subdir=lib/csharp/antlr.runtime all :: lib lib: @ANTLR_NET@ antlr_cs_FILES = \ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ANTLRException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ANTLRPanicException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ASTFactory.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ASTNodeCreator.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ASTNULLType.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ASTPair.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ASTVisitor.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/BaseAST.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ByteBuffer.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CharBuffer.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CharQueue.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CharScanner.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CharStreamException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CharStreamIOException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CommonAST.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CommonASTWithHiddenTokens.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CommonHiddenStreamToken.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/CommonToken.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/DefaultFileLineFormatter.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/DumpASTVisitor.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/FileLineFormatter.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/IHiddenStreamToken.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/InputBuffer.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/IToken.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/LexerSharedInputState.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/LLkParser.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/MismatchedCharException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/MismatchedTokenException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/NoViableAltException.cs\ 
@abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/NoViableAltForCharException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/Parser.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ParserSharedInputState.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ParseTree.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ParseTreeRule.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/ParseTreeToken.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/RecognitionException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/SemanticException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/StringUtils.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenBuffer.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/Token.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenCreator.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenQueue.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStreamBasicFilter.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStream.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStreamException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStreamHiddenTokenFilter.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStreamIOException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStreamRecognitionException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStreamRetryException.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStreamRewriteEngine.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenStreamSelector.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TokenWithIndex.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TreeParser.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr/TreeParserSharedInputState.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.collections/AST.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.collections.impl/ASTArray.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.collections.impl/BitSet.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ANTLREventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/DebuggingCharScanner.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/DebuggingInputBuffer.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/DebuggingParser.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/GuessingEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ICharScannerDebugSubject.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/IDebugSubject.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/InputBufferEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/InputBufferEventSupport.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/InputBufferListenerBase.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/InputBufferListener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/InputBufferReporter.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/IParserDebugSubject.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/Listener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/LLkDebuggingParser.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/MessageEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/MessageListenerBase.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/MessageListener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/NewLineEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/NewLineListener.cs\ 
@abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserController.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserEventSupport.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserListenerBase.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserListener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserMatchEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserMatchListenerBase.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserMatchListener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserReporter.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserTokenEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserTokenListenerBase.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParserTokenListener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ParseTreeDebugParser.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/ScannerEventSupport.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/SemanticPredicateEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/SemanticPredicateListenerBase.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/SemanticPredicateListener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/SyntacticPredicateEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/SyntacticPredicateListenerBase.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/SyntacticPredicateListener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/TraceEventArgs.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/TraceListenerBase.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/TraceListener.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/antlr.debug/Tracer.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/SupportClass.cs\ @abs_top_srcdir@/lib/csharp/antlr.runtime/AssemblyInfo.cs\ $(eof) clean-target :: @ @RMF@ @ANTLR_NET@ @ANTLR_NET@ :: $(antlr_cs_FILES) @- @RMF@ $@ @ @CSHARP_COMPILE_CMD@ $@ $(antlr_cs_FILES) @ test -f $@ || exit 1 clean :: @RMF@ *.obj *.o *.a *.lib *.so *.dll *~ @ANTLR_NET@ ## use this target if you just want to rebuild the lib without ## compiling again. clean-lib : @RMF@ @ANTLR_NET@ distclean :: clean @RMF@ Makefile install :: @ANTLR_NET@ @$(MKDIR) -p "$(libdir)" @@ECHO@ "install C# core files .. " @for f in @ANTLR_NET@ ; do \ @ECHO@ "install $${f}" ; \ if test -f "$${f}" ; then \ $(INSTALL) -m 444 "$${f}" "$(libdir)" ; \ $(INSTALL) -m 444 "$${f}" "$(datadir)/$(versioneddir)" ; \ fi ;\ done ## dependencies @ANTLR_NET@ :: Makefile @ANTLR_NET@ :: @abs_this_builddir@/scripts/csc.sh ## other dependencies to be listed below @stddeps@ antlr-2.7.7/lib/csharp/antlr.runtime/antlr/0000755000175000017500000000000010522211615020550 5ustar twernertwernerantlr-2.7.7/lib/csharp/antlr.runtime/antlr/CharStreamException.cs0000755000175000017500000000131610522211615025013 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /* * Anything that goes wrong while generating a stream of characters */ [Serializable] public class CharStreamException : ANTLRException { /* * CharStreamException constructor comment. 
*/ public CharStreamException(string s) : base(s) { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ByteBuffer.cs0000755000175000017500000000450710522211615023145 0ustar twernertwernerusing System; using System.Runtime.InteropServices; using Stream = System.IO.Stream; using BinaryReader = System.IO.BinaryReader; using IOException = System.IO.IOException; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A Stream of characters fed to the lexer from an InputStream that can * be rewound via mark()/rewind() methods. *

* A dynamic array is used to buffer up all the input characters. Normally, * "k" characters are stored in the buffer. More characters may be stored during * guess mode (testing syntactic predicate), or when LT(i>k) is referenced. * Consumption of characters is deferred. In other words, reading the next * character is not done by consume(), but deferred until needed by LA or LT. *

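 *
 * A rough usage sketch (illustrative only; the MemoryStream source and the
 * literal input below are made up and are not part of this file). Callers
 * typically speculate with mark()/rewind() while LA()/consume() drive the
 * deferred reads:
 *
 *   Stream src = new System.IO.MemoryStream(System.Text.Encoding.ASCII.GetBytes("abc"));
 *   InputBuffer buf = new ByteBuffer(src);
 *   int m = buf.mark();      // remember the current position
 *   char c = buf.LA(1);      // lookahead fills the buffer lazily
 *   buf.consume();           // consumption is only recorded here, performed later
 *   buf.rewind(m);           // syntactic predicate failed: restore the position
 *   // or, on success: buf.commit() releases the marker without rewinding
 *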
*/ // SAS: added this class to handle Binary input w/ FileInputStream public class ByteBuffer:InputBuffer { // char source [NonSerialized()] internal Stream input; private const int BUF_SIZE = 16; ///

/// Small buffer used to avoid reading individual chars /// private byte[] buf = new byte[BUF_SIZE]; /*Create a character buffer */ public ByteBuffer(Stream input_) : base() { input = input_; } /*Ensure that the character buffer is sufficiently full */ override public void fill(int amount) { // try // { syncConsume(); // Fill the buffer sufficiently to hold needed characters int bytesToRead = (amount + markerOffset) - queue.Count; int c; while (bytesToRead > 0) { // Read a few characters c = input.Read(buf, 0, BUF_SIZE); for (int i = 0; i < c; i++) { // Append the next character queue.Add(unchecked((char) buf[i])); } if (c < BUF_SIZE) { while ((bytesToRead-- > 0) && (queue.Count < BUF_SIZE)) { queue.Add(CharScanner.EOF_CHAR); } break; } bytesToRead -= c; } // } // catch (IOException io) // { // throw new CharStreamIOException(io); // } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/IHiddenStreamToken.cs0000644000175000017500000000125610522211615024564 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // public interface IHiddenStreamToken : IToken { IHiddenStreamToken getHiddenAfter(); void setHiddenAfter(IHiddenStreamToken t); IHiddenStreamToken getHiddenBefore(); void setHiddenBefore(IHiddenStreamToken t); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/CharStreamIOException.cs0000755000175000017500000000136310522211615025245 0ustar twernertwernerusing System; using IOException = System.IO.IOException; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /* * Wrap an IOException in a CharStreamException */ [Serializable] public class CharStreamIOException : CharStreamException { public IOException io; public CharStreamIOException(IOException io) : base(io.Message) { this.io = io; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/InputBuffer.cs0000755000175000017500000001037210522211615023336 0ustar twernertwernernamespace antlr { using System; using ArrayList = System.Collections.ArrayList; using StringBuilder = System.Text.StringBuilder; /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // // SAS: Added this class to genericise the input buffers for scanners // This allows a scanner to use a binary (FileInputStream) or // text (FileReader) stream of data; the generated scanner // subclass will define the input stream // There are two subclasses to this: CharBuffer and ByteBuffer /// /// Represents a stream of characters fed to the lexer from that can be rewound /// via mark()/rewind() methods. /// /// /// /// A dynamic array is used to buffer up all the input characters. Normally, /// "k" characters are stored in the buffer. 
More characters may be stored /// during guess mode (testing syntactic predicate), or when LT(i>k) is referenced. /// Consumption of characters is deferred. In other words, reading the next /// character is not done by conume(), but deferred until needed by LA or LT. /// /// public abstract class InputBuffer { // Number of active markers protected internal int nMarkers = 0; // Additional offset used when markers are active protected internal int markerOffset = 0; // Number of calls to consume() since last LA() or LT() call protected internal int numToConsume = 0; // Circular queue protected ArrayList queue; /*Create an input buffer */ public InputBuffer() { queue = new ArrayList(); } /*This method updates the state of the input buffer so that * the text matched since the most recent mark() is no longer * held by the buffer. So, you either do a mark/rewind for * failed predicate or mark/commit to keep on parsing without * rewinding the input. */ public virtual void commit() { nMarkers--; } /*Mark another character for deferred consumption */ public virtual char consume() { numToConsume++; return LA(1); } /*Ensure that the input buffer is sufficiently full */ public abstract void fill(int amount); public virtual string getLAChars() { StringBuilder la = new StringBuilder(); // copy buffer contents to array before looping thru contents (it's usually faster) char[] fastBuf = new char[queue.Count-markerOffset]; queue.CopyTo(fastBuf, markerOffset); la.Append(fastBuf); return la.ToString(); } public virtual string getMarkedChars() { StringBuilder marked = new StringBuilder(); // copy buffer contents to array before looping thru contents (it's usually faster) char[] fastBuf = new char[queue.Count-markerOffset]; queue.CopyTo(fastBuf, markerOffset); marked.Append(fastBuf); return marked.ToString(); } public virtual bool isMarked() { return (nMarkers != 0); } /*Get a lookahead character */ public virtual char LA(int i) { fill(i); return (char) queue[markerOffset + i - 1]; } /*Return an integer marker that can be used to rewind the buffer to * its current state. */ public virtual int mark() { syncConsume(); nMarkers++; return markerOffset; } /*Rewind the character buffer to a marker. * @param mark Marker returned previously from mark() */ public virtual void rewind(int mark) { syncConsume(); markerOffset = mark; nMarkers--; } /*Reset the input buffer */ public virtual void reset() { nMarkers = 0; markerOffset = 0; numToConsume = 0; queue.Clear(); } /*Sync up deferred consumption */ protected internal virtual void syncConsume() { if (numToConsume > 0) { if (nMarkers > 0) { // guess mode -- leave leading characters and bump offset. markerOffset += numToConsume; } else { // normal mode -- remove "consumed" characters from buffer queue.RemoveRange(0, numToConsume); } numToConsume = 0; } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ANTLRException.cs0000755000175000017500000000127210522211615023643 0ustar twernertwernernamespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
// using System; [Serializable] public class ANTLRException : Exception { public ANTLRException() : base() { } public ANTLRException(string s) : base(s) { } public ANTLRException(string s, Exception inner) : base(s, inner) { } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStreamHiddenTokenFilter.cs0000755000175000017500000001125010522211615026620 0ustar twernertwernerusing System; using BitSet = antlr.collections.impl.BitSet; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*This object filters a token stream coming from a lexer * or another TokenStream so that only certain token channels * get transmitted to the parser. * * Any of the channels can be filtered off as "hidden" channels whose * tokens can be accessed from the parser. */ public class TokenStreamHiddenTokenFilter : TokenStreamBasicFilter, TokenStream { // protected BitSet discardMask; protected internal BitSet hideMask; private IHiddenStreamToken nextMonitoredToken; /*track tail of hidden list emanating from previous * monitored token */ protected internal IHiddenStreamToken lastHiddenToken; protected internal IHiddenStreamToken firstHidden = null; public TokenStreamHiddenTokenFilter(TokenStream input) : base(input) { hideMask = new BitSet(); } protected internal virtual void consume() { nextMonitoredToken = (IHiddenStreamToken) input.nextToken(); } private void consumeFirst() { consume(); // get first token of input stream // Handle situation where hidden or discarded tokens // appear first in input stream IHiddenStreamToken p = null; // while hidden or discarded scarf tokens while (hideMask.member(LA(1).Type) || discardMask.member(LA(1).Type)) { if (hideMask.member(LA(1).Type)) { if (p == null) { p = LA(1); } else { p.setHiddenAfter(LA(1)); LA(1).setHiddenBefore(p); // double-link p = LA(1); } lastHiddenToken = p; if (firstHidden == null) { firstHidden = p; // record hidden token if first } } consume(); } } public virtual BitSet getDiscardMask() { return discardMask; } /*Return a ptr to the hidden token appearing immediately after * token t in the input stream. */ public virtual IHiddenStreamToken getHiddenAfter(IHiddenStreamToken t) { return t.getHiddenAfter(); } /*Return a ptr to the hidden token appearing immediately before * token t in the input stream. */ public virtual IHiddenStreamToken getHiddenBefore(IHiddenStreamToken t) { return t.getHiddenBefore(); } public virtual BitSet getHideMask() { return hideMask; } /*Return the first hidden token if one appears * before any monitored token. */ public virtual IHiddenStreamToken getInitialHiddenToken() { return firstHidden; } public virtual void hide(int m) { hideMask.add(m); } public virtual void hide(BitSet mask) { hideMask = mask; } protected internal virtual IHiddenStreamToken LA(int i) { return nextMonitoredToken; } /*Return the next monitored token. * Test the token following the monitored token. * If following is another monitored token, save it * for the next invocation of nextToken (like a single * lookahead token) and return it then. * If following is unmonitored, nondiscarded (hidden) * channel token, add it to the monitored token. * * Note: EOF must be a monitored Token. 
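 *
 * A wiring sketch (the lexer/parser class names and token type constants below
 * are illustrative; a real grammar generates its own): the filter sits between
 * the lexer and the parser and routes selected token types to the hidden channel.
 *
 *   MyLexer lexer = new MyLexer(new System.IO.StringReader("a + b"));
 *   TokenStreamHiddenTokenFilter filter = new TokenStreamHiddenTokenFilter(lexer);
 *   filter.hide(MyLexerTokenTypes.WS);        // whitespace goes to the hidden channel
 *   filter.hide(MyLexerTokenTypes.COMMENT);   // comments too
 *   MyParser parser = new MyParser(filter);   // the parser only sees monitored tokens
 *   // hidden neighbours stay reachable via getHiddenBefore()/getHiddenAfter()
 *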
*/ override public IToken nextToken() { // handle an initial condition; don't want to get lookahead // token of this splitter until first call to nextToken if (LA(1) == null) { consumeFirst(); } // we always consume hidden tokens after monitored, thus, // upon entry LA(1) is a monitored token. IHiddenStreamToken monitored = LA(1); // point to hidden tokens found during last invocation monitored.setHiddenBefore(lastHiddenToken); lastHiddenToken = null; // Look for hidden tokens, hook them into list emanating // from the monitored tokens. consume(); IHiddenStreamToken p = monitored; // while hidden or discarded scarf tokens while (hideMask.member(LA(1).Type) || discardMask.member(LA(1).Type)) { if (hideMask.member(LA(1).Type)) { // attach the hidden token to the monitored in a chain // link forwards p.setHiddenAfter(LA(1)); // link backwards if (p != monitored) { //hidden cannot point to monitored tokens LA(1).setHiddenBefore(p); } p = (lastHiddenToken = LA(1)); } consume(); } return monitored; } public virtual void resetState() { firstHidden = null; lastHiddenToken = null; nextMonitoredToken = null; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/BaseAST.cs0000755000175000017500000003346310522211615022335 0ustar twernertwernerusing System; using StringBuilder = System.Text.StringBuilder; using ISerializable = System.Runtime.Serialization.ISerializable; using TextWriter = System.IO.TextWriter; using ArrayList = System.Collections.ArrayList; using IEnumerator = System.Collections.IEnumerator; using AST = antlr.collections.AST; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /* * A Child-Sibling Tree. * * A tree with PLUS at the root and with two children 3 and 4 is * structured as: * * PLUS * | * 3 -- 4 * * and can be specified easily in LISP notation as * * (PLUS 3 4) * * where every '(' starts a new subtree. * * These trees are particular useful for translators because of * the flexibility of the children lists. They are also very easy * to walk automatically, whereas trees with specific children * reference fields can't easily be walked automatically. * * This class contains the basic support for an AST. * Most people will create ASTs that are subclasses of * BaseAST or of CommonAST. */ [Serializable()] public abstract class BaseAST : AST { protected internal BaseAST down; protected internal BaseAST right; private static bool verboseStringConversion = false; private static string[] tokenNames = null; /*Add a node to the end of the child list for this node */ public virtual void addChild(AST node) { if (node == null) return ; BaseAST t = this.down; if (t != null) { while (t.right != null) { t = t.right; } t.right = (BaseAST) node; } else { this.down = (BaseAST) node; } } private void doWorkForFindAll(ArrayList v, AST target, bool partialMatch) { AST sibling; // Start walking sibling lists, looking for matches. 
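// (each sibling in this list is compared against 'target'; matches are collected
//  in 'v', and every sibling's child list is searched recursively regardless of
//  whether the sibling itself matched)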
//siblingWalk: for (sibling = this; sibling != null; sibling = sibling.getNextSibling()) { if ((partialMatch && sibling.EqualsTreePartial(target)) || (!partialMatch && sibling.EqualsTree(target))) { v.Add(sibling); } // regardless of match or not, check any children for matches if (sibling.getFirstChild() != null) { ((BaseAST) sibling.getFirstChild()).doWorkForFindAll(v, target, partialMatch); } } } public override bool Equals(object obj) { if (obj == null) return false; if (this.GetType() != obj.GetType()) return false; return Equals((AST)obj); } /*Is node t equal to this in terms of token type and text? */ public virtual bool Equals(AST t) { if (t == null) return false; return (Object.Equals(this.getText(), t.getText())) && (this.Type == t.Type); } /*Is t an exact structural and equals() match of this tree. The * 'this' reference is considered the start of a sibling list. */ public virtual bool EqualsList(AST t) { AST sibling; // the empty tree is not a match of any non-null tree. if (t == null) { return false; } // Otherwise, start walking sibling lists. First mismatch, return false. for (sibling = this; sibling != null && t != null; sibling = sibling.getNextSibling(), t = t.getNextSibling()) { // as a quick optimization, check roots first. if (!sibling.Equals(t)) { return false; } // if roots match, do full list match test on children. if (sibling.getFirstChild() != null) { if (!sibling.getFirstChild().EqualsList(t.getFirstChild())) { return false; } } else if (t.getFirstChild() != null) { return false; } } if (sibling == null && t == null) { return true; } // one sibling list has more than the other return false; } /*Is 'sub' a subtree of this list? * The siblings of the root are NOT ignored. */ public virtual bool EqualsListPartial(AST sub) { AST sibling; // the empty tree is always a subset of any tree. if (sub == null) { return true; } // Otherwise, start walking sibling lists. First mismatch, return false. for (sibling = this; sibling != null && sub != null; sibling = sibling.getNextSibling(), sub = sub.getNextSibling()) { // as a quick optimization, check roots first. if (!sibling.Equals(sub)) return false; // if roots match, do partial list match test on children. if (sibling.getFirstChild() != null) { if (!sibling.getFirstChild().EqualsListPartial(sub.getFirstChild())) return false; } } if (sibling == null && sub != null) { // nothing left to match in this tree, but subtree has more return false; } // either both are null or sibling has more, but subtree doesn't return true; } /*Is tree rooted at 'this' equal to 't'? The siblings * of 'this' are ignored. */ public virtual bool EqualsTree(AST t) { // check roots first. if (!this.Equals(t)) return false; // if roots match, do full list match test on children. if (this.getFirstChild() != null) { if (!this.getFirstChild().EqualsList(t.getFirstChild())) return false; } else if (t.getFirstChild() != null) { return false; } return true; } /*Is 't' a subtree of the tree rooted at 'this'? The siblings * of 'this' are ignored. */ public virtual bool EqualsTreePartial(AST sub) { // the empty tree is always a subset of any tree. if (sub == null) { return true; } // check roots first. if (!this.Equals(sub)) return false; // if roots match, do full list partial match test on children. if (this.getFirstChild() != null) { if (!this.getFirstChild().EqualsListPartial(sub.getFirstChild())) return false; } return true; } /*Walk the tree looking for all exact subtree matches. 
Return * an IEnumerator that lets the caller walk the list * of subtree roots found herein. */ public virtual IEnumerator findAll(AST target) { ArrayList roots = new ArrayList(10); //AST sibling; // the empty tree cannot result in an enumeration if (target == null) { return null; } doWorkForFindAll(roots, target, false); // find all matches recursively return roots.GetEnumerator(); } /*Walk the tree looking for all subtrees. Return * an IEnumerator that lets the caller walk the list * of subtree roots found herein. */ public virtual IEnumerator findAllPartial(AST sub) { ArrayList roots = new ArrayList(10); //AST sibling; // the empty tree cannot result in an enumeration if (sub == null) { return null; } doWorkForFindAll(roots, sub, true); // find all matches recursively return roots.GetEnumerator(); } /*Get the first child of this node; null if not children */ public virtual AST getFirstChild() { return down; } /*Get the next sibling in line after this one */ public virtual AST getNextSibling() { return right; } /*Get the token text for this node */ public virtual string getText() { return ""; } /*Get the token type for this node */ public virtual int Type { get { return 0; } set { ; } } /// /// Get number of children of this node; if leaf, returns 0 /// /// Number of children public int getNumberOfChildren() { BaseAST t = this.down; int n = 0; if (t != null) { n = 1; while (t.right != null) { t = t.right; n++; } } return n; } public abstract void initialize(int t, string txt); public abstract void initialize(AST t); public abstract void initialize(IToken t); /*Remove all children */ public virtual void removeChildren() { down = null; } public virtual void setFirstChild(AST c) { down = (BaseAST) c; } public virtual void setNextSibling(AST n) { right = (BaseAST) n; } /*Set the token text for this node */ public virtual void setText(string text) { ; } /*Set the token type for this node */ public virtual void setType(int ttype) { this.Type = ttype; } public static void setVerboseStringConversion(bool verbose, string[] names) { verboseStringConversion = verbose; tokenNames = names; } override public string ToString() { StringBuilder b = new StringBuilder(); // if verbose and type name not same as text (keyword probably) if (verboseStringConversion && (0 != String.Compare(getText(), (tokenNames[Type]), true)) && (0 != String.Compare(getText(), StringUtils.stripFrontBack(tokenNames[Type], @"""", @""""), true))) { b.Append('['); b.Append(getText()); b.Append(",<"); b.Append(tokenNames[Type]); b.Append(">]"); return b.ToString(); } return getText(); } /*Print out a child-sibling tree in LISP notation */ public virtual string ToStringList() { AST t = this; string ts = ""; if (t.getFirstChild() != null) ts += " ("; ts += " " + this.ToString(); if (t.getFirstChild() != null) { ts += ((BaseAST) t.getFirstChild()).ToStringList(); } if (t.getFirstChild() != null) ts += " )"; if (t.getNextSibling() != null) { ts += ((BaseAST) t.getNextSibling()).ToStringList(); } return ts; } public virtual string ToStringTree() { AST t = this; string ts = ""; if (t.getFirstChild() != null) { ts += " ("; } ts += " " + this.ToString(); if (t.getFirstChild() != null) { ts += ((BaseAST) t.getFirstChild()).ToStringList(); } if (t.getFirstChild() != null) { ts += " )"; } return ts; } public virtual string ToTree() { return ToTree(string.Empty); } public virtual string ToTree(string prefix) { StringBuilder sb = new StringBuilder(prefix); // Replace vertical bar if there is no next sibling. 
if ( (getNextSibling() == null) ) sb.Append("+--"); else sb.Append("|--"); sb.Append( ToString() ); sb.Append( Environment.NewLine ); if ( getFirstChild() != null ) { // Replace vertical bar if there is no next sibling. if ( getNextSibling() == null ) sb.Append( ((BaseAST) getFirstChild()).ToTree(prefix + " ") ); else sb.Append( ((BaseAST) getFirstChild()).ToTree(prefix + "| ") ); } if ( getNextSibling() != null ) sb.Append( ((BaseAST) getNextSibling()).ToTree(prefix) ); return sb.ToString(); } public static string decode(string text) { char c, c1, c2, c3, c4, c5; StringBuilder n = new StringBuilder(); for (int i = 0; i < text.Length; i++) { c = text[i]; if (c == '&') { c1 = text[i + 1]; c2 = text[i + 2]; c3 = text[i + 3]; c4 = text[i + 4]; c5 = text[i + 5]; if (c1 == 'a' && c2 == 'm' && c3 == 'p' && c4 == ';') { n.Append("&"); i += 5; } else if (c1 == 'l' && c2 == 't' && c3 == ';') { n.Append("<"); i += 4; } else if (c1 == 'g' && c2 == 't' && c3 == ';') { n.Append(">"); i += 4; } else if (c1 == 'q' && c2 == 'u' && c3 == 'o' && c4 == 't' && c5 == ';') { n.Append("\""); i += 6; } else if (c1 == 'a' && c2 == 'p' && c3 == 'o' && c4 == 's' && c5 == ';') { n.Append("'"); i += 6; } else n.Append("&"); } else n.Append(c); } return n.ToString(); } public static string encode(string text) { char c; StringBuilder n = new StringBuilder(); for (int i = 0; i < text.Length; i++) { c = text[i]; switch (c) { case '&': { n.Append("&amp;"); break; } case '<': { n.Append("&lt;"); break; } case '>': { n.Append("&gt;"); break; } case '"': { n.Append("&quot;"); break; } case '\'': { n.Append("&apos;"); break; } default: { n.Append(c); break; } } } return n.ToString(); } public virtual void xmlSerializeNode(TextWriter outWriter) { StringBuilder buf = new StringBuilder(100); buf.Append("<"); buf.Append(GetType().FullName + " "); buf.Append("text=\"" + encode(getText()) + "\" type=\"" + Type + "\"/>"); outWriter.Write(buf.ToString()); } public virtual void xmlSerializeRootOpen(TextWriter outWriter) { StringBuilder buf = new StringBuilder(100); buf.Append("<"); buf.Append(GetType().FullName + " "); buf.Append("text=\"" + encode(getText()) + "\" type=\"" + Type + "\">\n"); outWriter.Write(buf.ToString()); } public virtual void xmlSerializeRootClose(TextWriter outWriter) { outWriter.Write("</" + GetType().FullName + ">\n"); } public virtual void xmlSerialize(TextWriter outWriter) { // print out this node and all siblings for (AST node = this; node != null; node = node.getNextSibling()) { if (node.getFirstChild() == null) { // print guts (class name, attributes) ((BaseAST) node).xmlSerializeNode(outWriter); } else { ((BaseAST) node).xmlSerializeRootOpen(outWriter); // print children ((BaseAST) node.getFirstChild()).xmlSerialize(outWriter); // print end tag ((BaseAST) node).xmlSerializeRootClose(outWriter); } } } #region Implementation of ICloneable [Obsolete("Deprecated since version 2.7.2.
Use ASTFactory.dup() instead.", false)] public virtual object Clone() { return MemberwiseClone(); } #endregion public override Int32 GetHashCode() { return base.GetHashCode(); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ParseTree.cs0000644000175000017500000000365110522211615022776 0ustar twernertwernernamespace antlr { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // using System; using StringBuilder = System.Text.StringBuilder; using AST = antlr.collections.AST; public abstract class ParseTree : BaseAST { /// /// Walk parse tree and return requested number of derivation steps. /// If steps less-than 0, return node text. If steps equals 1, return derivation /// string at step. /// /// derivation steps /// public string getLeftmostDerivationStep(int step) { if ( step <= 0 ) { return ToString(); } StringBuilder buf = new StringBuilder (2000); getLeftmostDerivation(buf, step); return buf.ToString(); } public string getLeftmostDerivation(int maxSteps) { StringBuilder buf = new StringBuilder(2000); buf.Append(" " + this.ToString()); buf.Append("\n"); for (int d=1; d < maxSteps; d++) { buf.Append(" =>"); buf.Append(getLeftmostDerivationStep(d)); buf.Append("\n"); } return buf.ToString(); } /// /// Get derivation and return how many you did (less than requested for /// subtree roots. /// /// string buffer /// derivation steps /// protected internal abstract int getLeftmostDerivation(StringBuilder buf, int step); // just satisfy BaseAST interface; unused as we manually create nodes public override void initialize(int i, string s) { } public override void initialize(AST ast) { } public override void initialize(IToken token) { } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr/LLkParser.cs0000755000175000017500000000425710522211615022751 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*An LL(k) parser. * * @see antlr.Token * @see antlr.TokenBuffer * @see antlr.LL1Parser */ public class LLkParser : Parser { internal int k; public LLkParser(int k_) { k = k_; } public LLkParser(ParserSharedInputState state, int k_) { k = k_; inputState = state; } public LLkParser(TokenBuffer tokenBuf, int k_) { k = k_; setTokenBuffer(tokenBuf); } public LLkParser(TokenStream lexer, int k_) { k = k_; TokenBuffer tokenBuf = new TokenBuffer(lexer); setTokenBuffer(tokenBuf); } /*Consume another token from the input stream. Can only write sequentially! * If you need 3 tokens ahead, you must consume() 3 times. *

* Note that it is possible to overwrite tokens that have not been matched. * For example, calling consume() 3 times when k=2, means that the first token * consumed will be overwritten with the 3rd. */ override public void consume() { inputState.input.consume(); } override public int LA(int i) { return inputState.input.LA(i); } override public IToken LT(int i) { return inputState.input.LT(i); } private void trace(string ee, string rname) { traceIndent(); Console.Out.Write(ee + rname + ((inputState.guessing > 0)?"; [guessing]":"; ")); for (int i = 1; i <= k; i++) { if (i != 1) { Console.Out.Write(", "); } if ( LT(i)!=null ) { Console.Out.Write("LA(" + i + ")==" + LT(i).getText()); } else { Console.Out.Write("LA(" + i + ")==null"); } } Console.Out.WriteLine(""); } override public void traceIn(string rname) { traceDepth += 1; trace("> ", rname); } override public void traceOut(string rname) { trace("< ", rname); traceDepth -= 1; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TreeParserSharedInputState.cs0000755000175000017500000000150110522211615026323 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*This object contains the data associated with an * input AST. Multiple parsers * share a single TreeParserSharedInputState to parse * the same tree or to have the parser walk multiple * trees. */ public class TreeParserSharedInputState { /*Are we guessing (guessing>0)? */ public int guessing = 0; public virtual void reset() { guessing = 0; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ASTPair.cs0000755000175000017500000000303710522211615022350 0ustar twernertwernernamespace antlr { using System; using Queue = System.Collections.Queue; using AST = antlr.collections.AST; /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*ASTPair: utility class used for manipulating a pair of ASTs * representing the current AST root and current AST sibling. * This exists to compensate for the lack of pointers or 'var' * arguments in Java. */ public struct ASTPair { public AST root; // current root of tree public AST child; // current child to which siblings are added /*Make sure that child is the last sibling */ public void advanceChildToEnd() { if (child != null) { while (child.getNextSibling() != null) { child = child.getNextSibling(); } } } /*Copy an ASTPair. Don't call it clone() because we want type-safety */ public ASTPair copy() { ASTPair tmp = new ASTPair(); tmp.root = root; tmp.child = child; return tmp; } private void reset() { root = null; child = null; } override public string ToString() { string r = (root == null) ? "null" : root.getText(); string c = (child == null) ?
"null" : child.getText(); return "[" + r + "," + c + "]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/Parser.cs0000755000175000017500000003665110522211615022351 0ustar twernertwernerusing System; using EventHandlerList = System.ComponentModel.EventHandlerList; using BitSet = antlr.collections.impl.BitSet; using AST = antlr.collections.AST; using ASTArray = antlr.collections.impl.ASTArray; using antlr.debug; using MessageListener = antlr.debug.MessageListener; using ParserListener = antlr.debug.ParserListener; using ParserMatchListener = antlr.debug.ParserMatchListener; using ParserTokenListener = antlr.debug.ParserTokenListener; using SemanticPredicateListener = antlr.debug.SemanticPredicateListener; using SyntacticPredicateListener = antlr.debug.SyntacticPredicateListener; using TraceListener = antlr.debug.TraceListener; /* private Vector messageListeners; private Vector newLineListeners; private Vector matchListeners; private Vector tokenListeners; private Vector semPredListeners; private Vector synPredListeners; private Vector traceListeners; */ namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // public abstract class Parser : IParserDebugSubject { // Used to store event delegates private EventHandlerList events_ = new EventHandlerList(); protected internal EventHandlerList Events { get { return events_; } } // The unique keys for each event that Parser [objects] can generate internal static readonly object EnterRuleEventKey = new object(); internal static readonly object ExitRuleEventKey = new object(); internal static readonly object DoneEventKey = new object(); internal static readonly object ReportErrorEventKey = new object(); internal static readonly object ReportWarningEventKey = new object(); internal static readonly object NewLineEventKey = new object(); internal static readonly object MatchEventKey = new object(); internal static readonly object MatchNotEventKey = new object(); internal static readonly object MisMatchEventKey = new object(); internal static readonly object MisMatchNotEventKey = new object(); internal static readonly object ConsumeEventKey = new object(); internal static readonly object LAEventKey = new object(); internal static readonly object SemPredEvaluatedEventKey = new object(); internal static readonly object SynPredStartedEventKey = new object(); internal static readonly object SynPredFailedEventKey = new object(); internal static readonly object SynPredSucceededEventKey = new object(); protected internal ParserSharedInputState inputState; /*Nesting level of registered handlers */ // protected int exceptionLevel = 0; /*Table of token type to token names */ protected internal string[] tokenNames; /*AST return value for a rule is squirreled away here */ protected internal AST returnAST; /*AST support code; parser and treeparser delegate to this object */ protected internal ASTFactory astFactory = new ASTFactory(); private bool ignoreInvalidDebugCalls = false; /*Used to keep track of indentdepth for traceIn/Out */ protected internal int traceDepth = 0; public Parser() { inputState = new ParserSharedInputState(); } public Parser(ParserSharedInputState state) { inputState = state; } ///

/// /// public event TraceEventHandler EnterRule { add { Events.AddHandler(EnterRuleEventKey, value); } remove { Events.RemoveHandler(EnterRuleEventKey, value); } } public event TraceEventHandler ExitRule { add { Events.AddHandler(ExitRuleEventKey, value); } remove { Events.RemoveHandler(ExitRuleEventKey, value); } } public event TraceEventHandler Done { add { Events.AddHandler(DoneEventKey, value); } remove { Events.RemoveHandler(DoneEventKey, value); } } public event MessageEventHandler ErrorReported { add { Events.AddHandler(ReportErrorEventKey, value); } remove { Events.RemoveHandler(ReportErrorEventKey, value); } } public event MessageEventHandler WarningReported { add { Events.AddHandler(ReportWarningEventKey, value); } remove { Events.RemoveHandler(ReportWarningEventKey, value); } } public event MatchEventHandler MatchedToken { add { Events.AddHandler(MatchEventKey, value); } remove { Events.RemoveHandler(MatchEventKey, value); } } public event MatchEventHandler MatchedNotToken { add { Events.AddHandler(MatchNotEventKey, value); } remove { Events.RemoveHandler(MatchNotEventKey, value); } } public event MatchEventHandler MisMatchedToken { add { Events.AddHandler(MisMatchEventKey, value); } remove { Events.RemoveHandler(MisMatchEventKey, value); } } public event MatchEventHandler MisMatchedNotToken { add { Events.AddHandler(MisMatchNotEventKey, value); } remove { Events.RemoveHandler(MisMatchNotEventKey, value); } } public event TokenEventHandler ConsumedToken { add { Events.AddHandler(ConsumeEventKey, value); } remove { Events.RemoveHandler(ConsumeEventKey, value); } } public event TokenEventHandler TokenLA { add { Events.AddHandler(LAEventKey, value); } remove { Events.RemoveHandler(LAEventKey, value); } } public event SemanticPredicateEventHandler SemPredEvaluated { add { Events.AddHandler(SemPredEvaluatedEventKey, value); } remove { Events.RemoveHandler(SemPredEvaluatedEventKey, value); } } public event SyntacticPredicateEventHandler SynPredStarted { add { Events.AddHandler(SynPredStartedEventKey, value); } remove { Events.RemoveHandler(SynPredStartedEventKey, value); } } public event SyntacticPredicateEventHandler SynPredFailed { add { Events.AddHandler(SynPredFailedEventKey, value); } remove { Events.RemoveHandler(SynPredFailedEventKey, value); } } public event SyntacticPredicateEventHandler SynPredSucceeded { add { Events.AddHandler(SynPredSucceededEventKey, value); } remove { Events.RemoveHandler(SynPredSucceededEventKey, value); } } public virtual void addMessageListener(MessageListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("addMessageListener() is only valid if parser built for debugging"); } public virtual void addParserListener(ParserListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("addParserListener() is only valid if parser built for debugging"); } public virtual void addParserMatchListener(ParserMatchListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("addParserMatchListener() is only valid if parser built for debugging"); } public virtual void addParserTokenListener(ParserTokenListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("addParserTokenListener() is only valid if parser built for debugging"); } public virtual void addSemanticPredicateListener(SemanticPredicateListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("addSemanticPredicateListener() is only valid if parser built for debugging"); } public virtual void 
addSyntacticPredicateListener(SyntacticPredicateListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("addSyntacticPredicateListener() is only valid if parser built for debugging"); } public virtual void addTraceListener(TraceListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("addTraceListener() is only valid if parser built for debugging"); } /*Get another token object from the token stream */ public abstract void consume(); /*Consume tokens until one matches the given token */ public virtual void consumeUntil(int tokenType) { while (LA(1) != Token.EOF_TYPE && LA(1) != tokenType) { consume(); } } /*Consume tokens until one matches the given token set */ public virtual void consumeUntil(BitSet bset) { while (LA(1) != Token.EOF_TYPE && !bset.member(LA(1))) { consume(); } } protected internal virtual void defaultDebuggingSetup(TokenStream lexer, TokenBuffer tokBuf) { // by default, do nothing -- we're not debugging } /*Get the AST return value squirreled away in the parser */ public virtual AST getAST() { return returnAST; } public virtual ASTFactory getASTFactory() { return astFactory; } public virtual string getFilename() { return inputState.filename; } public virtual ParserSharedInputState getInputState() { return inputState; } public virtual void setInputState(ParserSharedInputState state) { inputState = state; } public virtual void resetState() { traceDepth = 0; inputState.reset(); } public virtual string getTokenName(int num) { return tokenNames[num]; } public virtual string[] getTokenNames() { return tokenNames; } public virtual bool isDebugMode() { return false; } /*Return the token type of the ith token of lookahead where i=1 * is the current token being examined by the parser (i.e., it * has not been matched yet). */ public abstract int LA(int i); /*Return the ith token of lookahead */ public abstract IToken LT(int i); // Forwarded to TokenBuffer public virtual int mark() { return inputState.input.mark(); } /*Make sure current lookahead symbol matches token type t. * Throw an exception upon mismatch, which is catch by either the * error handler or by the syntactic predicate. */ public virtual void match(int t) { if (LA(1) != t) throw new MismatchedTokenException(tokenNames, LT(1), t, false, getFilename()); else consume(); } /*Make sure current lookahead symbol matches the given set * Throw an exception upon mismatch, which is catch by either the * error handler or by the syntactic predicate. */ public virtual void match(BitSet b) { if (!b.member(LA(1))) throw new MismatchedTokenException(tokenNames, LT(1), b, false, getFilename()); else consume(); } public virtual void matchNot(int t) { if (LA(1) == t) throw new MismatchedTokenException(tokenNames, LT(1), t, true, getFilename()); else consume(); } /// /// @deprecated as of 2.7.2. This method calls System.exit() and writes /// directly to stderr, which is usually not appropriate when /// a parser is embedded into a larger application. Since the method is /// static, it cannot be overridden to avoid these problems. /// ANTLR no longer uses this method internally or in generated code. 
/// /// [Obsolete("De-activated since version 2.7.2.6 as it cannot be overidden.", true)] public static void panic() { System.Console.Error.WriteLine("Parser: panic"); System.Environment.Exit(1); } public virtual void removeMessageListener(MessageListener l) { if (!ignoreInvalidDebugCalls) throw new System.SystemException("removeMessageListener() is only valid if parser built for debugging"); } public virtual void removeParserListener(ParserListener l) { if (!ignoreInvalidDebugCalls) throw new System.SystemException("removeParserListener() is only valid if parser built for debugging"); } public virtual void removeParserMatchListener(ParserMatchListener l) { if (!ignoreInvalidDebugCalls) throw new System.SystemException("removeParserMatchListener() is only valid if parser built for debugging"); } public virtual void removeParserTokenListener(ParserTokenListener l) { if (!ignoreInvalidDebugCalls) throw new System.SystemException("removeParserTokenListener() is only valid if parser built for debugging"); } public virtual void removeSemanticPredicateListener(SemanticPredicateListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("removeSemanticPredicateListener() is only valid if parser built for debugging"); } public virtual void removeSyntacticPredicateListener(SyntacticPredicateListener l) { if (!ignoreInvalidDebugCalls) throw new System.ArgumentException("removeSyntacticPredicateListener() is only valid if parser built for debugging"); } public virtual void removeTraceListener(TraceListener l) { if (!ignoreInvalidDebugCalls) throw new System.SystemException("removeTraceListener() is only valid if parser built for debugging"); } /*Parser error-reporting function can be overridden in subclass */ public virtual void reportError(RecognitionException ex) { Console.Error.WriteLine(ex); } /*Parser error-reporting function can be overridden in subclass */ public virtual void reportError(string s) { if (getFilename() == null) { Console.Error.WriteLine("error: " + s); } else { Console.Error.WriteLine(getFilename() + ": error: " + s); } } /*Parser warning-reporting function can be overridden in subclass */ public virtual void reportWarning(string s) { if (getFilename() == null) { Console.Error.WriteLine("warning: " + s); } else { Console.Error.WriteLine(getFilename() + ": warning: " + s); } } public virtual void recover(RecognitionException ex, BitSet tokenSet) { consume(); consumeUntil(tokenSet); } public virtual void rewind(int pos) { inputState.input.rewind(pos); } /// /// Specify an object with support code (shared by Parser and TreeParser. /// Normally, the programmer does not play with this, using /// instead. /// /// public virtual void setASTFactory(ASTFactory f) { astFactory = f; } /// /// Specify the type of node to create during tree building. /// /// Fully qualified AST Node type name. public virtual void setASTNodeClass(string cl) { astFactory.setASTNodeType(cl); } /// /// Specify the type of node to create during tree building. /// use now to be consistent with /// Token Object Type accessor. /// /// Fully qualified AST Node type name. 
[Obsolete("Replaced by setASTNodeClass(string) since version 2.7.1", true)] public virtual void setASTNodeType(string nodeType) { setASTNodeClass(nodeType); } public virtual void setDebugMode(bool debugMode) { if (!ignoreInvalidDebugCalls) throw new System.SystemException("setDebugMode() only valid if parser built for debugging"); } public virtual void setFilename(string f) { inputState.filename = f; } public virtual void setIgnoreInvalidDebugCalls(bool Value) { ignoreInvalidDebugCalls = Value; } /*Set or change the input token buffer */ public virtual void setTokenBuffer(TokenBuffer t) { inputState.input = t; } public virtual void traceIndent() { for (int i = 0; i < traceDepth; i++) Console.Out.Write(" "); } public virtual void traceIn(string rname) { traceDepth += 1; traceIndent(); Console.Out.WriteLine("> " + rname + "; LA(1)==" + LT(1).getText() + ((inputState.guessing > 0)?" [guessing]":"")); } public virtual void traceOut(string rname) { traceIndent(); Console.Out.WriteLine("< " + rname + "; LA(1)==" + LT(1).getText() + ((inputState.guessing > 0)?" [guessing]":"")); traceDepth -= 1; } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ANTLRPanicException.cs0000755000175000017500000000132310522211615024613 0ustar twernertwernernamespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // using System; [Serializable] public class ANTLRPanicException : ANTLRException { public ANTLRPanicException() : base() { } public ANTLRPanicException(string s) : base(s) { } public ANTLRPanicException(string s, Exception inner) : base(s, inner) { } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ASTNULLType.cs0000755000175000017500000000431510522211615023071 0ustar twernertwernerusing System; using IEnumerator = System.Collections.IEnumerator; using AST = antlr.collections.AST; using Token = antlr.Token; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
// /*There is only one instance of this class **/ public class ASTNULLType : AST { public virtual void addChild(AST c) {} public virtual bool Equals(AST t) { return false; } public virtual bool EqualsList(AST t) { return false; } public virtual bool EqualsListPartial(AST t) { return false; } public virtual bool EqualsTree(AST t) { return false; } public virtual bool EqualsTreePartial(AST t) { return false; } public virtual IEnumerator findAll(AST tree) { return null; } public virtual IEnumerator findAllPartial(AST subtree) { return null; } public virtual AST getFirstChild() { return this; } public virtual AST getNextSibling() { return this; } public virtual string getText() { return ""; } public virtual int Type { get { return Token.NULL_TREE_LOOKAHEAD; } set { ; } } public int getNumberOfChildren() { return 0; } public virtual void initialize(int t, string txt) { } public virtual void initialize(AST t) { } public virtual void initialize(IToken t) { } public virtual void setFirstChild(AST c) { ; } public virtual void setNextSibling(AST n) { ; } public virtual void setText(string text) { ; } public virtual void setType(int ttype) { this.Type = ttype; } override public string ToString() { return getText(); } public virtual string ToStringList() { return getText(); } public virtual string ToStringTree() { return getText(); } #region Implementation of ICloneable public object Clone() { return MemberwiseClone(); } #endregion } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenCreator.cs0000644000175000017500000000231510522211615023500 0ustar twernertwernernamespace antlr { using System; /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // /// /// A creator of Token object instances. /// /// /// /// This class and it's sub-classes exists primarily as an optimization /// of the reflection-based mechanism(s) previously used exclusively to /// create instances of Token objects. /// /// /// Since Lexers in ANTLR use a single Token type, each TokenCreator can /// create one class of Token objects (that's why it's not called TokenFactory). /// /// public abstract class TokenCreator { /// /// Returns the fully qualified name of the Token type that this /// class creates. /// public abstract string TokenTypeName { get; } /// /// Constructs a instance. /// public abstract IToken Create(); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStreamRetryException.cs0000755000175000017500000000135510522211615026247 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /* * Aborted recognition of current token. Try to get one again. * Used by TokenStreamSelector.retry() to force nextToken() * of stream to re-enter and retry. 
*/ [Serializable] public class TokenStreamRetryException : TokenStreamException { public TokenStreamRetryException() {} } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/DumpASTVisitor.cs0000755000175000017500000000334710522211615023746 0ustar twernertwernerusing System; using AST = antlr.collections.AST; namespace antlr { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /// /// Summary description for DumpASTVisitor. /// /** Simple class to dump the contents of an AST to the output */ public class DumpASTVisitor : ASTVisitor { protected int level = 0; private void tabs() { for (int i = 0; i < level; i++) { Console.Out.Write(" "); } } public void visit(AST node) { // Flatten this level of the tree if it has no children bool flatten = /*true*/ false; AST node2; for (node2 = node; node2 != null; node2 = node2.getNextSibling()) { if (node2.getFirstChild() != null) { flatten = false; break; } } for (node2 = node; node2 != null; node2 = node2.getNextSibling()) { if (!flatten || node2 == node) { tabs(); } if (node2.getText() == null) { Console.Out.Write("nil"); } else { Console.Out.Write(node2.getText()); } Console.Out.Write(" [" + node2.Type + "] "); if (flatten) { Console.Out.Write(" "); } else { Console.Out.WriteLine(""); } if (node2.getFirstChild() != null) { level++; visit(node2.getFirstChild()); level--; } } if (flatten) { Console.Out.WriteLine(""); } } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStreamBasicFilter.cs0000755000175000017500000000245310522211615025452 0ustar twernertwernerusing System; using BitSet = antlr.collections.impl.BitSet; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*This object is a TokenStream that passes through all * tokens except for those that you tell it to discard. * There is no buffering of the tokens. */ public class TokenStreamBasicFilter : TokenStream { /*The set of token types to discard */ protected internal BitSet discardMask; /*The input stream */ protected internal TokenStream input; public TokenStreamBasicFilter(TokenStream input) { this.input = input; discardMask = new BitSet(); } public virtual void discard(int ttype) { discardMask.add(ttype); } public virtual void discard(BitSet mask) { discardMask = mask; } public virtual IToken nextToken() { IToken tok = input.nextToken(); while (tok != null && discardMask.member(tok.Type)) { tok = input.nextToken(); } return tok; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/DefaultFileLineFormatter.cs0000755000175000017500000000177210522211615025771 0ustar twernertwernerusing System; using StringBuilder = System.Text.StringBuilder; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
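//
// For reference, getFormatString() below produces prefixes such as
// "Foo.g:3:10: " when the file, line and column are all known, and degrades
// to "line 3:10: " when no file name is available (the values are illustrative).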
// public class DefaultFileLineFormatter : FileLineFormatter { public override string getFormatString(string fileName, int line, int column) { StringBuilder buf = new StringBuilder(); if (fileName != null) buf.Append(fileName + ":"); if (line != - 1) { if (fileName == null) buf.Append("line "); buf.Append(line); if (column != - 1) buf.Append(":" + column); buf.Append(":"); } buf.Append(" "); return buf.ToString(); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/CharQueue.cs0000755000175000017500000000525210522211615022770 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A circular buffer object used by CharBuffer */ public class CharQueue { /*Physical circular buffer of tokens */ protected internal char[] buffer; /*buffer.length-1 for quick modulos */ private int sizeLessOne; /*physical index of front token */ private int offset; /*number of tokens in the queue */ protected internal int nbrEntries; public CharQueue(int minSize) { // Find first power of 2 >= to requested size int size; if (minSize < 0) { init(16); // pick some value for them return ; } // check for overflow if (minSize >= (Int32.MaxValue / 2)) { init(Int32.MaxValue); // wow that's big. return ; } for (size = 2; size < minSize; size *= 2) { ; } init(size); } /*Add token to end of the queue * @param tok The token to add */ public void append(char tok) { if (nbrEntries == buffer.Length) { expand(); } buffer[(offset + nbrEntries) & sizeLessOne] = tok; nbrEntries++; } /*Fetch a token from the queue by index * @param idx The index of the token to fetch, where zero is the token at the front of the queue */ public char elementAt(int idx) { return buffer[(offset + idx) & sizeLessOne]; } /*Expand the token buffer by doubling its capacity */ private void expand() { char[] newBuffer = new char[buffer.Length * 2]; // Copy the contents to the new buffer // Note that this will store the first logical item in the // first physical array element. for (int i = 0; i < buffer.Length; i++) { newBuffer[i] = elementAt(i); } // Re-initialize with new contents, keep old nbrEntries buffer = newBuffer; sizeLessOne = buffer.Length - 1; offset = 0; } /*Initialize the queue. * @param size The initial size of the queue */ public virtual void init(int size) { // Allocate buffer buffer = new char[size]; // Other initialization sizeLessOne = size - 1; offset = 0; nbrEntries = 0; } /*Clear the queue. Leaving the previous buffer alone. 
*/ public void reset() { offset = 0; nbrEntries = 0; } /*Remove char from front of queue */ public void removeFirst() { offset = (offset + 1) & sizeLessOne; nbrEntries--; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStreamRewriteEngine.cs0000644000175000017500000003513210522211615026027 0ustar twernertwernernamespace antlr { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // using System; using IList = System.Collections.IList; using IDictionary = System.Collections.IDictionary; using ArrayList = System.Collections.ArrayList; using Hashtable = System.Collections.Hashtable; using IComparer = System.Collections.IComparer; using StringBuilder = System.Text.StringBuilder; using BitSet = antlr.collections.impl.BitSet; /// /// This token stream tracks the *entire* token stream coming from /// a lexer, but does not pass on the whitespace (or whatever else /// you want to discard) to the parser. /// /// /// /// This class can then be asked for the ith token in the input stream. /// Useful for dumping out the input stream exactly after doing some /// augmentation or other manipulations. Tokens are index from 0..n-1 /// /// /// You can insert stuff, replace, and delete chunks. Note that the /// operations are done lazily--only if you convert the buffer to a /// string. This is very efficient because you are not moving data around /// all the time. As the buffer of tokens is converted to strings, the /// toString() method(s) check to see if there is an operation at the /// current index. If so, the operation is done and then normal string /// rendering continues on the buffer. This is like having multiple Turing /// machine instruction streams (programs) operating on a single input tape. :) /// /// /// Since the operations are done lazily at toString-time, operations do not /// screw up the token index values. That is, an insert operation at token /// index i does not change the index values for tokens i+1..n-1. /// /// /// Because operations never actually alter the buffer, you may always get /// the original token stream back without undoing anything. Since /// the instructions are queued up, you can easily simulate transactions and /// roll back any changes if there is an error just by removing instructions. /// For example, /// /// For example: /// /// TokenStreamRewriteEngine rewriteEngine = new TokenStreamRewriteEngine(lexer); /// JavaRecognizer parser = new JavaRecognizer(rewriteEngine); /// ... /// rewriteEngine.insertAfter("pass1", t, "foobar");} /// rewriteEngine.insertAfter("pass2", u, "start");} /// System.Console.Out.WriteLine(rewriteEngine.ToString("pass1")); /// System.Console.Out.WriteLine(rewriteEngine.ToString("pass2")); /// /// /// /// You can also have multiple "instruction streams" and get multiple /// rewrites from a single pass over the input. Just name the instruction /// streams and use that name again when printing the buffer. This could be /// useful for generating a C file and also its header file--all from the /// same buffer. /// /// /// If you don't use named rewrite streams, a "default" stream is used. 
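/// 
/// A further sketch (the parser, rule and token variable names are illustrative):
/// because edits are merely queued, the untouched input stays recoverable.
/// 
///   TokenStreamRewriteEngine rw = new TokenStreamRewriteEngine(lexer);   // 'lexer' is any TokenStream
///   MyParser parser = new MyParser(rw);
///   parser.startRule();
///   rw.replace(startTok, stopTok, "replacement");   // queued, not applied yet
///   rw.delete(unwantedTok);                         // queued as well
///   string untouched = rw.ToOriginalString();       // original token text, ignoring edits
/// 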
/// /// /// Terence Parr, parrt@cs.usfca.edu /// University of San Francisco /// February 2004 /// /// public class TokenStreamRewriteEngine : TokenStream { public const int MIN_TOKEN_INDEX = 0; protected class RewriteOperation { protected internal int index; protected internal string text; protected RewriteOperation(int index, string text) { this.index = index; this.text = text; } /// /// Execute the rewrite operation by possibly adding to the buffer. /// /// rewrite buffer /// The index of the next token to operate on. public virtual int execute(StringBuilder buf) { return index; } } protected class InsertBeforeOp : RewriteOperation { public InsertBeforeOp(int index, string text) : base(index, text) { } public override int execute(StringBuilder buf) { buf.Append(text); return index; } } protected class ReplaceOp : RewriteOperation { protected int lastIndex; public ReplaceOp(int from, int to, string text) : base(from, text) { lastIndex = to; } public override int execute(StringBuilder buf) { if ( text != null ) { buf.Append(text); } return lastIndex+1; } } protected class DeleteOp : ReplaceOp { public DeleteOp(int from, int to) : base(from, to, null) { } } public const string DEFAULT_PROGRAM_NAME = "default"; public const int PROGRAM_INIT_SIZE = 100; /// /// Track the incoming list of tokens /// protected IList tokens; /// /// You may have multiple, named streams of rewrite operations. /// I'm calling these things "programs." /// Maps string (name) -> rewrite (List) /// protected IDictionary programs = null; /// /// Map string (program name) -> Integer index /// protected IDictionary lastRewriteTokenIndexes = null; /// /// track index of tokens /// protected int index = MIN_TOKEN_INDEX; /// /// Who do we suck tokens from? /// protected TokenStream stream; /// /// Which (whitespace) token(s) to throw out /// protected BitSet discardMask = new BitSet(); public TokenStreamRewriteEngine(TokenStream upstream) : this(upstream, 1000) { } public TokenStreamRewriteEngine(TokenStream upstream, int initialSize) { stream = upstream; tokens = new ArrayList(initialSize); programs = new Hashtable(); programs[DEFAULT_PROGRAM_NAME] = new ArrayList(PROGRAM_INIT_SIZE); lastRewriteTokenIndexes = new Hashtable(); } public IToken nextToken() // throws TokenStreamException { TokenWithIndex t; // suck tokens until end of stream or we find a non-discarded token do { t = (TokenWithIndex) stream.nextToken(); if ( t != null ) { t.setIndex(index); // what is t's index in list? if ( t.Type != Token.EOF_TYPE ) { tokens.Add(t); // track all tokens except EOF } index++; // move to next position } } while ( (t != null) && (discardMask.member(t.Type)) ); return t; } public void rollback(int instructionIndex) { rollback(DEFAULT_PROGRAM_NAME, instructionIndex); } /// /// Rollback the instruction stream for a program so that /// the indicated instruction (via instructionIndex) is no /// longer in the stream. /// /// /// UNTESTED! /// /// /// public void rollback(string programName, int instructionIndex) { ArrayList il = (ArrayList) programs[programName]; if ( il != null ) { programs[programName] = il.GetRange(MIN_TOKEN_INDEX, (instructionIndex - MIN_TOKEN_INDEX)); } } public void deleteProgram() { deleteProgram(DEFAULT_PROGRAM_NAME); } /// /// Reset the program so that no instructions exist /// /// public void deleteProgram(string programName) { rollback(programName, MIN_TOKEN_INDEX); } /// /// If op.index > lastRewriteTokenIndexes, just add to the end. 
/// Otherwise, do linear /// /// protected void addToSortedRewriteList(RewriteOperation op) { addToSortedRewriteList(DEFAULT_PROGRAM_NAME, op); } protected void addToSortedRewriteList(string programName, RewriteOperation op) { ArrayList rewrites = (ArrayList) getProgram(programName); // if at or beyond last op's index, just append if ( op.index >= getLastRewriteTokenIndex(programName) ) { rewrites.Add(op); // append to list of operations // record the index of this operation for next time through setLastRewriteTokenIndex(programName, op.index); return; } // not after the last one, so must insert to ordered list int pos = rewrites.BinarySearch(op, RewriteOperationComparer.Default); if (pos < 0) { rewrites.Insert(-pos-1, op); } } public void insertAfter(IToken t, string text) { insertAfter(DEFAULT_PROGRAM_NAME, t, text); } public void insertAfter(int index, string text) { insertAfter(DEFAULT_PROGRAM_NAME, index, text); } public void insertAfter(string programName, IToken t, string text) { insertAfter(programName,((TokenWithIndex) t).getIndex(), text); } public void insertAfter(string programName, int index, string text) { // to insert after, just insert before next index (even if past end) insertBefore(programName, index+1, text); } public void insertBefore(IToken t, string text) { insertBefore(DEFAULT_PROGRAM_NAME, t, text); } public void insertBefore(int index, string text) { insertBefore(DEFAULT_PROGRAM_NAME, index, text); } public void insertBefore(string programName, IToken t, string text) { insertBefore(programName, ((TokenWithIndex) t).getIndex(), text); } public void insertBefore(string programName, int index, string text) { addToSortedRewriteList(programName, new InsertBeforeOp(index, text)); } public void replace(int index, string text) { replace(DEFAULT_PROGRAM_NAME, index, index, text); } public void replace(int from, int to, string text) { replace(DEFAULT_PROGRAM_NAME, from, to, text); } public void replace(IToken indexT, string text) { replace(DEFAULT_PROGRAM_NAME, indexT, indexT, text); } public void replace(IToken from, IToken to, string text) { replace(DEFAULT_PROGRAM_NAME, from, to, text); } public void replace(string programName, int from, int to, string text) { addToSortedRewriteList(new ReplaceOp(from, to, text)); } public void replace(string programName, IToken from, IToken to, string text) { replace(programName, ((TokenWithIndex) from).getIndex(), ((TokenWithIndex) to).getIndex(), text); } public void delete(int index) { delete(DEFAULT_PROGRAM_NAME, index, index); } public void delete(int from, int to) { delete(DEFAULT_PROGRAM_NAME, from, to); } public void delete(IToken indexT) { delete(DEFAULT_PROGRAM_NAME, indexT, indexT); } public void delete(IToken from, IToken to) { delete(DEFAULT_PROGRAM_NAME, from, to); } public void delete(string programName, int from, int to) { replace(programName, from, to, null); } public void delete(string programName, IToken from, IToken to) { replace(programName, from, to, null); } public void discard(int ttype) { discardMask.add(ttype); } public TokenWithIndex getToken(int i) { return (TokenWithIndex) tokens[i]; } public int getTokenStreamSize() { return tokens.Count; } public string ToOriginalString() { return ToOriginalString(MIN_TOKEN_INDEX, getTokenStreamSize()-1); } public string ToOriginalString(int start, int end) { StringBuilder buf = new StringBuilder(); for (int i = start; (i >= MIN_TOKEN_INDEX) && (i <= end) && (i < tokens.Count); i++) { buf.Append(getToken(i).getText()); } return buf.ToString(); } public override string 
ToString() { return ToString(MIN_TOKEN_INDEX, getTokenStreamSize()); } public string ToString(string programName) { return ToString(programName, MIN_TOKEN_INDEX, getTokenStreamSize()); } public string ToString(int start, int end) { return ToString(DEFAULT_PROGRAM_NAME, start, end); } public string ToString(string programName, int start, int end) { IList rewrites = (IList) programs[programName]; if (rewrites == null) { return null; // invalid program } StringBuilder buf = new StringBuilder(); // Index of first rewrite we have not done int rewriteOpIndex = 0; int tokenCursor = start; while ( (tokenCursor >= MIN_TOKEN_INDEX) && (tokenCursor <= end) && (tokenCursor < tokens.Count) ) { if (rewriteOpIndex < rewrites.Count) { RewriteOperation op = (RewriteOperation) rewrites[rewriteOpIndex]; while ( (tokenCursor == op.index) && (rewriteOpIndex < rewrites.Count) ) { /* Console.Out.WriteLine("execute op "+rewriteOpIndex+ " (type "+op.GetType().FullName+")" +" at index "+op.index); */ tokenCursor = op.execute(buf); rewriteOpIndex++; if (rewriteOpIndex < rewrites.Count) { op = (RewriteOperation) rewrites[rewriteOpIndex]; } } } if ( tokenCursor < end ) { buf.Append(getToken(tokenCursor).getText()); tokenCursor++; } } // now see if there are operations (append) beyond last token index for (int opi = rewriteOpIndex; opi < rewrites.Count; opi++) { RewriteOperation op = (RewriteOperation) rewrites[opi]; op.execute(buf); // must be insertions if after last token } return buf.ToString(); } public string ToDebugString() { return ToDebugString(MIN_TOKEN_INDEX, getTokenStreamSize()); } public string ToDebugString(int start, int end) { StringBuilder buf = new StringBuilder(); for (int i = start; (i >= MIN_TOKEN_INDEX) && (i <= end) && (i < tokens.Count); i++) { buf.Append(getToken(i)); } return buf.ToString(); } public int getLastRewriteTokenIndex() { return getLastRewriteTokenIndex(DEFAULT_PROGRAM_NAME); } protected int getLastRewriteTokenIndex(string programName) { object i = lastRewriteTokenIndexes[programName]; if (i == null) { return -1; } return (int) i; } protected void setLastRewriteTokenIndex(string programName, int i) { lastRewriteTokenIndexes[programName] = (object) i; } protected IList getProgram(string name) { IList il = (IList) programs[name]; if ( il == null ) { il = initializeProgram(name); } return il; } private IList initializeProgram(string name) { IList il = new ArrayList(PROGRAM_INIT_SIZE); programs[name] = il; return il; } public class RewriteOperationComparer : IComparer { public static readonly RewriteOperationComparer Default = new RewriteOperationComparer(); public virtual int Compare(object o1, object o2) { RewriteOperation rop1 = (RewriteOperation) o1; RewriteOperation rop2 = (RewriteOperation) o2; if (rop1.index < rop2.index) return -1; if (rop1.index > rop2.index) return 1; return 0; } } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr/CommonAST.cs0000755000175000017500000000507110522211615022705 0ustar twernertwernerusing System; using AST = antlr.collections.AST; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
// /*Common AST node implementation */ public class CommonAST : BaseAST { public static readonly CommonAST.CommonASTCreator Creator = new CommonASTCreator(); internal int ttype = Token.INVALID_TYPE; internal string text; [Obsolete("Deprecated since version 2.7.2. Use ASTFactory.dup() instead.", false)] protected CommonAST(CommonAST another) { // don't include child/sibling pointers in Clone()/dup() //down = another.down; //right = another.right; ttype = another.ttype; text = (another.text==null) ? null : String.Copy(another.text); } /*Get the token text for this node */ override public string getText() { return text; } /*Get the token type for this node */ override public int Type { get { return ttype; } set { ttype = value; } } override public void initialize(int t, string txt) { Type = t; setText(txt); } override public void initialize(AST t) { setText(t.getText()); Type = t.Type; } public CommonAST() { } public CommonAST(IToken tok) { initialize(tok); } override public void initialize(IToken tok) { setText(tok.getText()); Type = tok.Type; } /*Set the token text for this node */ override public void setText(string text_) { text = text_; } /*Set the token type for this node */ override public void setType(int ttype_) { this.Type = ttype_; } #region Implementation of ICloneable [Obsolete("Deprecated since version 2.7.2. Use ASTFactory.dup() instead.", false)] override public object Clone() { return new CommonAST(this); } #endregion public class CommonASTCreator : ASTNodeCreator { public CommonASTCreator() {} /// /// Returns the fully qualified name of the AST type that this /// class creates. /// public override string ASTNodeTypeName { get { return typeof(antlr.CommonAST).FullName;; } } /// /// Constructs a instance. /// public override AST Create() { return new CommonAST(); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ParserSharedInputState.cs0000755000175000017500000000176410522211615025516 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*This object contains the data associated with an * input stream of tokens. Multiple parsers * share a single ParserSharedInputState to parse * the same stream of tokens. */ public class ParserSharedInputState { /*Where to get token objects */ protected internal TokenBuffer input; /*Are we guessing (guessing>0)? */ public int guessing = 0; /*What file (if known) caused the problem? */ protected internal string filename; public virtual void reset() { guessing = 0; filename = null; input.reset(); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStreamException.cs0000755000175000017500000000127610522211615025223 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /* * Anything that goes wrong while generating a stream of tokens. 
*/ [Serializable] public class TokenStreamException : ANTLRException { public TokenStreamException() { } public TokenStreamException(string s) : base(s) { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenWithIndex.cs0000644000175000017500000000220310522211615024000 0ustar twernertwernernamespace antlr { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // using System; /// /// This token tracks it's own index 0..n-1 relative to the beginning /// of the stream. It is designed to work with /// in TokenStreamRewriteEngine.cs /// public class TokenWithIndex : CommonToken { /// /// Index into token array indicating position in input stream /// int index; public TokenWithIndex() : base() { } public TokenWithIndex(int i, string t) : base(i, t) { } public void setIndex(int i) { index = i; } public int getIndex() { return index; } public override string ToString() { return "["+index+":\"" + getText() + "\",<" + Type + ">,line=" + line + ",col=" + col + "]\n"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/NoViableAltException.cs0000755000175000017500000000257210522211615025127 0ustar twernertwernerusing System; using AST = antlr.collections.AST; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // [Serializable] public class NoViableAltException : RecognitionException { public IToken token; public AST node; // handles parsing and treeparsing public NoViableAltException(AST t) : base("NoViableAlt", "", - 1, - 1) { node = t; } public NoViableAltException(IToken t, string fileName_) : base("NoViableAlt", fileName_, t.getLine(), t.getColumn()) { token = t; } /* * Returns a clean error message (no line number/column information) */ override public string Message { get { if (token != null) { //return "unexpected token: " + token.getText(); return "unexpected token: " + token.ToString(); } // must a tree parser error if token==null if ( (node==null) || (node==TreeParser.ASTNULL) ) { return "unexpected end of subtree"; } return "unexpected AST node: " + node.ToString(); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenQueue.cs0000755000175000017500000000525310522211615023174 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
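//
// Illustrative error-handling sketch for a typical driver, assuming a
// hypothetical ANTLR-generated MyLexer/MyParser and start rule; recognition
// errors carry file/line/column information, token stream failures do not.
//
//   try
//   {
//       MyLexer lexer = new MyLexer(Console.In);
//       MyParser parser = new MyParser(lexer);
//       parser.compilationUnit();
//   }
//   catch (RecognitionException re)      // includes NoViableAltException, etc.
//   {
//       Console.Error.WriteLine(re.getFilename() + "(" + re.getLine() + ","
//                               + re.getColumn() + "): " + re.Message);
//   }
//   catch (TokenStreamException tse)     // lexer or token stream failure
//   {
//       Console.Error.WriteLine("token stream error: " + tse.Message);
//   }
//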
// /*A private circular buffer object used by the token buffer */ class TokenQueue { /*Physical circular buffer of tokens */ private IToken[] buffer; /*buffer.length-1 for quick modulos */ private int sizeLessOne; /*physical index of front token */ private int offset; /*number of tokens in the queue */ protected internal int nbrEntries; public TokenQueue(int minSize) { // Find first power of 2 >= to requested size int size; if (minSize < 0) { init(16); // pick some value for them return ; } // check for overflow if (minSize >= (int.MaxValue / 2)) { init(int.MaxValue); // wow that's big. return ; } for (size = 2; size < minSize; size *= 2) { ; } init(size); } /*Add token to end of the queue * @param tok The token to add */ public void append(IToken tok) { if (nbrEntries == buffer.Length) { expand(); } buffer[(offset + nbrEntries) & sizeLessOne] = tok; nbrEntries++; } /*Fetch a token from the queue by index * @param idx The index of the token to fetch, where zero is the token at the front of the queue */ public IToken elementAt(int idx) { return buffer[(offset + idx) & sizeLessOne]; } /*Expand the token buffer by doubling its capacity */ private void expand() { IToken[] newBuffer = new IToken[buffer.Length * 2]; // Copy the contents to the new buffer // Note that this will store the first logical item in the // first physical array element. for (int i = 0; i < buffer.Length; i++) { newBuffer[i] = elementAt(i); } // Re-initialize with new contents, keep old nbrEntries buffer = newBuffer; sizeLessOne = buffer.Length - 1; offset = 0; } /*Initialize the queue. * @param size The initial size of the queue */ private void init(int size) { // Allocate buffer buffer = new IToken[size]; // Other initialization sizeLessOne = size - 1; offset = 0; nbrEntries = 0; } /*Clear the queue. Leaving the previous buffer alone. */ public void reset() { offset = 0; nbrEntries = 0; } /*Remove token from front of queue */ public void removeFirst() { offset = (offset + 1) & sizeLessOne; nbrEntries--; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ASTFactory.cs0000755000175000017500000005273710522211615023077 0ustar twernertwernerusing System; using System.Collections; using Assembly = System.Reflection.Assembly; using ArrayList = System.Collections.ArrayList; using Debug = System.Diagnostics.Debug; using AST = antlr.collections.AST; using ASTArray = antlr.collections.impl.ASTArray; using ANTLRException = antlr.ANTLRException; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // // HISTORY: // // 19-Aug-2002 kunle Augmented the basic flexibility of the default ASTFactory with a map // of TokenID-to-NodeTypeName. It's now a proper GoF-style Factory ;-) // /// /// AST Support code shared by TreeParser and Parser. /// /// /// /// We use delegation to share code (and have only one /// bit of code to maintain) rather than subclassing /// or superclassing (forces AST support code to be /// loaded even when you don't want to do AST stuff). /// /// /// Typically, is used to specify the /// homogeneous type of node to create, but you can override /// to make heterogeneous nodes etc... 
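//
// Illustrative sketch of configuring heterogeneous node types, assuming a
// hypothetical generated parser (MyParser), a generated token type constant
// MyParser.METHOD_DEF, and a user-defined MyNamespace.MyMethodNode class that
// extends antlr.CommonAST and has a parameterless constructor.
//
//   ASTFactory factory = new ASTFactory();               // default node type: antlr.CommonAST
//   factory.setMaxNodeType(MyParser.METHOD_DEF);         // pre-size the token-type table
//   factory.setTokenTypeASTNodeType(MyParser.METHOD_DEF,
//                                   "MyNamespace.MyMethodNode");
//
//   MyParser parser = new MyParser(lexer);               // lexer constructed elsewhere
//   parser.setASTFactory(factory);                       // all other tokens still yield CommonAST
//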
/// /// public class ASTFactory { //--------------------------------------------------------------------- // CONSTRUCTORS //--------------------------------------------------------------------- /// /// Constructs an ASTFactory with the default AST node type of /// . /// public ASTFactory() : this(typeof(antlr.CommonAST)) { } /// /// Constructs an ASTFactory and use the specified AST node type /// as the default. /// /// /// Name of default AST node type for this factory. /// public ASTFactory(string nodeTypeName) : this( loadNodeTypeObject(nodeTypeName) ) { } /// /// Constructs an ASTFactory and use the specified AST node type /// as the default. /// /// /// MetaType of default AST node type for this factory. /// public ASTFactory(Type nodeType) { heteroList_ = new FactoryEntry[Token.MIN_USER_TYPE+1]; defaultASTNodeTypeObject_ = nodeType; defaultCreator_ = null; typename2creator_ = new Hashtable(32, (float) 0.3); typename2creator_["antlr.CommonAST"] = CommonAST.Creator; typename2creator_["antlr.CommonASTWithHiddenTokens"] = CommonASTWithHiddenTokens.Creator; } //--------------------------------------------------------------------- // DATA MEMBERS //--------------------------------------------------------------------- /// /// Stores the Type of the default AST node class to be used during tree construction. /// protected Type defaultASTNodeTypeObject_; protected ASTNodeCreator defaultCreator_; /// /// Stores the mapping between custom AST NodeTypes and their NodeTypeName/NodeTypeClass /// and ASTNodeCreator. /// protected FactoryEntry[] heteroList_; /// /// Stores the mapping between AST node typenames and their token ID. /// protected Hashtable typename2creator_; //--------------------------------------------------------------------- // FUNCTION MEMBERS //--------------------------------------------------------------------- /// /// Specify an "override" for the type created for /// the specified Token type. /// /// /// This method is useful for situations that ANTLR cannot oridinarily deal /// with (i.e., when you create a token based upon a nonliteral token symbol /// like #[LT(1)]. This is a runtime value and ANTLR cannot determine the token /// type (and hence the AST) statically. /// /// Token type to override. /// /// Fully qualified AST typename (or null to specify /// the factory's default AST type). /// public void setTokenTypeASTNodeType(int tokenType, string NodeTypeName) { // check validity of arguments... if( tokenType < Token.MIN_USER_TYPE ) throw new ANTLRException("Internal parser error: Cannot change AST Node Type for Token ID '" + tokenType + "'"); // resize up to and including 'type' and initialize any gaps to default // factory. if (tokenType > (heteroList_.Length+1)) setMaxNodeType(tokenType); // And add new thing.. if (heteroList_[tokenType] == null) heteroList_[tokenType] = new FactoryEntry(loadNodeTypeObject(NodeTypeName)); else heteroList_[tokenType].NodeTypeObject = loadNodeTypeObject(NodeTypeName); } /// /// Register an AST Node Type for a given Token type ID. /// /// The Token type ID. /// The AST Node Type to register. [Obsolete("Replaced by setTokenTypeASTNodeType(int, string) since version 2.7.2.6", true)] public void registerFactory(int NodeType, string NodeTypeName) { setTokenTypeASTNodeType(NodeType, NodeTypeName); } /// /// Register an ASTNodeCreator for a given Token type ID. /// /// The Token type ID. /// The creater to register. public void setTokenTypeASTNodeCreator(int NodeType, ASTNodeCreator creator) { // check validity of arguments... 
if( NodeType < Token.MIN_USER_TYPE ) throw new ANTLRException("Internal parser error: Cannot change AST Node Type for Token ID '" + NodeType + "'"); // resize up to and including 'type' and initialize any gaps to default // factory. if (NodeType > (heteroList_.Length+1)) setMaxNodeType(NodeType); // And add new thing.. if (heteroList_[NodeType] == null) heteroList_[NodeType] = new FactoryEntry(creator); else heteroList_[NodeType].Creator = creator; //typename2creator_[NodeType.ToString()] = creator; typename2creator_[creator.ASTNodeTypeName] = creator; } /// /// Register an ASTNodeCreator to be used for creating node by default. /// /// The ASTNodeCreator. public void setASTNodeCreator(ASTNodeCreator creator) { defaultCreator_ = creator; } /// /// Pre-expands the internal list of TokenTypeID-to-ASTNodeType mappings /// to the specified size. /// This is primarily a convenience method that can be used to prevent /// unnecessary and costly re-org of the mappings list. /// /// Maximum Token Type ID. public void setMaxNodeType( int NodeType ) { //Debug.WriteLine(this, "NodeType = " + NodeType + " and NodeList.Length = " + nodeTypeList_.Length); if (heteroList_ == null) { heteroList_ = new FactoryEntry[NodeType+1]; } else { int length = heteroList_.Length; if ( NodeType >= length ) { FactoryEntry[] newList = new FactoryEntry[NodeType+1]; Array.Copy(heteroList_, 0, newList, 0, length); heteroList_ = newList; } else if ( NodeType < length ) { FactoryEntry[] newList = new FactoryEntry[NodeType+1]; Array.Copy(heteroList_, 0, newList, 0, (NodeType+1)); heteroList_ = newList; } } //Debug.WriteLine(this, "NodeType = " + NodeType + " and NodeList.Length = " + nodeTypeList_.Length); } /// /// Add a child to the current AST /// /// The AST to add a child to /// The child AST to be added public virtual void addASTChild(ref ASTPair currentAST, AST child) { if (child != null) { if (currentAST.root == null) { // Make new child the current root currentAST.root = child; } else { if (currentAST.child == null) { // Add new child to current root currentAST.root.setFirstChild(child); } else { currentAST.child.setNextSibling(child); } } // Make new child the current child currentAST.child = child; currentAST.advanceChildToEnd(); } } /// /// Creates a new uninitialized AST node. Since a specific AST Node Type /// wasn't indicated, the new AST node is created using the current default /// AST Node type - /// /// An uninitialized AST node object. public virtual AST create() { AST newNode; if (defaultCreator_ == null) newNode = createFromNodeTypeObject(defaultASTNodeTypeObject_); else newNode = defaultCreator_.Create(); return newNode; } /// /// Creates and initializes a new AST node using the specified Token Type ID. /// The used for creating this new AST node is /// determined by the following: /// /// the current TokenTypeID-to-ASTNodeType mapping (if any) or, /// the otherwise /// /// /// Token type ID to be used to create new AST Node. /// An initialized AST node object. public virtual AST create(int type) { AST newNode = createFromNodeType(type); newNode.initialize(type, ""); return newNode; } /// /// Creates and initializes a new AST node using the specified Token Type ID. /// The used for creating this new AST node is /// determined by the following: /// /// the current TokenTypeID-to-ASTNodeType mapping (if any) or, /// the otherwise /// /// /// Token type ID to be used to create new AST Node. /// Text for initializing the new AST Node. /// An initialized AST node object. 
public virtual AST create(int type, string txt) { AST newNode = createFromNodeType(type); newNode.initialize(type, txt); return newNode; } /// /// Creates a new AST node using the specified AST Node Type name. Once created, /// the new AST node is initialized with the specified Token type ID and string. /// The used for creating this new AST node is /// determined solely by ASTNodeTypeName. /// The AST Node type must have a default/parameterless constructor. /// /// Token type ID to be used to create new AST Node. /// Text for initializing the new AST Node. /// Fully qualified name of the Type to be used for creating the new AST Node. /// An initialized AST node object. public virtual AST create(int type, string txt, string ASTNodeTypeName) { AST newNode = createFromNodeName(ASTNodeTypeName); newNode.initialize(type, txt); return newNode; } /// /// Creates a new AST node using the specified AST Node Type name. /// /// Token instance to be used to initialize the new AST Node. /// /// Fully qualified name of the Type to be used for creating the new AST Node. /// /// A newly created and initialized AST node object. /// /// Once created, the new AST node is initialized with the specified Token /// instance. The used for creating this new AST /// node is determined solely by ASTNodeTypeName. /// The AST Node type must have a default/parameterless constructor. /// public virtual AST create(IToken tok, string ASTNodeTypeName) { AST newNode = createFromNodeName(ASTNodeTypeName); newNode.initialize(tok); return newNode; } /// /// Creates and initializes a new AST node using the specified AST Node instance. /// the new AST node is initialized with the specified Token type ID and string. /// The used for creating this new AST node is /// determined solely by aNode. /// The AST Node type must have a default/parameterless constructor. /// /// AST Node instance to be used for creating the new AST Node. /// An initialized AST node object. public virtual AST create(AST aNode) { AST newNode; if (aNode == null) newNode = null; else { newNode = createFromAST(aNode); newNode.initialize(aNode); } return newNode; } /// /// Creates and initializes a new AST node using the specified Token instance. /// The used for creating this new AST node is /// determined by the following: /// /// the current TokenTypeID-to-ASTNodeType mapping (if any) or, /// the otherwise /// /// /// Token instance to be used to create new AST Node. /// An initialized AST node object. public virtual AST create(IToken tok) { AST newNode; if (tok == null) newNode = null; else { newNode = createFromNodeType(tok.Type); newNode.initialize(tok); } return newNode; } /// /// Returns a copy of the specified AST Node instance. The copy is obtained by /// using the method Clone(). /// /// AST Node to copy. /// An AST Node (or null if t is null). public virtual AST dup(AST t) { // The Java version is implemented using code like this: if (t == null) return null; AST dup_edNode = createFromAST(t); dup_edNode.initialize(t); return dup_edNode; } /// /// Duplicate AST Node tree rooted at specified AST node and all of it's siblings. /// /// Root of AST Node tree. /// Root node of new AST Node tree (or null if t is null). 
public virtual AST dupList(AST t) { AST result = dupTree(t); // if t == null, then result==null AST nt = result; while (t != null) { // for each sibling of the root t = t.getNextSibling(); nt.setNextSibling(dupTree(t)); // dup each subtree, building new tree nt = nt.getNextSibling(); } return result; } /// /// Duplicate AST Node tree rooted at specified AST node. Ignore it's siblings. /// /// Root of AST Node tree. /// Root node of new AST Node tree (or null if t is null). public virtual AST dupTree(AST t) { AST result = dup(t); // make copy of root // copy all children of root. if (t != null) { result.setFirstChild(dupList(t.getFirstChild())); } return result; } /// /// Make a tree from a list of nodes. The first element in the /// array is the root. If the root is null, then the tree is /// a simple list not a tree. Handles null children nodes correctly. /// For example, build(a, b, null, c) yields tree (a b c). build(null,a,b) /// yields tree (nil a b). /// /// List of Nodes. /// AST Node tree. public virtual AST make(params AST[] nodes) { if (nodes == null || nodes.Length == 0) return null; AST root = nodes[0]; AST tail = null; if (root != null) { root.setFirstChild(null); // don't leave any old pointers set } // link in children; for (int i = 1; i < nodes.Length; i++) { if (nodes[i] == null) continue; // ignore null nodes if (root == null) { // Set the root and set it up for a flat list root = (tail = nodes[i]); } else if (tail == null) { root.setFirstChild(nodes[i]); tail = root.getFirstChild(); } else { tail.setNextSibling(nodes[i]); tail = tail.getNextSibling(); } // Chase tail to last sibling while (tail.getNextSibling() != null) { tail = tail.getNextSibling(); } } return root; } /// /// Make a tree from a list of nodes, where the nodes are contained /// in an ASTArray object. /// /// List of Nodes. /// AST Node tree. public virtual AST make(ASTArray nodes) { return make(nodes.array); } /// /// Make an AST the root of current AST. /// /// /// public virtual void makeASTRoot(ref ASTPair currentAST, AST root) { if (root != null) { // Add the current root as a child of new root root.addChild(currentAST.root); // The new current child is the last sibling of the old root currentAST.child = currentAST.root; currentAST.advanceChildToEnd(); // Set the new root currentAST.root = root; } } /// /// Sets the global default AST Node Type for this ASTFactory instance. /// This method also attempts to load the instance /// for the specified typename. /// /// Fully qualified AST Node Type name. public virtual void setASTNodeType(string t) { if (defaultCreator_ != null) { if (t != defaultCreator_.ASTNodeTypeName) { defaultCreator_ = null; } } defaultASTNodeTypeObject_ = loadNodeTypeObject(t); } /// /// To change where error messages go, can subclass/override this method /// and then setASTFactory in Parser and TreeParser. This method removes /// a prior dependency on class antlr.Tool. 
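//
// Illustrative sketch of building a small tree with create() and make(); the
// token type values below are made up for the example (real code uses the
// constants generated from a grammar, all >= Token.MIN_USER_TYPE).
//
//   const int PLUS = 4;
//   const int INT  = 5;
//
//   ASTFactory factory = new ASTFactory();      // creates antlr.CommonAST nodes by default
//   factory.setMaxNodeType(INT);                // ensure the type table covers our types
//
//   // The first argument becomes the root, the rest its children: the tree (+ 3 4).
//   AST sum = factory.make(factory.create(PLUS, "+"),
//                          factory.create(INT, "3"),
//                          factory.create(INT, "4"));
//
//   AST copy = factory.dupTree(sum);            // deep copy of the root and its children
//   Console.Out.WriteLine(sum.getFirstChild().getText());   // prints "3"
//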
/// /// public virtual void error(string e) { Console.Error.WriteLine(e); } //--------------------------------------------------------------------- // PRIVATE FUNCTION MEMBERS //--------------------------------------------------------------------- private static Type loadNodeTypeObject(string nodeTypeName) { Type nodeTypeObject = null; bool typeCreated = false; if (nodeTypeName != null) { foreach (Assembly assem in AppDomain.CurrentDomain.GetAssemblies()) { try { nodeTypeObject = assem.GetType(nodeTypeName); if (nodeTypeObject != null) { typeCreated = true; break; } } catch { typeCreated = false; } } } if (!typeCreated) { throw new TypeLoadException("Unable to load AST Node Type: '" + nodeTypeName + "'"); } return nodeTypeObject; } private AST createFromAST(AST node) { AST newNode = null; Type nodeAsTypeObj = node.GetType(); ASTNodeCreator creator = (ASTNodeCreator) typename2creator_[nodeAsTypeObj.FullName]; if (creator != null) { newNode = creator.Create(); if (newNode == null) { throw new ArgumentException("Unable to create AST Node Type: '" + nodeAsTypeObj.FullName + "'"); } } else { newNode = createFromNodeTypeObject(nodeAsTypeObj); } return newNode; } private AST createFromNodeName(string nodeTypeName) { AST newNode = null; ASTNodeCreator creator = (ASTNodeCreator) typename2creator_[nodeTypeName]; if (creator != null) { newNode = creator.Create(); if (newNode == null) { throw new ArgumentException("Unable to create AST Node Type: '" + nodeTypeName + "'"); } } else { newNode = createFromNodeTypeObject( loadNodeTypeObject(nodeTypeName) ); } return newNode; } private AST createFromNodeType(int nodeTypeIndex) { Debug.Assert((nodeTypeIndex >= 0) && (nodeTypeIndex <= heteroList_.Length), "Invalid AST node type!"); AST newNode = null; FactoryEntry entry = heteroList_[nodeTypeIndex]; if ((entry != null) && (entry.Creator != null)) { newNode = entry.Creator.Create(); } else { if ((entry == null) || (entry.NodeTypeObject == null)) { if (defaultCreator_ == null) { newNode = createFromNodeTypeObject(defaultASTNodeTypeObject_); } else newNode = defaultCreator_.Create(); } else newNode = createFromNodeTypeObject( entry.NodeTypeObject ); } return newNode; } private AST createFromNodeTypeObject(Type nodeTypeObject) { AST newNode = null; try { newNode = (AST) Activator.CreateInstance(nodeTypeObject); if (newNode == null) { throw new ArgumentException("Unable to create AST Node Type: '" + nodeTypeObject.FullName + "'"); } } catch(Exception ex) { throw new ArgumentException("Unable to create AST Node Type: '" + nodeTypeObject.FullName + "'", ex); } return newNode; } protected class FactoryEntry { public FactoryEntry(Type typeObj, ASTNodeCreator creator) { NodeTypeObject = typeObj; Creator = creator; } public FactoryEntry(Type typeObj) { NodeTypeObject = typeObj; } public FactoryEntry(ASTNodeCreator creator) { Creator = creator; } public Type NodeTypeObject; public ASTNodeCreator Creator; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenBuffer.cs0000755000175000017500000000640210522211615023316 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A Stream of Token objects fed to the parser from a Tokenizer that can * be rewound via mark()/rewind() methods. *

* A dynamic array is used to buffer up all the input tokens. Normally, * "k" tokens are stored in the buffer. More tokens may be stored during * guess mode (testing syntactic predicate), or when LT(i>k) is referenced. * Consumption of tokens is deferred. In other words, reading the next * token is not done by consume(), but deferred until needed by LA or LT. *

* * @see antlr.Token * @see antlr.Tokenizer * @see antlr.TokenQueue */ public class TokenBuffer { // Token source protected internal TokenStream input; // Number of active markers protected internal int nMarkers = 0; // Additional offset used when markers are active protected internal int markerOffset = 0; // Number of calls to consume() since last LA() or LT() call protected internal int numToConsume = 0; // Circular queue internal TokenQueue queue; /*Create a token buffer */ public TokenBuffer(TokenStream input_) { input = input_; queue = new TokenQueue(1); } /*Reset the input buffer to empty state */ public virtual void reset() { nMarkers = 0; markerOffset = 0; numToConsume = 0; queue.reset(); } /*Mark another token for deferred consumption */ public virtual void consume() { numToConsume++; } /*Ensure that the token buffer is sufficiently full */ protected virtual void fill(int amount) { syncConsume(); // Fill the buffer sufficiently to hold needed tokens while (queue.nbrEntries < (amount + markerOffset)) { // Append the next token queue.append(input.nextToken()); } } /*return the Tokenizer (needed by ParseView) */ public virtual TokenStream getInput() { return input; } /*Get a lookahead token value */ public virtual int LA(int i) { fill(i); return queue.elementAt(markerOffset + i - 1).Type; } /*Get a lookahead token */ public virtual IToken LT(int i) { fill(i); return queue.elementAt(markerOffset + i - 1); } /*Return an integer marker that can be used to rewind the buffer to * its current state. */ public virtual int mark() { syncConsume(); nMarkers++; return markerOffset; } /*Rewind the token buffer to a marker. * @param mark Marker returned previously from mark() */ public virtual void rewind(int mark) { syncConsume(); markerOffset = mark; nMarkers--; } /*Sync up deferred consumption */ protected virtual void syncConsume() { while (numToConsume > 0) { if (nMarkers > 0) { // guess mode -- leave leading tokens and bump offset. markerOffset++; } else { // normal mode -- remove first token queue.removeFirst(); } numToConsume--; } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/CharScanner.cs0000755000175000017500000004664010522211615023303 0ustar twernertwernerusing System; using Stream = System.IO.Stream; using TextReader = System.IO.TextReader; using StringBuilder = System.Text.StringBuilder; using Hashtable = System.Collections.Hashtable; using Assembly = System.Reflection.Assembly; using EventHandlerList = System.ComponentModel.EventHandlerList; using BitSet = antlr.collections.impl.BitSet; using antlr.debug; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
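//
// Illustrative sketch of the mark()/rewind() protocol described above; "lexer"
// stands for any TokenStream (e.g. a hypothetical generated MyLexer), and the
// token type constants are hypothetical. Generated parsers follow this pattern
// internally when evaluating syntactic predicates.
//
//   TokenBuffer buffer = new TokenBuffer(lexer);
//
//   int marker = buffer.mark();          // remember the current position
//   if (buffer.LA(1) == MyParser.LPAREN && buffer.LA(2) == MyParser.ID)
//   {
//       buffer.consume();                // speculative, deferred consumption...
//       buffer.consume();
//   }
//   buffer.rewind(marker);               // ...undone here; the tokens can be read again
//
//   IToken next = buffer.LT(1);          // lookahead token without consuming it
//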
// public abstract class CharScanner : TokenStream, ICharScannerDebugSubject { internal const char NO_CHAR = (char) (0); public static readonly char EOF_CHAR = Char.MaxValue; // Used to store event delegates private EventHandlerList events_ = new EventHandlerList(); protected internal EventHandlerList Events { get { return events_; } } // The unique keys for each event that CharScanner [objects] can generate internal static readonly object EnterRuleEventKey = new object(); internal static readonly object ExitRuleEventKey = new object(); internal static readonly object DoneEventKey = new object(); internal static readonly object ReportErrorEventKey = new object(); internal static readonly object ReportWarningEventKey = new object(); internal static readonly object NewLineEventKey = new object(); internal static readonly object MatchEventKey = new object(); internal static readonly object MatchNotEventKey = new object(); internal static readonly object MisMatchEventKey = new object(); internal static readonly object MisMatchNotEventKey = new object(); internal static readonly object ConsumeEventKey = new object(); internal static readonly object LAEventKey = new object(); internal static readonly object SemPredEvaluatedEventKey = new object(); internal static readonly object SynPredStartedEventKey = new object(); internal static readonly object SynPredFailedEventKey = new object(); internal static readonly object SynPredSucceededEventKey = new object(); protected internal StringBuilder text; // text of current token protected bool saveConsumedInput = true; // does consume() save characters? ///

Used for creating Token instances. protected TokenCreator tokenCreator; /// Used for caching lookahead characters. protected char cached_LA1; protected char cached_LA2; protected bool caseSensitive = true; protected bool caseSensitiveLiterals = true; protected Hashtable literals; // set by subclass /*Tab chars are handled by tab() according to this value; override * method to do anything weird with tabs. */ protected internal int tabsize = 8; protected internal IToken returnToken_ = null; // used to return tokens w/o using return val. protected internal LexerSharedInputState inputState; /*Used during filter mode to indicate that path is desired. * A subsequent scan error will report an error as usual if * acceptPath=true; */ protected internal bool commitToPath = false; /*Used to keep track of indentdepth for traceIn/Out */ protected internal int traceDepth = 0; public CharScanner() { text = new StringBuilder(); setTokenCreator(new CommonToken.CommonTokenCreator()); } public CharScanner(InputBuffer cb) : this() { inputState = new LexerSharedInputState(cb); cached_LA2 = inputState.input.LA(2); cached_LA1 = inputState.input.LA(1); } public CharScanner(LexerSharedInputState sharedState) : this() { inputState = sharedState; if (inputState != null) { cached_LA2 = inputState.input.LA(2); cached_LA1 = inputState.input.LA(1); } } public event TraceEventHandler EnterRule { add { Events.AddHandler(EnterRuleEventKey, value); } remove { Events.RemoveHandler(EnterRuleEventKey, value); } } public event TraceEventHandler ExitRule { add { Events.AddHandler(ExitRuleEventKey, value); } remove { Events.RemoveHandler(ExitRuleEventKey, value); } } public event TraceEventHandler Done { add { Events.AddHandler(DoneEventKey, value); } remove { Events.RemoveHandler(DoneEventKey, value); } } public event MessageEventHandler ErrorReported { add { Events.AddHandler(ReportErrorEventKey, value); } remove { Events.RemoveHandler(ReportErrorEventKey, value); } } public event MessageEventHandler WarningReported { add { Events.AddHandler(ReportWarningEventKey, value); } remove { Events.RemoveHandler(ReportWarningEventKey, value); } } public event NewLineEventHandler HitNewLine { add { Events.AddHandler(NewLineEventKey, value); } remove { Events.RemoveHandler(NewLineEventKey, value); } } public event MatchEventHandler MatchedChar { add { Events.AddHandler(MatchEventKey, value); } remove { Events.RemoveHandler(MatchEventKey, value); } } public event MatchEventHandler MatchedNotChar { add { Events.AddHandler(MatchNotEventKey, value); } remove { Events.RemoveHandler(MatchNotEventKey, value); } } public event MatchEventHandler MisMatchedChar { add { Events.AddHandler(MisMatchEventKey, value); } remove { Events.RemoveHandler(MisMatchEventKey, value); } } public event MatchEventHandler MisMatchedNotChar { add { Events.AddHandler(MisMatchNotEventKey, value); } remove { Events.RemoveHandler(MisMatchNotEventKey, value); } } public event TokenEventHandler ConsumedChar { add { Events.AddHandler(ConsumeEventKey, value); } remove { Events.RemoveHandler(ConsumeEventKey, value); } } public event TokenEventHandler CharLA { add { Events.AddHandler(LAEventKey, value); } remove { Events.RemoveHandler(LAEventKey, value); } } public event SemanticPredicateEventHandler SemPredEvaluated { add { Events.AddHandler(SemPredEvaluatedEventKey, value); } remove { Events.RemoveHandler(SemPredEvaluatedEventKey, value); } } public event SyntacticPredicateEventHandler SynPredStarted { add { Events.AddHandler(SynPredStartedEventKey, value); } remove { 
Events.RemoveHandler(SynPredStartedEventKey, value); } } public event SyntacticPredicateEventHandler SynPredFailed { add { Events.AddHandler(SynPredFailedEventKey, value); } remove { Events.RemoveHandler(SynPredFailedEventKey, value); } } public event SyntacticPredicateEventHandler SynPredSucceeded { add { Events.AddHandler(SynPredSucceededEventKey, value); } remove { Events.RemoveHandler(SynPredSucceededEventKey, value); } } // From interface TokenStream public virtual IToken nextToken() { return null; } public virtual void append(char c) { if (saveConsumedInput) { text.Append(c); } } public virtual void append(string s) { if (saveConsumedInput) { text.Append(s); } } public virtual void commit() { inputState.input.commit(); } public virtual void recover(RecognitionException ex, BitSet tokenSet) { consume(); consumeUntil(tokenSet); } public virtual void consume() { if (inputState.guessing == 0) { if (caseSensitive) { append(cached_LA1); } else { // use input.LA(), not LA(), to get original case // CharScanner.LA() would toLower it. append(inputState.input.LA(1)); } if (cached_LA1 == '\t') { tab(); } else { inputState.column++; } } if (caseSensitive) { cached_LA1 = inputState.input.consume(); cached_LA2 = inputState.input.LA(2); } else { cached_LA1 = toLower(inputState.input.consume()); cached_LA2 = toLower(inputState.input.LA(2)); } } /*Consume chars until one matches the given char */ public virtual void consumeUntil(int c) { while ((EOF_CHAR != cached_LA1) && (c != cached_LA1)) { consume(); } } /*Consume chars until one matches the given set */ public virtual void consumeUntil(BitSet bset) { while (cached_LA1 != EOF_CHAR && !bset.member(cached_LA1)) { consume(); } } public virtual bool getCaseSensitive() { return caseSensitive; } public bool getCaseSensitiveLiterals() { return caseSensitiveLiterals; } public virtual int getColumn() { return inputState.column; } public virtual void setColumn(int c) { inputState.column = c; } public virtual bool getCommitToPath() { return commitToPath; } public virtual string getFilename() { return inputState.filename; } public virtual InputBuffer getInputBuffer() { return inputState.input; } public virtual LexerSharedInputState getInputState() { return inputState; } public virtual void setInputState(LexerSharedInputState state) { inputState = state; } public virtual int getLine() { return inputState.line; } /*return a copy of the current text buffer */ public virtual string getText() { return text.ToString(); } public virtual IToken getTokenObject() { return returnToken_; } public virtual char LA(int i) { if (i == 1) { return cached_LA1; } if (i == 2) { return cached_LA2; } if (caseSensitive) { return inputState.input.LA(i); } else { return toLower(inputState.input.LA(i)); } } protected internal virtual IToken makeToken(int t) { IToken newToken = null; bool typeCreated; try { newToken = tokenCreator.Create(); if (newToken != null) { newToken.Type = t; newToken.setColumn(inputState.tokenStartColumn); newToken.setLine(inputState.tokenStartLine); // tracking real start line now: newToken.setLine(inputState.line); newToken.setFilename(inputState.filename); } typeCreated = true; } catch { typeCreated = false; } if (!typeCreated) { panic("Can't create Token object '" + tokenCreator.TokenTypeName + "'"); newToken = Token.badToken; } return newToken; } public virtual int mark() { return inputState.input.mark(); } public virtual void match(char c) { match((int) c); } public virtual void match(int c) { if (cached_LA1 != c) { throw new 
MismatchedCharException(cached_LA1, Convert.ToChar(c), false, this); } consume(); } public virtual void match(BitSet b) { if (!b.member(cached_LA1)) { throw new MismatchedCharException(cached_LA1, b, false, this); } consume(); } public virtual void match(string s) { int len = s.Length; for (int i = 0; i < len; i++) { if (cached_LA1 != s[i]) { throw new MismatchedCharException(cached_LA1, s[i], false, this); } consume(); } } public virtual void matchNot(char c) { matchNot((int) c); } public virtual void matchNot(int c) { if (cached_LA1 == c) { throw new MismatchedCharException(cached_LA1, Convert.ToChar(c), true, this); } consume(); } public virtual void matchRange(int c1, int c2) { if (cached_LA1 < c1 || cached_LA1 > c2) { throw new MismatchedCharException(cached_LA1, Convert.ToChar(c1), Convert.ToChar(c2), false, this); } consume(); } public virtual void matchRange(char c1, char c2) { matchRange((int) c1, (int) c2); } public virtual void newline() { inputState.line++; inputState.column = 1; } /*advance the current column number by an appropriate amount * according to tab size. This method is called from consume(). */ public virtual void tab() { int c = getColumn(); int nc = (((c - 1) / tabsize) + 1) * tabsize + 1; // calculate tab stop setColumn(nc); } public virtual void setTabSize(int size) { tabsize = size; } public virtual int getTabSize() { return tabsize; } public virtual void panic() { //Console.Error.WriteLine("CharScanner: panic"); //Environment.Exit(1); panic(""); } /// /// This method is executed by ANTLR internally when it detected an illegal /// state that cannot be recovered from. /// The previous implementation of this method called /// and writes directly to , which is usually not /// appropriate when a translator is embedded into a larger application. /// /// Error message. 
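//
// Worked example for the tab() column arithmetic above. With the default
// tabsize of 8, a tab advances the column to the next tab stop:
//
//   nc = (((c - 1) / tabsize) + 1) * tabsize + 1
//
//   c = 1  ->  nc = 9          c = 8  ->  nc = 9
//   c = 5  ->  nc = 9          c = 9  ->  nc = 17
//
// A grammar that wants 4-column tab stops would call setTabSize(4) on the
// generated lexer (a CharScanner subclass) before scanning.
//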
public virtual void panic(string s) { //Console.Error.WriteLine("CharScanner; panic: " + s); //Environment.Exit(1); throw new ANTLRPanicException("CharScanner::panic: " + s); } /*Parser error-reporting function can be overridden in subclass */ public virtual void reportError(RecognitionException ex) { Console.Error.WriteLine(ex); } /*Parser error-reporting function can be overridden in subclass */ public virtual void reportError(string s) { if (getFilename() == null) { Console.Error.WriteLine("error: " + s); } else { Console.Error.WriteLine(getFilename() + ": error: " + s); } } /*Parser warning-reporting function can be overridden in subclass */ public virtual void reportWarning(string s) { if (getFilename() == null) { Console.Error.WriteLine("warning: " + s); } else { Console.Error.WriteLine(getFilename() + ": warning: " + s); } } public virtual void refresh() { if (caseSensitive) { cached_LA2 = inputState.input.LA(2); cached_LA1 = inputState.input.LA(1); } else { cached_LA2 = toLower(inputState.input.LA(2)); cached_LA1 = toLower(inputState.input.LA(1)); } } public virtual void resetState(InputBuffer ib) { text.Length = 0; traceDepth = 0; inputState.resetInput(ib); refresh(); } public void resetState(Stream s) { resetState(new ByteBuffer(s)); } public void resetState(TextReader tr) { resetState(new CharBuffer(tr)); } public virtual void resetText() { text.Length = 0; inputState.tokenStartColumn = inputState.column; inputState.tokenStartLine = inputState.line; } public virtual void rewind(int pos) { inputState.input.rewind(pos); //setColumn(inputState.tokenStartColumn); if (caseSensitive) { cached_LA2 = inputState.input.LA(2); cached_LA1 = inputState.input.LA(1); } else { cached_LA2 = toLower(inputState.input.LA(2)); cached_LA1 = toLower(inputState.input.LA(1)); } } public virtual void setCaseSensitive(bool t) { caseSensitive = t; if (caseSensitive) { cached_LA2 = inputState.input.LA(2); cached_LA1 = inputState.input.LA(1); } else { cached_LA2 = toLower(inputState.input.LA(2)); cached_LA1 = toLower(inputState.input.LA(1)); } } public virtual void setCommitToPath(bool commit) { commitToPath = commit; } public virtual void setFilename(string f) { inputState.filename = f; } public virtual void setLine(int line) { inputState.line = line; } public virtual void setText(string s) { resetText(); text.Append(s); } public virtual void setTokenObjectClass(string cl) { this.tokenCreator = new ReflectionBasedTokenCreator(this, cl); } public virtual void setTokenCreator(TokenCreator tokenCreator) { this.tokenCreator = tokenCreator; } // Test the token text against the literals table // Override this method to perform a different literals test public virtual int testLiteralsTable(int ttype) { string tokenText = text.ToString(); if ( (tokenText == null) || (tokenText == string.Empty) ) return ttype; else { object typeAsObject = literals[tokenText]; return (typeAsObject == null) ? ttype : ((int) typeAsObject); } } /*Test the text passed in against the literals table * Override this method to perform a different literals test * This is used primarily when you want to test a portion of * a token. */ public virtual int testLiteralsTable(string someText, int ttype) { if ( (someText == null) || (someText == string.Empty) ) return ttype; else { object typeAsObject = literals[someText]; return (typeAsObject == null) ? 
ttype : ((int) typeAsObject); } } // Override this method to get more specific case handling public virtual char toLower(int c) { return Char.ToLower(Convert.ToChar(c), System.Globalization.CultureInfo.InvariantCulture); } public virtual void traceIndent() { for (int i = 0; i < traceDepth; i++) Console.Out.Write(" "); } public virtual void traceIn(string rname) { traceDepth += 1; traceIndent(); Console.Out.WriteLine("> lexer " + rname + "; c==" + LA(1)); } public virtual void traceOut(string rname) { traceIndent(); Console.Out.WriteLine("< lexer " + rname + "; c==" + LA(1)); traceDepth -= 1; } /*This method is called by YourLexer.nextToken() when the lexer has * hit EOF condition. EOF is NOT a character. * This method is not called if EOF is reached during * syntactic predicate evaluation or during evaluation * of normal lexical rules, which presumably would be * an IOException. This traps the "normal" EOF condition. * * uponEOF() is called after the complete evaluation of * the previous token and only if your parser asks * for another token beyond that last non-EOF token. * * You might want to throw token or char stream exceptions * like: "Heh, premature eof" or a retry stream exception * ("I found the end of this file, go back to referencing file"). */ public virtual void uponEOF() { } private class ReflectionBasedTokenCreator : TokenCreator { protected ReflectionBasedTokenCreator() {} public ReflectionBasedTokenCreator(CharScanner owner, string tokenTypeName) { this.owner = owner; SetTokenType(tokenTypeName); } private CharScanner owner; /// /// The fully qualified name of the Token type to create. /// private string tokenTypeName; /// /// Type object used as a template for creating tokens by reflection. /// private Type tokenTypeObject; /// /// Returns the fully qualified name of the Token type that this /// class creates. /// private void SetTokenType(string tokenTypeName) { this.tokenTypeName = tokenTypeName; foreach (Assembly assem in AppDomain.CurrentDomain.GetAssemblies()) { try { tokenTypeObject = assem.GetType(tokenTypeName); if (tokenTypeObject != null) { break; } } catch { throw new TypeLoadException("Unable to load Type for Token class '" + tokenTypeName + "'"); } } if (tokenTypeObject==null) throw new TypeLoadException("Unable to load Type for Token class '" + tokenTypeName + "'"); } /// /// Returns the fully qualified name of the Token type that this /// class creates. /// public override string TokenTypeName { get { return tokenTypeName; } } /// /// Constructs a instance. /// public override IToken Create() { IToken newToken = null; try { newToken = (Token) Activator.CreateInstance(tokenTypeObject); } catch { // supress exception } return newToken; } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ASTVisitor.cs0000755000175000017500000000116710522211615023116 0ustar twernertwernerusing System; using AST = antlr.collections.AST; namespace antlr { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /// /// Summary description for ASTVisitor. 
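//
// Illustrative sketch: a minimal visitor that prints every node, walking
// children and siblings itself since the interface below only defines
// visit(AST). PrintVisitor is a hypothetical user-defined class.
//
//   public class PrintVisitor : ASTVisitor
//   {
//       public void visit(AST node)
//       {
//           for (AST n = node; n != null; n = n.getNextSibling())
//           {
//               Console.Out.WriteLine(n.getText());
//               if (n.getFirstChild() != null)
//                   visit(n.getFirstChild());    // recurse into the child list
//           }
//       }
//   }
//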
/// public interface ASTVisitor { void visit(AST node); } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr/RecognitionException.cs0000755000175000017500000000327110522211615025244 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // [Serializable] public class RecognitionException : ANTLRException { public string fileName; // not used by treeparsers public int line; // not used by treeparsers public int column; // not used by treeparsers public RecognitionException() : base("parsing error") { fileName = null; line = - 1; column = - 1; } /* * RecognitionException constructor comment. * @param s java.lang.String */ public RecognitionException(string s) : base(s) { fileName = null; line = - 1; column = - 1; } /* * RecognitionException constructor comment. * @param s java.lang.String */ public RecognitionException(string s, string fileName_, int line_, int column_) : base(s) { fileName = fileName_; line = line_; column = column_; } public virtual string getFilename() { return fileName; } public virtual int getLine() { return line; } public virtual int getColumn() { return column; } [Obsolete("Replaced by Message property since version 2.7.0", true)] public virtual string getErrorMessage() { return Message; } override public string ToString() { return FileLineFormatter.getFormatter().getFormatString(fileName, line, column) + Message; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/CommonToken.cs0000755000175000017500000000365010522211615023337 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // public class CommonToken : Token { public static readonly CommonToken.CommonTokenCreator Creator = new CommonTokenCreator(); // most tokens will want line and text information protected internal int line; protected internal string text = null; protected internal int col; public CommonToken() { } public CommonToken(int t, string txt) { type_ = t; setText(txt); } public CommonToken(string s) { text = s; } override public int getLine() { return line; } override public string getText() { return text; } override public void setLine(int l) { line = l; } override public void setText(string s) { text = s; } override public string ToString() { return "[\"" + getText() + "\",<" + type_ + ">,line=" + line + ",col=" + col + "]"; } /*Return token's start column */ override public int getColumn() { return col; } override public void setColumn(int c) { col = c; } public class CommonTokenCreator : TokenCreator { public CommonTokenCreator() {} /// /// Returns the fully qualified name of the Token type that this /// class creates. /// public override string TokenTypeName { get { return typeof(antlr.CommonToken).FullName;; } } /// /// Constructs a instance. 
/// public override IToken Create() { return new CommonToken(); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/CharBuffer.cs0000755000175000017500000000447710522211615023125 0ustar twernertwernerusing System; using System.Runtime.InteropServices; using TextReader = System.IO.TextReader; using IOException = System.IO.IOException; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A Stream of characters fed to the lexer from a InputStream that can * be rewound via mark()/rewind() methods. *

* A dynamic array is used to buffer up all the input characters. Normally, * "k" characters are stored in the buffer. More characters may be stored during * guess mode (testing a syntactic predicate), or when LT(i>k) is referenced. * Consumption of characters is deferred. In other words, reading the next * character is not done by consume(), but deferred until needed by LA or LT. *

*/ // SAS: Move most functionality into InputBuffer -- just the file-specific // stuff is in here public class CharBuffer : InputBuffer { // char source [NonSerialized()] internal TextReader input; private const int BUF_SIZE = 16; ///

/// Small buffer used to avoid reading individual chars /// private char[] buf = new char[BUF_SIZE]; /*Create a character buffer */ public CharBuffer(TextReader input_) : base() { input = input_; } /*Ensure that the character buffer is sufficiently full */ override public void fill(int amount) { try { syncConsume(); // Fill the buffer sufficiently to hold needed characters int charsToRead = (amount + markerOffset) - queue.Count; int c; while (charsToRead > 0) { // Read a few characters c = input.Read(buf, 0, BUF_SIZE); for (int i = 0; i < c; i++) { // Append the next character queue.Add(buf[i]); } if (c < BUF_SIZE) { while ((charsToRead-- > 0) && (queue.Count < BUF_SIZE)) { queue.Add(CharScanner.EOF_CHAR); } break; } charsToRead -= c; } } catch (IOException io) { throw new CharStreamIOException(io); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/LexerSharedInputState.cs0000755000175000017500000000347210522211615025337 0ustar twernertwernerusing System; using Stream = System.IO.Stream; using TextReader = System.IO.TextReader; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*This object contains the data associated with an * input stream of characters. Multiple lexers * share a single LexerSharedInputState to lex * the same input stream. */ public class LexerSharedInputState { protected internal int column; protected internal int line; protected internal int tokenStartColumn; protected internal int tokenStartLine; protected internal InputBuffer input; /*What file (if known) caused the problem? */ protected internal string filename; public int guessing; public LexerSharedInputState(InputBuffer inbuf) { initialize(); input = inbuf; } public LexerSharedInputState(Stream inStream) : this(new ByteBuffer(inStream)) { } public LexerSharedInputState(TextReader inReader) : this(new CharBuffer(inReader)) { } private void initialize() { column = 1; line = 1; tokenStartColumn = 1; tokenStartLine = 1; guessing = 0; filename = null; } public virtual void reset() { initialize(); input.reset(); } public virtual void resetInput(InputBuffer ib) { reset(); input = ib; } public virtual void resetInput(Stream s) { reset(); input = new ByteBuffer(s); } public virtual void resetInput(TextReader tr) { reset(); input = new CharBuffer(tr); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStreamRecognitionException.cs0000755000175000017500000000155410522211615027423 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /* * Wraps a RecognitionException in a TokenStreamException so you * can pass it along. 
*/ [Serializable] public class TokenStreamRecognitionException : TokenStreamException { public RecognitionException recog; public TokenStreamRecognitionException(RecognitionException re) : base(re.Message) { this.recog = re; } override public string ToString() { return recog.ToString(); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStreamSelector.cs0000755000175000017500000000636010522211615025044 0ustar twernertwernerusing System; using Hashtable = System.Collections.Hashtable; using Stack = System.Collections.Stack; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A token stream MUX (multiplexor) knows about n token streams * and can multiplex them onto the same channel for use by token * stream consumer like a parser. This is a way to have multiple * lexers break up the same input stream for a single parser. * Or, you can have multiple instances of the same lexer handle * multiple input streams; this works great for includes. */ public class TokenStreamSelector : TokenStream { /*The set of inputs to the MUX */ protected internal Hashtable inputStreamNames; /*The currently-selected token stream input */ protected internal TokenStream input; /*Used to track stack of input streams */ protected internal Stack streamStack = new Stack(); public TokenStreamSelector() : base() { inputStreamNames = new Hashtable(); } public virtual void addInputStream(TokenStream stream, string key) { inputStreamNames[key] = stream; } /*Return the stream from tokens are being pulled at * the moment. */ public virtual TokenStream getCurrentStream() { return input; } public virtual TokenStream getStream(string sname) { TokenStream stream = (TokenStream) inputStreamNames[sname]; if (stream == null) { throw new System.ArgumentException("TokenStream " + sname + " not found"); } return stream; } public virtual IToken nextToken() { // return input.nextToken(); // keep looking for a token until you don't // get a retry exception. for (; ; ) { try { return input.nextToken(); } catch (TokenStreamRetryException) { // just retry "forever" } } } public virtual TokenStream pop() { TokenStream stream = (TokenStream) streamStack.Pop(); select(stream); return stream; } public virtual void push(TokenStream stream) { streamStack.Push(input); // save current stream select(stream); } public virtual void push(string sname) { streamStack.Push(input); select(sname); } /*Abort recognition of current Token and try again. * A stream can push a new stream (for include files * for example, and then retry(), which will cause * the current stream to abort back to this.nextToken(). * this.nextToken() then asks for a token from the * current stream, which is the new "substream." 
*/ public virtual void retry() { throw new TokenStreamRetryException(); } /*Set the stream without pushing old stream */ public virtual void select(TokenStream stream) { input = stream; if (input is CharScanner) { ((CharScanner) input).refresh(); } } public virtual void select(string sname) { input = getStream(sname); if (input is CharScanner) { ((CharScanner) input).refresh(); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/CommonASTWithHiddenTokens.cs0000755000175000017500000000515110522211615026040 0ustar twernertwernerusing System; using AST = antlr.collections.AST; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A CommonAST whose initialization copies hidden token * information from the Token used to create a node. */ public class CommonASTWithHiddenTokens : CommonAST { new public static readonly CommonASTWithHiddenTokens.CommonASTWithHiddenTokensCreator Creator = new CommonASTWithHiddenTokensCreator(); protected internal IHiddenStreamToken hiddenBefore, hiddenAfter; // references to hidden tokens public CommonASTWithHiddenTokens() : base() { } public CommonASTWithHiddenTokens(IToken tok) : base(tok) { } [Obsolete("Deprecated since version 2.7.2. Use ASTFactory.dup() instead.", false)] protected CommonASTWithHiddenTokens(CommonASTWithHiddenTokens another) : base(another) { hiddenBefore = another.hiddenBefore; hiddenAfter = another.hiddenAfter; } public virtual IHiddenStreamToken getHiddenAfter() { return hiddenAfter; } public virtual IHiddenStreamToken getHiddenBefore() { return hiddenBefore; } override public void initialize(AST t) { hiddenBefore = ((CommonASTWithHiddenTokens) t).getHiddenBefore(); hiddenAfter = ((CommonASTWithHiddenTokens) t).getHiddenAfter(); base.initialize(t); } override public void initialize(IToken tok) { IHiddenStreamToken t = (IHiddenStreamToken) tok; base.initialize(t); hiddenBefore = t.getHiddenBefore(); hiddenAfter = t.getHiddenAfter(); } #region Implementation of ICloneable [Obsolete("Deprecated since version 2.7.2. Use ASTFactory.dup() instead.", false)] override public object Clone() { return new CommonASTWithHiddenTokens(this); } #endregion public class CommonASTWithHiddenTokensCreator : ASTNodeCreator { public CommonASTWithHiddenTokensCreator() {} /// /// Returns the fully qualified name of the AST type that this /// class creates. /// public override string ASTNodeTypeName { get { return typeof(antlr.CommonASTWithHiddenTokens).FullName;; } } /// /// Constructs a instance. /// public override AST Create() { return new CommonASTWithHiddenTokens(); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/SemanticException.cs0000755000175000017500000000164010522211615024525 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
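	// ------------------------------------------------------------------------
	// Editor's illustrative sketch -- not part of the ANTLR 2.7.7 sources.
	// Shows how the TokenStreamSelector defined above is typically wired up so
	// that a single parser can pull tokens from several lexers (for example to
	// handle include files).  The lexer arguments are assumed to be
	// ANTLR-generated classes that implement TokenStream; only selector methods
	// defined above (addInputStream, select, nextToken) are used here.
	public sealed class TokenStreamSelectorUsageSketch
	{
		// Registers two token sources under string keys and selects the initial one.
		public static TokenStreamSelector Wire(TokenStream mainLexer, TokenStream includeLexer)
		{
			TokenStreamSelector selector = new TokenStreamSelector();
			selector.addInputStream(mainLexer, "main");       // key is used later with select()/push()
			selector.addInputStream(includeLexer, "include");
			selector.select("main");                          // the parser starts on the "main" stream
			return selector;                                  // hand this selector to the parser as its TokenStream
		}

		// Drains the selector the way a parser would, until end-of-file.
		public static int CountTokens(TokenStreamSelector selector)
		{
			int n = 0;
			for (IToken t = selector.nextToken(); t.Type != Token.EOF_TYPE; t = selector.nextToken())
			{
				n++;   // push()/pop()/retry() calls made inside lexer actions are absorbed by nextToken()
			}
			return n;
		}
	}
	// ------------------------------------------------------------------------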
// [Serializable] public class SemanticException : RecognitionException { public SemanticException(string s) : base(s) { } [Obsolete("Replaced by SemanticException(string, string, int, int) since version 2.7.2.6", false)] public SemanticException(String s, String fileName, int line) : this(s, fileName, line, -1) { } public SemanticException(string s, string fileName, int line, int column) : base(s, fileName, line, column) { } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/MismatchedCharException.cs0000755000175000017500000001100510522211615025632 0ustar twernertwernerusing System; using StringBuilder = System.Text.StringBuilder; using BitSet = antlr.collections.impl.BitSet; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // [Serializable] public class MismatchedCharException : RecognitionException { /* * Returns a clean error message (no line number/column information) */ override public string Message { get { StringBuilder sb = new StringBuilder(); switch (mismatchType) { case CharTypeEnum.CharType: sb.Append("expecting "); appendCharName(sb, expecting); sb.Append(", found "); appendCharName(sb, foundChar); break; case CharTypeEnum.NotCharType: sb.Append("expecting anything but '"); appendCharName(sb, expecting); sb.Append("'; got it anyway"); break; case CharTypeEnum.RangeType: case CharTypeEnum.NotRangeType: sb.Append("expecting token "); if (mismatchType == CharTypeEnum.NotRangeType) sb.Append("NOT "); sb.Append("in range: "); appendCharName(sb, expecting); sb.Append(".."); appendCharName(sb, upper); sb.Append(", found "); appendCharName(sb, foundChar); break; case CharTypeEnum.SetType: case CharTypeEnum.NotSetType: sb.Append("expecting " + (mismatchType == CharTypeEnum.NotSetType ? "NOT " : "") + "one of ("); int[] elems = bset.toArray(); for (int i = 0; i < elems.Length; i++) { appendCharName(sb, elems[i]); } sb.Append("), found "); appendCharName(sb, foundChar); break; default: sb.Append(base.Message); break; } return sb.ToString(); } } // Types of chars public enum CharTypeEnum { CharType = 1, NotCharType = 2, RangeType = 3, NotRangeType = 4, SetType = 5, NotSetType = 6 } // One of the above public CharTypeEnum mismatchType; // what was found on the input stream public int foundChar; // For CHAR/NOT_CHAR and RANGE/NOT_RANGE public int expecting; // For RANGE/NOT_RANGE (expecting is lower bound of range) public int upper; // For SET/NOT_SET public BitSet bset; // who knows...they may want to ask scanner questions public CharScanner scanner; /* * MismatchedCharException constructor comment. */ public MismatchedCharException() : base("Mismatched char") { } // Expected range / not range public MismatchedCharException(char c, char lower, char upper_, bool matchNot, CharScanner scanner_) : base("Mismatched char", scanner_.getFilename(), scanner_.getLine(), scanner_.getColumn()) { mismatchType = matchNot ? CharTypeEnum.NotRangeType : CharTypeEnum.RangeType; foundChar = c; expecting = lower; upper = upper_; scanner = scanner_; } // Expected token / not token public MismatchedCharException(char c, char expecting_, bool matchNot, CharScanner scanner_) : base("Mismatched char", scanner_.getFilename(), scanner_.getLine(), scanner_.getColumn()) { mismatchType = matchNot ? 
CharTypeEnum.NotCharType : CharTypeEnum.CharType; foundChar = c; expecting = expecting_; scanner = scanner_; } // Expected BitSet / not BitSet public MismatchedCharException(char c, BitSet set_, bool matchNot, CharScanner scanner_) : base("Mismatched char", scanner_.getFilename(), scanner_.getLine(), scanner_.getColumn()) { mismatchType = matchNot ? CharTypeEnum.NotSetType : CharTypeEnum.SetType; foundChar = c; bset = set_; scanner = scanner_; } /// /// Append a char to the msg buffer. If special, then show escaped version /// /// Message buffer /// Char to append private void appendCharName(StringBuilder sb, int c) { switch (c) { case 65535 : // 65535 = (char) -1 = EOF sb.Append("''"); break; case '\n' : sb.Append(@"'\n'"); break; case '\r' : sb.Append(@"'\r'"); break; case '\t' : sb.Append(@"'\t'"); break; default : sb.Append('\''); sb.Append((char) c); sb.Append('\''); break; } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/Token.cs0000755000175000017500000000351210522211615022163 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A token is minimally a token type. Subclasses can add the text matched * for the token and line info. */ public class Token : IToken //, ICloneable { // constants public const int MIN_USER_TYPE = 4; public const int NULL_TREE_LOOKAHEAD = 3; public const int INVALID_TYPE = 0; public const int EOF_TYPE = 1; public static readonly int SKIP = - 1; // each Token has at least a token type protected int type_; // the illegal token object public static Token badToken = new Token(INVALID_TYPE, ""); public Token() { type_ = INVALID_TYPE; } public Token(int t) { type_ = t; } public Token(int t, string txt) { type_ = t; setText(txt); } public virtual int getColumn() { return 0; } public virtual int getLine() { return 0; } public virtual string getFilename() { return null; } public virtual void setFilename(string name) { } public virtual string getText() { return ""; } public int Type { get { return type_; } set { type_ = value; } } public virtual void setType(int newType) { this.Type = newType; } public virtual void setColumn(int c) { ; } public virtual void setLine(int l) { ; } public virtual void setText(string t) { ; } override public string ToString() { return "[\"" + getText() + "\",<" + type_ + ">]"; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/NoViableAltForCharException.cs0000755000175000017500000000312310522211615026365 0ustar twernertwernerusing System; using StringBuilder = System.Text.StringBuilder; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
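	// ------------------------------------------------------------------------
	// Editor's illustrative sketch -- not part of the ANTLR 2.7.7 sources.
	// Demonstrates the IToken / Token / CommonToken relationship shown above:
	// the plain Token base class only stores a token type, while CommonToken
	// also carries text, line and column.  The token type and values used here
	// are arbitrary and for illustration only.
	public sealed class TokenUsageSketch
	{
		public static string Describe()
		{
			IToken tok = new CommonToken(Token.MIN_USER_TYPE, "ident");  // type + text
			tok.setLine(3);                                              // CommonToken keeps position info
			tok.setColumn(17);
			// For a CommonToken, ToString() renders as ["ident",<4>,line=3,col=17]
			return tok.ToString();
		}
	}
	// ------------------------------------------------------------------------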
// [Serializable] public class NoViableAltForCharException : RecognitionException { public char foundChar; public NoViableAltForCharException(char c, CharScanner scanner) : base("NoViableAlt", scanner.getFilename(), scanner.getLine(), scanner.getColumn()) { foundChar = c; } public NoViableAltForCharException(char c, string fileName, int line, int column) : base("NoViableAlt", fileName, line, column) { foundChar = c; } /* * Returns a clean error message (no line number/column information) */ override public string Message { get { StringBuilder mesg = new StringBuilder("unexpected char: "); // I'm trying to mirror a change in the C++ stuff. // But java seems to lack something isprint-ish.. // so we do it manually. This is probably too restrictive. if ((foundChar >= ' ') && (foundChar <= '~')) { mesg.Append('\''); mesg.Append(foundChar); mesg.Append('\''); } else { mesg.Append("0x"); mesg.Append(((int)foundChar).ToString("X")); } return mesg.ToString(); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStream.cs0000755000175000017500000000100710522211615023334 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // public interface TokenStream { IToken nextToken(); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TokenStreamIOException.cs0000755000175000017500000000151510522211615025447 0ustar twernertwernerusing System; using IOException = System.IO.IOException; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /* * Wraps an IOException in a TokenStreamException */ [Serializable] public class TokenStreamIOException : TokenStreamException { public IOException io; /* * TokenStreamIOException constructor comment. * @param s java.lang.String */ public TokenStreamIOException(IOException io) : base(io.Message) { this.io = io; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/IToken.cs0000644000175000017500000000152110522211615022267 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /// /// A token is minimally a token type. Subclasses can add the text matched /// for the token and line info. 
/// public interface IToken { int getColumn(); void setColumn(int c); int getLine(); void setLine(int l); string getFilename(); void setFilename(string name); string getText(); void setText(string t); int Type { get; set; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ParseTreeToken.cs0000644000175000017500000000165510522211615024001 0ustar twernertwernernamespace antlr { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // using System; using StringBuilder = System.Text.StringBuilder; using AST = antlr.collections.AST; public class ParseTreeToken : ParseTree { protected IToken token; public ParseTreeToken(IToken token) { this.token = token; } protected override internal int getLeftmostDerivation(StringBuilder buf, int step) { buf.Append(' '); buf.Append(ToString()); return step; // did on replacements } public override string ToString() { if ( token != null ) { return token.getText(); } return ""; } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr/FileLineFormatter.cs0000755000175000017500000000173010522211615024456 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // public abstract class FileLineFormatter { private static FileLineFormatter formatter = new DefaultFileLineFormatter(); public static FileLineFormatter getFormatter() { return formatter; } public static void setFormatter(FileLineFormatter f) { formatter = f; } /*@param fileName the file that should appear in the prefix. (or null) * @param line the line (or -1) * @param column the column (or -1) */ public abstract string getFormatString(string fileName, int line, int column); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ASTNodeCreator.cs0000644000175000017500000000265610522211615023665 0ustar twernertwernernamespace antlr { using System; using AST = antlr.collections.AST; /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // /// /// A creator of AST node instances. /// /// /// /// This class and it's sub-classes exists primarily as an optimization /// of the reflection-based mechanism(s) previously used exclusively to /// create instances of AST node objects. /// /// /// Parsers and TreeParsers already use the ASTFactory class in ANTLR whenever /// they need to create an AST node objeect. What this class does is to support /// performant extensibility of the basic ASTFactory. The ASTFactory can now be /// extnded as run-time to support more new AST node types without using needing /// to use reflection. /// /// public abstract class ASTNodeCreator { /// /// Returns the fully qualified name of the AST type that this /// class creates. /// public abstract string ASTNodeTypeName { get; } /// /// Constructs an instance. 
/// public abstract AST Create(); } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/TreeParser.cs0000755000175000017500000001236310522211615023163 0ustar twernertwernerusing System; using AST = antlr.collections.AST; using BitSet = antlr.collections.impl.BitSet; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // public class TreeParser { /*The AST Null object; the parsing cursor is set to this when * it is found to be null. This way, we can test the * token type of a node without having to have tests for null * everywhere. */ public static ASTNULLType ASTNULL = new ASTNULLType(); /*Where did this rule leave off parsing; avoids a return parameter */ protected internal AST retTree_; /*guessing nesting level; guessing==0 implies not guessing */ // protected int guessing = 0; /*Nesting level of registered handlers */ // protected int exceptionLevel = 0; protected internal TreeParserSharedInputState inputState; /*Table of token type to token names */ protected internal string[] tokenNames; /*AST return value for a rule is squirreled away here */ protected internal AST returnAST; /*AST support code; parser and treeparser delegate to this object */ protected internal ASTFactory astFactory = new ASTFactory(); /*Used to keep track of indentdepth for traceIn/Out */ protected internal int traceDepth = 0; public TreeParser() { inputState = new TreeParserSharedInputState(); } /*Get the AST return value squirreled away in the parser */ public virtual AST getAST() { return returnAST; } public virtual ASTFactory getASTFactory() { return astFactory; } public virtual void resetState() { traceDepth = 0; returnAST = null; retTree_ = null; inputState.reset(); } public virtual string getTokenName(int num) { return tokenNames[num]; } public virtual string[] getTokenNames() { return tokenNames; } protected internal virtual void match(AST t, int ttype) { //System.out.println("match("+ttype+"); cursor is "+t); if (t == null || t == ASTNULL || t.Type != ttype) { throw new MismatchedTokenException(getTokenNames(), t, ttype, false); } } /*Make sure current lookahead symbol matches the given set * Throw an exception upon mismatch, which is catch by either the * error handler or by the syntactic predicate. */ public virtual void match(AST t, BitSet b) { if (t == null || t == ASTNULL || !b.member(t.Type)) { throw new MismatchedTokenException(getTokenNames(), t, b, false); } } protected internal virtual void matchNot(AST t, int ttype) { //System.out.println("match("+ttype+"); cursor is "+t); if (t == null || t == ASTNULL || t.Type == ttype) { throw new MismatchedTokenException(getTokenNames(), t, ttype, true); } } /// /// @deprecated as of 2.7.2. This method calls System.exit() and writes /// directly to stderr, which is usually not appropriate when /// a parser is embedded into a larger application. Since the method is /// static, it cannot be overridden to avoid these problems. /// ANTLR no longer uses this method internally or in generated code. 
/// /// [Obsolete("De-activated since version 2.7.2.6 as it cannot be overidden.", true)] public static void panic() { Console.Error.WriteLine("TreeWalker: panic"); System.Environment.Exit(1); } /*Parser error-reporting function can be overridden in subclass */ public virtual void reportError(RecognitionException ex) { Console.Error.WriteLine(ex.ToString()); } /*Parser error-reporting function can be overridden in subclass */ public virtual void reportError(string s) { Console.Error.WriteLine("error: " + s); } /*Parser warning-reporting function can be overridden in subclass */ public virtual void reportWarning(string s) { Console.Error.WriteLine("warning: " + s); } /*Specify an object with support code (shared by * Parser and TreeParser. Normally, the programmer * does not play with this, using setASTNodeType instead. */ public virtual void setASTFactory(ASTFactory f) { astFactory = f; } /*Specify the type of node to create during tree building */ public virtual void setASTNodeType(string nodeType) { setASTNodeClass(nodeType); } /*Specify the type of node to create during tree building */ public virtual void setASTNodeClass(string nodeType) { astFactory.setASTNodeType(nodeType); } public virtual void traceIndent() { for (int i = 0; i < traceDepth; i++) Console.Out.Write(" "); } public virtual void traceIn(string rname, AST t) { traceDepth += 1; traceIndent(); Console.Out.WriteLine("> " + rname + "(" + ((t != null) ? t.ToString() : "null") + ")" + ((inputState.guessing > 0) ? " [guessing]" : "")); } public virtual void traceOut(string rname, AST t) { traceIndent(); Console.Out.WriteLine("< " + rname + "(" + ((t != null) ? t.ToString() : "null") + ")" + ((inputState.guessing > 0) ? " [guessing]" : "")); traceDepth--; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/StringUtils.cs0000755000175000017500000000551210522211615023374 0ustar twernertwernerusing System; namespace antlr { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
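	// ------------------------------------------------------------------------
	// Editor's illustrative sketch -- not part of the ANTLR 2.7.7 sources.
	// The TreeParser above writes errors to Console.Error by default; both
	// reportError overloads are virtual, so an application (or a generated
	// tree walker, which derives from TreeParser) can collect them instead.
	// "ErrorCollectingTreeParser" is a hypothetical name used only here.
	public class ErrorCollectingTreeParser : TreeParser
	{
		private readonly System.Collections.ArrayList errors = new System.Collections.ArrayList();

		// Collect the formatted exception instead of printing it.
		public override void reportError(RecognitionException ex)
		{
			errors.Add(ex.ToString());
		}

		// Collect plain-string errors as well.
		public override void reportError(string s)
		{
			errors.Add(s);
		}

		// Errors gathered during the walk, in order of occurrence.
		public System.Collections.ArrayList Errors
		{
			get { return errors; }
		}
	}
	// ------------------------------------------------------------------------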
// public class StringUtils { /*General-purpose utility function for removing * characters from back of string * @param s The string to process * @param c The character to remove * @return The resulting string */ static public string stripBack(string s, char c) { while (s.Length > 0 && s[s.Length - 1] == c) { s = s.Substring(0, (s.Length - 1) - (0)); } return s; } /*General-purpose utility function for removing * characters from back of string * @param s The string to process * @param remove A string containing the set of characters to remove * @return The resulting string */ static public string stripBack(string s, string remove) { bool changed; do { changed = false; for (int i = 0; i < remove.Length; i++) { char c = remove[i]; while (s.Length > 0 && s[s.Length - 1] == c) { changed = true; s = s.Substring(0, (s.Length - 1) - (0)); } } } while (changed); return s; } /*General-purpose utility function for removing * characters from front of string * @param s The string to process * @param c The character to remove * @return The resulting string */ static public string stripFront(string s, char c) { while (s.Length > 0 && s[0] == c) { s = s.Substring(1); } return s; } /*General-purpose utility function for removing * characters from front of string * @param s The string to process * @param remove A string containing the set of characters to remove * @return The resulting string */ static public string stripFront(string s, string remove) { bool changed; do { changed = false; for (int i = 0; i < remove.Length; i++) { char c = remove[i]; while (s.Length > 0 && s[0] == c) { changed = true; s = s.Substring(1); } } } while (changed); return s; } /*General-purpose utility function for removing * characters from the front and back of string * @param s The string to process * @param head exact string to strip from head * @param tail exact string to strip from tail * @return The resulting string */ public static string stripFrontBack(string src, string head, string tail) { int h = src.IndexOf(head); int t = src.LastIndexOf(tail); if (h == - 1 || t == - 1) return src; return src.Substring(h + 1, (t) - (h + 1)); } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/MismatchedTokenException.cs0000755000175000017500000001344410522211615026046 0ustar twernertwernerusing System; using StringBuilder = System.Text.StringBuilder; using BitSet = antlr.collections.impl.BitSet; using AST = antlr.collections.AST; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. 
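	// ------------------------------------------------------------------------
	// Editor's illustrative sketch -- not part of the ANTLR 2.7.7 sources.
	// Typical use of the StringUtils helpers defined above from a lexer action:
	// trimming trailing blanks and the surrounding quotes off the text of a
	// matched string literal.  The input value is made up for illustration.
	public sealed class StringUtilsUsageSketch
	{
		public static string Demo()
		{
			string raw = "\"hello world\"  ";
			string trimmed = StringUtils.stripBack(raw, ' ');                    // "\"hello world\""
			string unquoted = StringUtils.stripFrontBack(trimmed, "\"", "\"");   // "hello world"
			return unquoted;
		}
	}
	// ------------------------------------------------------------------------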
// [Serializable] public class MismatchedTokenException : RecognitionException { // Token names array for formatting internal string[] tokenNames; // The token that was encountered public IToken token; // The offending AST node if tree walking public AST node; internal string tokenText = null; // taken from node or token object // Types of tokens public enum TokenTypeEnum { TokenType = 1, NotTokenType = 2, RangeType = 3, NotRangeType = 4, SetType = 5, NotSetType = 6 } // One of the above public TokenTypeEnum mismatchType; // For TOKEN/NOT_TOKEN and RANGE/NOT_RANGE public int expecting; // For RANGE/NOT_RANGE (expecting is lower bound of range) public int upper; // For SET/NOT_SET public BitSet bset; /*Looking for AST wildcard, didn't find it */ public MismatchedTokenException() : base("Mismatched Token: expecting any AST node", "", - 1, - 1) { } // Expected range / not range public MismatchedTokenException(string[] tokenNames_, AST node_, int lower, int upper_, bool matchNot) : base("Mismatched Token", "", - 1, - 1) { tokenNames = tokenNames_; node = node_; if (node_ == null) { tokenText = ""; } else { tokenText = node_.ToString(); } mismatchType = matchNot ? TokenTypeEnum.NotRangeType : TokenTypeEnum.RangeType; expecting = lower; upper = upper_; } // Expected token / not token public MismatchedTokenException(string[] tokenNames_, AST node_, int expecting_, bool matchNot) : base("Mismatched Token", "", - 1, - 1) { tokenNames = tokenNames_; node = node_; if (node_ == null) { tokenText = ""; } else { tokenText = node_.ToString(); } mismatchType = matchNot ? TokenTypeEnum.NotTokenType : TokenTypeEnum.TokenType; expecting = expecting_; } // Expected BitSet / not BitSet public MismatchedTokenException(string[] tokenNames_, AST node_, BitSet set_, bool matchNot) : base("Mismatched Token", "", - 1, - 1) { tokenNames = tokenNames_; node = node_; if (node_ == null) { tokenText = ""; } else { tokenText = node_.ToString(); } mismatchType = matchNot ? TokenTypeEnum.NotSetType : TokenTypeEnum.SetType; bset = set_; } // Expected range / not range public MismatchedTokenException(string[] tokenNames_, IToken token_, int lower, int upper_, bool matchNot, string fileName_) : base("Mismatched Token", fileName_, token_.getLine(), token_.getColumn()) { tokenNames = tokenNames_; token = token_; tokenText = token_.getText(); mismatchType = matchNot ? TokenTypeEnum.NotRangeType : TokenTypeEnum.RangeType; expecting = lower; upper = upper_; } // Expected token / not token public MismatchedTokenException(string[] tokenNames_, IToken token_, int expecting_, bool matchNot, string fileName_) : base("Mismatched Token", fileName_, token_.getLine(), token_.getColumn()) { tokenNames = tokenNames_; token = token_; tokenText = token_.getText(); mismatchType = matchNot ? TokenTypeEnum.NotTokenType : TokenTypeEnum.TokenType; expecting = expecting_; } // Expected BitSet / not BitSet public MismatchedTokenException(string[] tokenNames_, IToken token_, BitSet set_, bool matchNot, string fileName_) : base("Mismatched Token", fileName_, token_.getLine(), token_.getColumn()) { tokenNames = tokenNames_; token = token_; tokenText = token_.getText(); mismatchType = matchNot ? 
TokenTypeEnum.NotSetType : TokenTypeEnum.SetType; bset = set_; } /* * Returns a clean error message (no line number/column information) */ override public string Message { get { StringBuilder sb = new StringBuilder(); switch (mismatchType) { case TokenTypeEnum.TokenType: sb.Append("expecting " + tokenName(expecting) + ", found '" + tokenText + "'"); break; case TokenTypeEnum.NotTokenType: sb.Append("expecting anything but " + tokenName(expecting) + "; got it anyway"); break; case TokenTypeEnum.RangeType: sb.Append("expecting token in range: " + tokenName(expecting) + ".." + tokenName(upper) + ", found '" + tokenText + "'"); break; case TokenTypeEnum.NotRangeType: sb.Append("expecting token NOT in range: " + tokenName(expecting) + ".." + tokenName(upper) + ", found '" + tokenText + "'"); break; case TokenTypeEnum.SetType: case TokenTypeEnum.NotSetType: sb.Append("expecting " + (mismatchType == TokenTypeEnum.NotSetType ? "NOT " : "") + "one of ("); int[] elems = bset.toArray(); for (int i = 0; i < elems.Length; i++) { sb.Append(" "); sb.Append(tokenName(elems[i])); } sb.Append("), found '" + tokenText + "'"); break; default: sb.Append(base.Message); break; } return sb.ToString(); } } private string tokenName(int tokenType) { if (tokenType == Token.INVALID_TYPE) { return ""; } else if (tokenType < 0 || tokenType >= tokenNames.Length) { return "<" + tokenType.ToString() + ">"; } else { return tokenNames[tokenType]; } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/CommonHiddenStreamToken.cs0000755000175000017500000000353110522211615025625 0ustar twernertwernerusing System; namespace antlr { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // public class CommonHiddenStreamToken : CommonToken, IHiddenStreamToken { new public static readonly CommonHiddenStreamToken.CommonHiddenStreamTokenCreator Creator = new CommonHiddenStreamTokenCreator(); protected internal IHiddenStreamToken hiddenBefore; protected internal IHiddenStreamToken hiddenAfter; public CommonHiddenStreamToken() : base() { } public CommonHiddenStreamToken(int t, string txt) : base(t, txt) { } public CommonHiddenStreamToken(string s) : base(s) { } public virtual IHiddenStreamToken getHiddenAfter() { return hiddenAfter; } public virtual IHiddenStreamToken getHiddenBefore() { return hiddenBefore; } public virtual void setHiddenAfter(IHiddenStreamToken t) { hiddenAfter = t; } public virtual void setHiddenBefore(IHiddenStreamToken t) { hiddenBefore = t; } public class CommonHiddenStreamTokenCreator : TokenCreator { public CommonHiddenStreamTokenCreator() {} /// /// Returns the fully qualified name of the Token type that this /// class creates. /// public override string TokenTypeName { get { return typeof(antlr.CommonHiddenStreamToken).FullName;; } } /// /// Constructs a instance. 
/// public override IToken Create() { return new CommonHiddenStreamToken(); } } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr/ParseTreeRule.cs0000644000175000017500000000435010522211615023623 0ustar twernertwernernamespace antlr { /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // using System; using StringBuilder = System.Text.StringBuilder; using AST = antlr.collections.AST; public class ParseTreeRule : ParseTree { public const int INVALID_ALT = -1; protected string ruleName; protected int altNumber; // unused until I modify antlr to record this public ParseTreeRule(string ruleName) : this(ruleName, INVALID_ALT) { } public ParseTreeRule(string ruleName, int altNumber) { this.ruleName = ruleName; this.altNumber = altNumber; } public string getRuleName() { return ruleName; } /// /// Do a step-first walk, building up a buffer of tokens until /// you've reached a particular step and print out any rule subroots /// insteads of descending. /// /// derivation buffer /// derivation steps /// protected internal override int getLeftmostDerivation(StringBuilder buf, int step) { int numReplacements = 0; if ( step <= 0 ) { buf.Append(' '); buf.Append(ToString()); return numReplacements; } AST child = getFirstChild(); numReplacements = 1; // walk child printing them out, descending into at most one while ( child != null ) { if ( (numReplacements >= step) || (child is ParseTreeToken) ) { buf.Append(' '); buf.Append(child.ToString()); } else { // descend for at least one more derivation; update count int remainingReplacements = step - numReplacements; int n = ((ParseTree) child).getLeftmostDerivation(buf, remainingReplacements); numReplacements += n; } child = child.getNextSibling(); } return numReplacements; } public override string ToString() { if ( altNumber == INVALID_ALT ) { return '<'+ruleName+'>'; } else { return '<'+ruleName+"["+altNumber+"]>"; } } } } antlr-2.7.7/lib/csharp/antlr.runtime/antlr.collections.impl/0000755000175000017500000000000010522211615024025 5ustar twernertwernerantlr-2.7.7/lib/csharp/antlr.runtime/antlr.collections.impl/BitSet.cs0000755000175000017500000002761210522211615025561 0ustar twernertwernerusing System; using ArrayList = System.Collections.ArrayList; //using CharFormatter = antlr.CharFormatter; namespace antlr.collections.impl { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*A BitSet to replace java.util.BitSet. * Primary differences are that most set operators return new sets * as opposed to oring and anding "in place". Further, a number of * operations were added. I cannot contain a BitSet because there * is no way to access the internal bits (which I need for speed) * and, because it is final, I cannot subclass to add functionality. * Consider defining set degree. Without access to the bits, I must * call a method n times to test the ith bit...ack! * * Also seems like or() from util is wrong when size of incoming set is bigger * than this.bits.length. * * @author Terence Parr * @author
Pete Wells */ public class BitSet : ICloneable { protected internal const int BITS = 64; // number of bits / long protected internal const int NIBBLE = 4; protected internal const int LOG_BITS = 6; // 2^6 == 64 /*We will often need to do a mod operator (i mod nbits). Its * turns out that, for powers of two, this mod operation is * same as (i & (nbits-1)). Since mod is slow, we use a * precomputed mod mask to do the mod instead. */ protected internal static readonly int MOD_MASK = BITS - 1; /*The actual data bits */ protected internal long[] dataBits; /*Construct a bitset of size one word (64 bits) */ public BitSet() : this(BITS) { } /*Construction from a static array of longs */ public BitSet(long[] bits_) { dataBits = bits_; } /*Construct a bitset given the size * @param nbits The size of the bitset in bits */ public BitSet(int nbits) { dataBits = new long[((nbits - 1) >> LOG_BITS) + 1]; } /*or this element into this set (grow as necessary to accommodate) */ public virtual void add(int el) { int n = wordNumber(el); if (n >= dataBits.Length) { growToInclude(el); } dataBits[n] |= bitMask(el); } public virtual BitSet and(BitSet a) { BitSet s = (BitSet) this.Clone(); s.andInPlace(a); return s; } public virtual void andInPlace(BitSet a) { int min = (int) (Math.Min(dataBits.Length, a.dataBits.Length)); for (int i = min - 1; i >= 0; i--) { dataBits[i] &= a.dataBits[i]; } // clear all bits in this not present in a (if this bigger than a). for (int i = min; i < dataBits.Length; i++) { dataBits[i] = 0; } } private static long bitMask(int bitNumber) { int bitPosition = bitNumber & MOD_MASK; // bitNumber mod BITS return 1L << bitPosition; } public virtual void clear() { for (int i = dataBits.Length - 1; i >= 0; i--) { dataBits[i] = 0; } } public virtual void clear(int el) { int n = wordNumber(el); if (n >= dataBits.Length) { // grow as necessary to accommodate growToInclude(el); } dataBits[n] &= ~ bitMask(el); } public virtual object Clone() { BitSet s; try { s = new BitSet(); s.dataBits = new long[dataBits.Length]; Array.Copy(dataBits, 0, s.dataBits, 0, dataBits.Length); } catch //(System.Exception e) { throw new System.ApplicationException(); } return s; } public virtual int degree() { int deg = 0; for (int i = dataBits.Length - 1; i >= 0; i--) { long word = dataBits[i]; if (word != 0L) { for (int bit = BITS - 1; bit >= 0; bit--) { if ((word & (1L << bit)) != 0) { deg++; } } } } return deg; } override public int GetHashCode() { return dataBits.GetHashCode(); } /*code "inherited" from java.util.BitSet */ override public bool Equals(object obj) { if ((obj != null) && (obj is BitSet)) { BitSet bset = (BitSet) obj; int n = (int) (System.Math.Min(dataBits.Length, bset.dataBits.Length)); for (int i = n; i-- > 0; ) { if (dataBits[i] != bset.dataBits[i]) { return false; } } if (dataBits.Length > n) { for (int i = (int) (dataBits.Length); i-- > n; ) { if (dataBits[i] != 0) { return false; } } } else if (bset.dataBits.Length > n) { for (int i = (int) (bset.dataBits.Length); i-- > n; ) { if (bset.dataBits[i] != 0) { return false; } } } return true; } return false; } /* * Grows the set to a larger number of bits. 
* @param bit element that must fit in set */ public virtual void growToInclude(int bit) { int newSize = (int) (System.Math.Max(dataBits.Length << 1, numWordsToHold(bit))); long[] newbits = new long[newSize]; Array.Copy(dataBits, 0, newbits, 0, dataBits.Length); dataBits = newbits; } public virtual bool member(int el) { int n = wordNumber(el); if (n >= dataBits.Length) return false; return (dataBits[n] & bitMask(el)) != 0; } public virtual bool nil() { for (int i = dataBits.Length - 1; i >= 0; i--) { if (dataBits[i] != 0) return false; } return true; } public virtual BitSet not() { BitSet s = (BitSet) this.Clone(); s.notInPlace(); return s; } public virtual void notInPlace() { for (int i = dataBits.Length - 1; i >= 0; i--) { dataBits[i] = ~ dataBits[i]; } } /*complement bits in the range 0..maxBit. */ public virtual void notInPlace(int maxBit) { notInPlace(0, maxBit); } /*complement bits in the range minBit..maxBit.*/ public virtual void notInPlace(int minBit, int maxBit) { // make sure that we have room for maxBit growToInclude(maxBit); for (int i = minBit; i <= maxBit; i++) { int n = wordNumber(i); dataBits[n] ^= bitMask(i); } } private int numWordsToHold(int el) { return (el >> LOG_BITS) + 1; } public static BitSet of(int el) { BitSet s = new BitSet(el + 1); s.add(el); return s; } /*return this | a in a new set */ public virtual BitSet or(BitSet a) { BitSet s = (BitSet) this.Clone(); s.orInPlace(a); return s; } public virtual void orInPlace(BitSet a) { // If this is smaller than a, grow this first if (a.dataBits.Length > dataBits.Length) { setSize((int) (a.dataBits.Length)); } int min = (int) (System.Math.Min(dataBits.Length, a.dataBits.Length)); for (int i = min - 1; i >= 0; i--) { dataBits[i] |= a.dataBits[i]; } } // remove this element from this set public virtual void remove(int el) { int n = wordNumber(el); if (n >= dataBits.Length) { growToInclude(el); } dataBits[n] &= ~ bitMask(el); } /* * Sets the size of a set. * @param nwords how many words the new set should be */ private void setSize(int nwords) { long[] newbits = new long[nwords]; int n = (int) (System.Math.Min(nwords, dataBits.Length)); Array.Copy(dataBits, 0, newbits, 0, n); dataBits = newbits; } public virtual int size() { return dataBits.Length << LOG_BITS; // num words * bits per word } /*return how much space is being used by the dataBits array not * how many actually have member bits on. */ public virtual int lengthInLongWords() { return dataBits.Length; } /*Is this contained within a? */ public virtual bool subset(BitSet a) { if (a == null) //(a == null || !(a is BitSet)) return false; return this.and(a).Equals(this); } /*Subtract the elements of 'a' from 'this' in-place. * Basically, just turn off all bits of 'this' that are in 'a'. 
*/ public virtual void subtractInPlace(BitSet a) { if (a == null) return ; // for all words of 'a', turn off corresponding bits of 'this' for (int i = 0; i < dataBits.Length && i < a.dataBits.Length; i++) { dataBits[i] &= ~ a.dataBits[i]; } } public virtual int[] toArray() { int[] elems = new int[degree()]; int en = 0; for (int i = 0; i < (dataBits.Length << LOG_BITS); i++) { if (member(i)) { elems[en++] = i; } } return elems; } public virtual long[] toPackedArray() { return dataBits; } override public string ToString() { return ToString(","); } /*Transform a bit set into a string by formatting each element as an integer * @separator The string to put in between elements * @return A commma-separated list of values */ public virtual string ToString(string separator) { string str = ""; for (int i = 0; i < (dataBits.Length << LOG_BITS); i++) { if (member(i)) { if (str.Length > 0) { str += separator; } str = str + i; } } return str; } /*Create a string representation where instead of integer elements, the * ith element of vocabulary is displayed instead. Vocabulary is a Vector * of Strings. * @separator The string to put in between elements * @return A commma-separated list of character constants. */ public virtual string ToString(string separator, ArrayList vocabulary) { if (vocabulary == null) { return ToString(separator); } string str = ""; for (int i = 0; i < (dataBits.Length << LOG_BITS); i++) { if (member(i)) { if (str.Length > 0) { str += separator; } if (i >= vocabulary.Count) { str += ""; } else if (vocabulary[i] == null) { str += "<" + i + ">"; } else { str += (string) vocabulary[i]; } } } return str; } /* * Dump a comma-separated list of the words making up the bit set. * Split each 64 bit number into two more manageable 32 bit numbers. * This generates a comma-separated list of C++-like unsigned long constants. */ public virtual string toStringOfHalfWords() { string s = new string("".ToCharArray()); for (int i = 0; i < dataBits.Length; i++) { if (i != 0) s += ", "; long tmp = dataBits[i]; tmp &= 0xFFFFFFFFL; s += (tmp + "UL"); s += ", "; tmp = SupportClass.URShift(dataBits[i], 32); tmp &= 0xFFFFFFFFL; s += (tmp + "UL"); } return s; } /* * Dump a comma-separated list of the words making up the bit set. * This generates a comma-separated list of Java-like long int constants. */ public virtual string toStringOfWords() { string s = new string("".ToCharArray()); for (int i = 0; i < dataBits.Length; i++) { if (i != 0) s += ", "; s += (dataBits[i] + "L"); } return s; } /*Print out the bit set but collapse char ranges. 
*/ /* public virtual string toStringWithRanges(string separator, CharFormatter formatter) { string str = ""; int[] elems = this.toArray(); if (elems.Length == 0) { return ""; } // look for ranges int i = 0; while (i < elems.Length) { int lastInRange; lastInRange = 0; for (int j = i + 1; j < elems.Length; j++) { if (elems[j] != elems[j - 1] + 1) { break; } lastInRange = j; } // found a range if (str.Length > 0) { str += separator; } if (lastInRange - i >= 2) { str += formatter.literalChar(elems[i]); str += ".."; str += formatter.literalChar(elems[lastInRange]); i = lastInRange; // skip past end of range for next range } else { // no range, just print current char and move on str += formatter.literalChar(elems[i]); } i++; } return str; } */ private static int wordNumber(int bit) { return bit >> LOG_BITS; // bit / BITS } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.collections.impl/ASTArray.cs0000755000175000017500000000163610522211615026013 0ustar twernertwernerusing System; using AST = antlr.collections.AST; namespace antlr.collections.impl { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /*ASTArray is a class that allows ANTLR to * generate code that can create and initialize an array * in one expression, like: * (new ASTArray(3)).add(x).add(y).add(z) */ public class ASTArray { public int size = 0; public AST[] array; public ASTArray(int capacity) { array = new AST[capacity]; } public virtual ASTArray add(AST node) { array[size++] = node; return this; } } }antlr-2.7.7/lib/csharp/antlr.runtime/antlr.runtime.csproj0000644000175000017500000005151610522211615023464 0ustar twernertwerner antlr-2.7.7/lib/csharp/antlr.runtime/SupportClass.cs0000755000175000017500000000103610522211615022424 0ustar twernertwernerusing System; internal class SupportClass { public static int URShift(int number, int bits) { if ( number >= 0) return number >> bits; else return (number >> bits) + (2 << ~bits); } public static int URShift(int number, long bits) { return URShift(number, (int)bits); } public static long URShift(long number, int bits) { if ( number >= 0) return number >> bits; else return (number >> bits) + (2L << ~bits); } public static long URShift(long number, long bits) { return URShift(number, (int)bits); } } antlr-2.7.7/lib/csharp/antlr.runtime/AssemblyInfo.cs0000755000175000017500000000226710522211615022364 0ustar twernertwernerusing System; using System.Reflection; using System.Runtime.CompilerServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. 
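// ----------------------------------------------------------------------------
// Editor's illustrative sketch -- not part of the ANTLR 2.7.7 sources (it would
// normally live in its own file rather than next to the assembly attributes).
// Shows the basic antlr.collections.impl.BitSet operations used throughout the
// runtime: building sets, taking a union, and testing membership.
internal sealed class BitSetUsageSketch
{
	internal static bool Demo()
	{
		antlr.collections.impl.BitSet vowels = antlr.collections.impl.BitSet.of('a');  // one-element set
		vowels.add('e');   // add() grows the underlying word array as needed
		vowels.add('i');
		vowels.add('o');
		vowels.add('u');

		antlr.collections.impl.BitSet extra = antlr.collections.impl.BitSet.of('y');
		antlr.collections.impl.BitSet all = vowels.or(extra);   // union returned as a fresh set

		// degree() counts the members; member() tests a single element.
		return all.degree() == 6 && all.member('y') && !vowels.member('y');
	}
}
// ----------------------------------------------------------------------------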
// TODO: Review the values of the assembly attributes [assembly: AssemblyTitle("antlr.runtime")] [assembly: AssemblyDescription("ANTLR Runtime for .NET")] [assembly: AssemblyCompany("www.antlr.org")] [assembly: AssemblyProduct("")] [assembly: AssemblyCopyright("")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] [assembly: AssemblyVersion("2.7.7.01")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Revision // Build Number // // You can specify all the values or you can default the Revision and Build Numbers // by using the '*' as shown below: [assembly: CLSCompliantAttribute(true)] #if STRONGNAME #pragma warning disable 1699 [assembly: AssemblyDelaySign(false)] [assembly: AssemblyKeyFile("org.antlr.snk")] #pragma warning restore 1699 #endif #if APTC [assembly: System.Security.AllowPartiallyTrustedCallers] #endifantlr-2.7.7/lib/csharp/antlr.runtime/antlr.collections/0000755000175000017500000000000010522211615023065 5ustar twernertwernerantlr-2.7.7/lib/csharp/antlr.runtime/antlr.collections/AST.cs0000755000175000017500000000447110522211615024054 0ustar twernertwernerusing System; using IEnumerator = System.Collections.IEnumerator; using IToken = antlr.IToken; namespace antlr.collections { /*ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ // // ANTLR C# Code Generator by Micheal Jordan // Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com // Anthony Oguntimehin // // With many thanks to Eric V. Smith from the ANTLR list. // /// /// Minimal AST node interface used by ANTLR AST generation and tree-walker. /// public interface AST : ICloneable { /// /// Add a (rightmost) child to this node /// /// void addChild(AST c); bool Equals(AST t); bool EqualsList(AST t); bool EqualsListPartial(AST t); bool EqualsTree(AST t); bool EqualsTreePartial(AST t); IEnumerator findAll(AST tree); IEnumerator findAllPartial(AST subtree); /// /// Get the first child of this node; null if no children /// AST getFirstChild(); /// /// Get the next sibling in line after this one /// AST getNextSibling(); /// /// Get the token text for this node /// /// string getText(); /// /// Get the token type for this node /// int Type { get; set;} /// /// Get number of children of this node; if leaf, returns 0 /// /// Number of children int getNumberOfChildren(); void initialize(int t, string txt); void initialize(AST t); void initialize(IToken t); /// /// Set the first child of a node. /// /// void setFirstChild(AST c); /// /// Set the next sibling after this one. /// /// void setNextSibling(AST n); /// /// Set the token text for this node /// /// void setText(string text); /// /// Set the token type for this node /// /// void setType(int ttype); string ToString(); string ToStringList(); string ToStringTree(); } }antlr-2.7.7/lib/cpp/0000755000175000017500000000000010522211615014130 5ustar twernertwernerantlr-2.7.7/lib/cpp/doxygen.cfg0000644000175000017500000000715310522211615016274 0ustar twernertwerner# # Doxygen config file for ANTLR's C++ support libraries. # # Thanks to Bill Zheng for parts of this. 
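#
# The C++ API documentation is typically regenerated by pointing doxygen at this
# file, e.g. "doxygen doxygen.cfg" run from lib/cpp; output lands in the gen_doc
# directory configured below (OUTPUT_DIRECTORY).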
# PROJECT_NAME = "ANTLR Support Libraries 2.7.1+" # Input files: INPUT = antlr src RECURSIVE = YES FILE_PATTERNS = *.cpp *.h *.hpp JAVADOC_AUTOBRIEF = NO #--------------------------------------------------------------------------- # Configuration options related to the preprocessor #--------------------------------------------------------------------------- # If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will # evaluate all C-preprocessor directives found in the sources and include # files. ENABLE_PREPROCESSING = YES # If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro # names in the source code. If set to NO (the default) only conditional # compilation will be performed. MACRO_EXPANSION = YES # If the SEARCH_INCLUDES tag is set to YES (the default) the includes files # in the INCLUDE_PATH (see below) will be search if a #include is found. SEARCH_INCLUDES = YES # The INCLUDE_PATH tag can be used to specify one or more directories that # contain include files that are not input files but should be processed by # the preprocessor. INCLUDE_PATH = # The PREDEFINED tag can be used to specify one or more macro names that # are defined before the preprocessor is started (similar to the -D option of # gcc). The argument of the tag is a list of macros of the form: name # or name=definition (no spaces). If the definition and the = are # omitted =1 is assumed. PREDEFINED = "ANTLR_USE_NAMESPACE(_x_)=_x_::" \ "ANTLR_USING_NAMESPACE(_x_)=using namespace _x_;" \ "ANTLR_C_USING(_x_)=" \ "ANTLR_API=" # If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES # then the macro expansion is limited to the macros specified with the # PREDEFINED tag. EXPAND_ONLY_PREDEF = YES # Output options OUTPUT_DIRECTORY = gen_doc PAPER_TYPE = a4wide #PAPER_TYPE = a4 TAB_SIZE = 3 CASE_SENSE_NAMES = YES # If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend # the brief description of a member or function before the detailed description. # Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. REPEAT_BRIEF = YES # The INTERNAL_DOCS tag determines if documentation # that is typed after a \internal command is included. If the tag is set # to NO (the default) then the documentation will be excluded. # Set it to YES to include the internal documentation. INTERNAL_DOCS = NO # if the INHERIT_DOCS tag is set to YES (the default) then an undocumented # member inherits the documentation from any documented member that it # reimplements. INHERIT_DOCS = YES # if the INLINE_INFO tag is set to YES (the default) then a tag [inline] # is inserted in the documentation for inline members. INLINE_INFO = YES # Dot and friends... HAVE_DOT = YES CLASS_GRAPH = YES COLLABORATION_GRAPH = YES INCLUDE_GRAPH = YES INCLUDED_BY_GRAPH = YES EXTRACT_ALL = YES EXTRACT_STATIC = YES EXTRACT_PRIVATE = YES # HTML output and friends... GENERATE_HTML = YES # Tree view gives too much trouble with various browsers. GENERATE_TREEVIEW = NO # Latex output and friends... GENERATE_LATEX = NO PDF_HYPERLINKS = YES GENERATE_MAN = NO GENERATE_RTF = NO # Control of convenience stuff GENERATE_TODOLIST = YES # Control over warnings etc. 
Unset EXTRACT_ALL to get this to work WARN_IF_UNDOCUMENTED = YES WARNINGS = YES QUIET = YES antlr-2.7.7/lib/cpp/Makefile.in0000644000175000017500000000057310522211615016202 0ustar twernertwerner############################################################################### # $Id:$ ############################################################################### ## do not change this value subdir=lib/cpp ## get configured autoconf variables @stdvars@ ## Your own variables shall go here .. # Docs DOXY_TARGET=doxygen.cfg DOXY_GENDIR=gendoc/html @stdmake@ @stddeps@ antlr-2.7.7/lib/cpp/antlr/0000755000175000017500000000000010522211615015250 5ustar twernertwernerantlr-2.7.7/lib/cpp/antlr/TokenStreamSelector.hpp0000644000175000017500000000531710522211615021724 0ustar twernertwerner#ifndef INC_TokenStreamSelector_hpp__ #define INC_TokenStreamSelector_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenStreamSelector.hpp#2 $ */ #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** A token stream MUX (multiplexor) knows about n token streams * and can multiplex them onto the same channel for use by token * stream consumer like a parser. This is a way to have multiple * lexers break up the same input stream for a single parser. * Or, you can have multiple instances of the same lexer handle * multiple input streams; this works great for includes. */ class ANTLR_API TokenStreamSelector : public TokenStream { protected: /** The set of inputs to the MUX */ #ifdef OS_NO_ALLOCATOR typedef ANTLR_USE_NAMESPACE(std)less lessp; typedef ANTLR_USE_NAMESPACE(std)map inputStreamNames_coll; #else typedef ANTLR_USE_NAMESPACE(std)map inputStreamNames_coll; #endif inputStreamNames_coll inputStreamNames; /** The currently-selected token stream input */ TokenStream* input; /** Used to track stack of input streams */ #ifdef OS_NO_ALLOCATOR typedef ANTLR_USE_NAMESPACE(std)stack > streamStack_coll; #else typedef ANTLR_USE_NAMESPACE(std)stack streamStack_coll; #endif streamStack_coll streamStack; public: TokenStreamSelector(); ~TokenStreamSelector(); void addInputStream(TokenStream* stream, const ANTLR_USE_NAMESPACE(std)string& key); /// Return the stream from which tokens are being pulled at the moment. TokenStream* getCurrentStream() const; TokenStream* getStream(const ANTLR_USE_NAMESPACE(std)string& sname) const; RefToken nextToken(); TokenStream* pop(); void push(TokenStream* stream); void push(const ANTLR_USE_NAMESPACE(std)string& sname); /** Abort recognition of current Token and try again. * A stream can push a new stream (for include files * for example, and then retry(), which will cause * the current stream to abort back to this.nextToken(). * this.nextToken() then asks for a token from the * current stream, which is the new "substream." 
*/ void retry(); /** Set the stream without pushing old stream */ void select(TokenStream* stream); void select(const ANTLR_USE_NAMESPACE(std)string& sname); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenStreamSelector_hpp__ antlr-2.7.7/lib/cpp/antlr/TreeParser.hpp0000644000175000017500000001035110522211615020035 0ustar twernertwerner#ifndef INC_TreeParser_hpp__ #define INC_TreeParser_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TreeParser.hpp#2 $ */ #include #include #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API TreeParser { public: TreeParser() : astFactory(0) , inputState(new TreeParserInputState()) , traceDepth(0) { } TreeParser(const TreeParserSharedInputState& state) : astFactory(0) , inputState(state) , traceDepth(0) { } virtual ~TreeParser() { } /// Get the AST return value squirreled away in the parser virtual RefAST getAST() = 0; /** Make sure current lookahead symbol matches the given set * Throw an exception upon mismatch, which is caught by either the * error handler or by a syntactic predicate. */ virtual void match(RefAST t, const BitSet& b) { if ( !t || t==ASTNULL || !b.member(t->getType()) ) throw MismatchedTokenException( getTokenNames(), getNumTokens(), t, b, false ); } /** Specify the AST factory to be used during tree building. (Compulsory) * Setting the factory is compulsory (if you intend to modify * the tree in the treeparser). The AST Factory is shared between * parser (who builds the initial AST) and treeparser. * @see Parser::getASTFactory() */ virtual void setASTFactory(ASTFactory* factory) { astFactory = factory; } /// Return pointer to ASTFactory virtual ASTFactory* getASTFactory() const { return astFactory; } /// Get the name for token 'num' virtual const char* getTokenName(int num) const = 0; /// Return the number of tokens defined virtual int getNumTokens() const = 0; /// Return an array of getNumTokens() token names virtual const char* const* getTokenNames() const = 0; /// Parser error-reporting function can be overridden in subclass virtual void reportError(const RecognitionException& ex); /// Parser error-reporting function can be overridden in subclass virtual void reportError(const ANTLR_USE_NAMESPACE(std)string& s); /// Parser warning-reporting function can be overridden in subclass virtual void reportWarning(const ANTLR_USE_NAMESPACE(std)string& s); /// These are used during when traceTreeParser commandline option is passed. virtual void traceIndent(); virtual void traceIn(const char* rname, RefAST t); virtual void traceOut(const char* rname, RefAST t); /** The AST Null object; the parsing cursor is set to this when * it is found to be null. This way, we can test the * token type of a node without having to have tests for 0 * everywhere. */ static RefAST ASTNULL; protected: virtual void match(RefAST t, int ttype) { if (!t || t == ASTNULL || t->getType() != ttype ) throw MismatchedTokenException( getTokenNames(), getNumTokens(), t, ttype, false ); } virtual void matchNot(RefAST t, int ttype) { if ( !t || t == ASTNULL || t->getType() == ttype ) throw MismatchedTokenException( getTokenNames(), getNumTokens(), t, ttype, true ); } /** AST support code; parser and treeparser delegate to this object */ ASTFactory* astFactory; /// The input state of this tree parser. 
TreeParserSharedInputState inputState; /** Used to keep track of indent depth with -traceTreeParser */ int traceDepth; /** Utility class which allows tracing to work even when exceptions are * thrown. */ class Tracer { private: TreeParser* parser; const char* text; RefAST tree; public: Tracer(TreeParser* p, const char* t, RefAST a) : parser(p), text(t), tree(a) { parser->traceIn(text,tree); } ~Tracer() { parser->traceOut(text,tree); } private: Tracer(const Tracer&); // undefined const Tracer& operator=(const Tracer&); // undefined }; private: // no copying of treeparser instantiations... TreeParser(const TreeParser& other); TreeParser& operator=(const TreeParser& other); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TreeParser_hpp__ antlr-2.7.7/lib/cpp/antlr/ASTPair.hpp0000644000175000017500000000307510522211615017231 0ustar twernertwerner#ifndef INC_ASTPair_hpp__ #define INC_ASTPair_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/ASTPair.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** ASTPair: utility class used for manipulating a pair of ASTs * representing the current AST root and current AST sibling. * This exists to compensate for the lack of pointers or 'var' * arguments in Java. * * OK, so we can do those things in C++, but it seems easier * to stick with the Java way for now. */ class ANTLR_API ASTPair { public: RefAST root; // current root of tree RefAST child; // current child to which siblings are added /** Make sure that child is the last sibling */ void advanceChildToEnd() { if (child) { while (child->getNextSibling()) { child = child->getNextSibling(); } } } // /** Copy an ASTPair. Don't call it clone() because we want type-safety */ // ASTPair copy() { // ASTPair tmp = new ASTPair(); // tmp.root = root; // tmp.child = child; // return tmp; // } ANTLR_USE_NAMESPACE(std)string toString() const { ANTLR_USE_NAMESPACE(std)string r = !root ? ANTLR_USE_NAMESPACE(std)string("null") : root->getText(); ANTLR_USE_NAMESPACE(std)string c = !child ? ANTLR_USE_NAMESPACE(std)string("null") : child->getText(); return "["+r+","+c+"]"; } }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_ASTPair_hpp__ antlr-2.7.7/lib/cpp/antlr/ParserSharedInputState.hpp0000644000175000017500000000407010522211615022366 0ustar twernertwerner#ifndef INC_ParserSharedInputState_hpp__ #define INC_ParserSharedInputState_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/ParserSharedInputState.hpp#2 $ */ #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** This object contains the data associated with an * input stream of tokens. Multiple parsers * share a single ParserSharedInputState to parse * the same stream of tokens. */ class ANTLR_API ParserInputState { public: /** Construct a new ParserInputState * @param in the TokenBuffer to read from. The object is deleted together * with the ParserInputState object. */ ParserInputState( TokenBuffer* in ) : guessing(0) , filename() , input(in) , inputResponsible(true) { } /** Construct a new ParserInputState * @param in the TokenBuffer to read from. 
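 *
 * Minimal sharing sketch (illustrative only; MyLexer/MyParser stand for
 * ANTLR-generated classes):
 *
 *   MyLexer lexer( std::cin );
 *   antlr::TokenBuffer buffer( lexer );
 *   antlr::ParserSharedInputState state( new antlr::ParserInputState( buffer ) );
 *   MyParser first( state ), second( state );  // both parsers pull tokens from one buffer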
*/ ParserInputState( TokenBuffer& in ) : guessing(0) , filename("") , input(&in) , inputResponsible(false) { } virtual ~ParserInputState() { if (inputResponsible) delete input; } TokenBuffer& getInput( void ) { return *input; } /// Reset the ParserInputState and the underlying TokenBuffer void reset( void ) { input->reset(); guessing = 0; } public: /** Are we guessing (guessing>0)? */ int guessing; /** What file (if known) caused the problem? * @todo wrap this one.. */ ANTLR_USE_NAMESPACE(std)string filename; private: /** Where to get token objects */ TokenBuffer* input; /// Do we need to free the TokenBuffer or is it owned by another.. bool inputResponsible; // we don't want these: ParserInputState(const ParserInputState&); ParserInputState& operator=(const ParserInputState&); }; /// A reference counted ParserInputState typedef RefCount ParserSharedInputState; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_ParserSharedInputState_hpp__ antlr-2.7.7/lib/cpp/antlr/Makefile.in0000644000175000017500000000711510522211615017321 0ustar twernertwerner############################################################################### # $Id:$ ############################################################################### ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stdvars@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ## do not change this value subdir=lib/cpp/antlr antlr_hpp_FILES = \ @abs_top_srcdir@/lib/cpp/antlr/ANTLRException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/ANTLRUtil.hpp \ @abs_top_srcdir@/lib/cpp/antlr/ASTArray.hpp \ @abs_top_srcdir@/lib/cpp/antlr/ASTFactory.hpp \ @abs_top_srcdir@/lib/cpp/antlr/AST.hpp \ @abs_top_srcdir@/lib/cpp/antlr/ASTNULLType.hpp \ @abs_top_srcdir@/lib/cpp/antlr/ASTPair.hpp \ @abs_top_srcdir@/lib/cpp/antlr/ASTRefCount.hpp \ @abs_top_srcdir@/lib/cpp/antlr/BaseAST.hpp \ @abs_top_srcdir@/lib/cpp/antlr/BitSet.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CharBuffer.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CharInputBuffer.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CharScanner.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CharStreamException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CharStreamIOException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CircularQueue.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CommonAST.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CommonASTWithHiddenTokens.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CommonHiddenStreamToken.hpp \ @abs_top_srcdir@/lib/cpp/antlr/CommonToken.hpp \ @abs_top_srcdir@/lib/cpp/antlr/config.hpp \ @abs_top_srcdir@/lib/cpp/antlr/Countable.hpp \ @abs_top_srcdir@/lib/cpp/antlr/InputBuffer.hpp \ @abs_top_srcdir@/lib/cpp/antlr/IOException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/LexerSharedInputState.hpp \ @abs_top_srcdir@/lib/cpp/antlr/LLkParser.hpp \ @abs_top_srcdir@/lib/cpp/antlr/MismatchedCharException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/MismatchedTokenException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/NoViableAltException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/NoViableAltForCharException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/Parser.hpp \ @abs_top_srcdir@/lib/cpp/antlr/ParserSharedInputState.hpp \ @abs_top_srcdir@/lib/cpp/antlr/RecognitionException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/RefCount.hpp \ @abs_top_srcdir@/lib/cpp/antlr/SemanticException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/String.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenBuffer.hpp \ @abs_top_srcdir@/lib/cpp/antlr/Token.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenRefCount.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenStreamBasicFilter.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenStreamException.hpp \ 
@abs_top_srcdir@/lib/cpp/antlr/TokenStreamHiddenTokenFilter.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenStream.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenStreamIOException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenStreamRecognitionException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenStreamRetryException.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenStreamRewriteEngine.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenStreamSelector.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TokenWithIndex.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TreeParser.hpp \ @abs_top_srcdir@/lib/cpp/antlr/TreeParserSharedInputState.hpp \ $(eol) all : clean: distclean: clean @RMF@ Makefile test: install: @@ECHO@ "install hpp files .. " @$(MKDIR) -p "$(includedir)/antlr" @for f in $(antlr_hpp_FILES) ; do \ @ECHO@ "install $${f}" ; \ if test -f "$${f}" ; then \ $(INSTALL) -m 444 "$${f}" "$(includedir)/antlr" ; \ fi ;\ done .PHONY: all clean distclean install test ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stddeps@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx antlr-2.7.7/lib/cpp/antlr/ASTNULLType.hpp0000644000175000017500000000343410522211615017751 0ustar twernertwerner#ifndef INC_ASTNULLType_hpp__ #define INC_ASTNULLType_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/ASTNULLType.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** There is only one instance of this class **/ class ANTLR_API ASTNULLType : public AST { public: const char* typeName( void ) const; RefAST clone( void ) const; void addChild(RefAST c); size_t getNumberOfChildren() const; void setFirstChild(RefAST c); void setNextSibling(RefAST n); bool equals(RefAST t) const; bool equalsList(RefAST t) const; bool equalsListPartial(RefAST t) const; bool equalsTree(RefAST t) const; bool equalsTreePartial(RefAST t) const; ANTLR_USE_NAMESPACE(std)vector findAll(RefAST tree); ANTLR_USE_NAMESPACE(std)vector findAllPartial(RefAST subtree); RefAST getFirstChild() const; RefAST getNextSibling() const; ANTLR_USE_NAMESPACE(std)string getText() const; int getType() const; void initialize(int t, const ANTLR_USE_NAMESPACE(std)string& txt); void initialize(RefAST t); void initialize(RefToken t); void initialize(ANTLR_USE_NAMESPACE(std)istream& infile); void setText(const ANTLR_USE_NAMESPACE(std)string& text); void setType(int ttype); ANTLR_USE_NAMESPACE(std)string toString() const; ANTLR_USE_NAMESPACE(std)string toStringList() const; ANTLR_USE_NAMESPACE(std)string toStringTree() const; bool attributesToStream( ANTLR_USE_NAMESPACE(std)ostream &out ) const; void toStream( ANTLR_USE_NAMESPACE(std)ostream &out ) const; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_ASTNULLType_hpp__ antlr-2.7.7/lib/cpp/antlr/AST.hpp0000644000175000017500000001230210522211615016406 0ustar twernertwerner#ifndef INC_AST_hpp__ #define INC_AST_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/AST.hpp#2 $ */ #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif struct ASTRef; class ANTLR_API AST { public: AST() : ref(0) {} AST(const AST&) : ref(0) {} virtual ~AST() {} /// Return the type name for this AST node. 
(for XML output) virtual const char* typeName( void ) const = 0; /// Clone this AST node. virtual RefAST clone( void ) const = 0; /// Is node t equal to this in terms of token type and text? virtual bool equals(RefAST t) const = 0; /** Is t an exact structural and equals() match of this tree. The * 'this' reference is considered the start of a sibling list. */ virtual bool equalsList(RefAST t) const = 0; /** Is 't' a subtree of this list? The siblings of the root are NOT ignored. */ virtual bool equalsListPartial(RefAST t) const = 0; /** Is tree rooted at 'this' equal to 't'? The siblings of 'this' are * ignored. */ virtual bool equalsTree(RefAST t) const = 0; /** Is 't' a subtree of the tree rooted at 'this'? The siblings of * 'this' are ignored. */ virtual bool equalsTreePartial(RefAST t) const = 0; /** Walk the tree looking for all exact subtree matches. Return * a vector of RefAST that lets the caller walk the list * of subtree roots found herein. */ virtual ANTLR_USE_NAMESPACE(std)vector findAll(RefAST t) = 0; /** Walk the tree looking for all subtrees. Return * a vector of RefAST that lets the caller walk the list * of subtree roots found herein. */ virtual ANTLR_USE_NAMESPACE(std)vector findAllPartial(RefAST t) = 0; /// Add a node to the end of the child list for this node virtual void addChild(RefAST c) = 0; /// Get the number of children. Returns 0 if the node is a leaf virtual size_t getNumberOfChildren() const = 0; /// Get the first child of this node; null if no children virtual RefAST getFirstChild() const = 0; /// Get the next sibling in line after this one virtual RefAST getNextSibling() const = 0; /// Get the token text for this node virtual ANTLR_USE_NAMESPACE(std)string getText() const = 0; /// Get the token type for this node virtual int getType() const = 0; /** Various initialization routines. Used by several factories to initialize * an AST element. */ virtual void initialize(int t, const ANTLR_USE_NAMESPACE(std)string& txt) = 0; virtual void initialize(RefAST t) = 0; virtual void initialize(RefToken t) = 0; #ifdef ANTLR_SUPPORT_XML /** initialize this node from the contents of a stream. * @param in the stream to read the AST attributes from. */ virtual void initialize( ANTLR_USE_NAMESPACE(std)istream& in ) = 0; #endif /// Set the first child of a node. virtual void setFirstChild(RefAST c) = 0; /// Set the next sibling after this one. virtual void setNextSibling(RefAST n) = 0; /// Set the token text for this node virtual void setText(const ANTLR_USE_NAMESPACE(std)string& txt) = 0; /// Set the token type for this node virtual void setType(int type) = 0; /// Return this AST node as a string virtual ANTLR_USE_NAMESPACE(std)string toString() const = 0; /// Print out a child-sibling tree in LISP notation virtual ANTLR_USE_NAMESPACE(std)string toStringList() const = 0; virtual ANTLR_USE_NAMESPACE(std)string toStringTree() const = 0; #ifdef ANTLR_SUPPORT_XML /** get attributes of this node to 'out'. Override to customize XML * output. * @param out the stream to write the AST attributes to. * @returns if a explicit closetag should be written */ virtual bool attributesToStream( ANTLR_USE_NAMESPACE(std)ostream& out ) const = 0; /** Print a symbol over ostream. Overload this one to customize the XML * output for AST derived AST-types * @param output stream */ virtual void toStream( ANTLR_USE_NAMESPACE(std)ostream &out ) const = 0; /** Dump AST contents in XML format to output stream. * Works in conjunction with to_stream method. 
Overload that one is * derived classes to customize behaviour. * @param output stream to write to string to put the stuff in. * @param ast RefAST object to write. */ friend ANTLR_USE_NAMESPACE(std)ostream& operator<<( ANTLR_USE_NAMESPACE(std)ostream& output, const RefAST& ast ); #endif private: friend struct ASTRef; ASTRef* ref; AST(RefAST other); AST& operator=(const AST& other); AST& operator=(RefAST other); }; #ifdef ANTLR_SUPPORT_XML inline ANTLR_USE_NAMESPACE(std)ostream& operator<<( ANTLR_USE_NAMESPACE(std)ostream& output, const RefAST& ast ) { ast->toStream(output); return output; } #endif extern ANTLR_API RefAST nullAST; extern ANTLR_API AST* const nullASTptr; #ifdef NEEDS_OPERATOR_LESS_THAN // RK: apparently needed by MSVC and a SUN CC, up to and including // 2.7.2 this was undefined ? inline bool operator<( RefAST l, RefAST r ) { return nullAST == l ? ( nullAST == r ? false : true ) : l->getType() < r->getType(); } #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_AST_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenStreamException.hpp0000644000175000017500000000161710522211615022101 0ustar twernertwerner#ifndef INC_TokenStreamException_hpp__ #define INC_TokenStreamException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenStreamException.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** Baseclass for exceptions thrown by classes implementing the TokenStream * interface. * @see TokenStream */ class ANTLR_API TokenStreamException : public ANTLRException { public: TokenStreamException() : ANTLRException() { } TokenStreamException(const ANTLR_USE_NAMESPACE(std)string& s) : ANTLRException(s) { } virtual ~TokenStreamException() throw() { } }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenStreamException_hpp__ antlr-2.7.7/lib/cpp/antlr/NoViableAltForCharException.hpp0000644000175000017500000000207210522211615023246 0ustar twernertwerner#ifndef INC_NoViableAltForCharException_hpp__ # define INC_NoViableAltForCharException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/NoViableAltForCharException.hpp#2 $ */ # include # include # include # ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { # endif class ANTLR_API NoViableAltForCharException : public RecognitionException { public: NoViableAltForCharException(int c, CharScanner* scanner); NoViableAltForCharException(int c, const ANTLR_USE_NAMESPACE(std)string& fileName_, int line_, int column_); virtual ~NoViableAltForCharException() throw() { } /// Returns a clean error message (no line number/column information) ANTLR_USE_NAMESPACE(std)string getMessage() const; protected: int foundChar; }; # ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } # endif #endif //INC_NoViableAltForCharException_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenWithIndex.hpp0000644000175000017500000000314510522211615020670 0ustar twernertwerner#ifndef INC_TokenWithIndex_hpp__ #define INC_TokenWithIndex_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API TokenWithIndex : public 
ANTLR_USE_NAMESPACE(antlr)CommonToken { public: // static size_t count; TokenWithIndex() : CommonToken(), index(0) { // std::cout << __PRETTY_FUNCTION__ << std::endl; // count++; } TokenWithIndex(int t, const ANTLR_USE_NAMESPACE(std)string& txt) : CommonToken(t,txt) , index(0) { // std::cout << __PRETTY_FUNCTION__ << std::endl; // count++; } TokenWithIndex(const ANTLR_USE_NAMESPACE(std)string& s) : CommonToken(s) , index(0) { // std::cout << __PRETTY_FUNCTION__ << std::endl; // count++; } ~TokenWithIndex() { // count--; } void setIndex( size_t idx ) { index = idx; } size_t getIndex( void ) const { return index; } ANTLR_USE_NAMESPACE(std)string toString() const { return ANTLR_USE_NAMESPACE(std)string("[")+ index+ ":\""+ getText()+"\",<"+ getType()+">,line="+ getLine()+",column="+ getColumn()+"]"; } static RefToken factory() { return RefToken(new TokenWithIndex()); } protected: size_t index; private: TokenWithIndex(const TokenWithIndex&); const TokenWithIndex& operator=(const TokenWithIndex&); }; typedef TokenRefCount RefTokenWithIndex; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CommonToken_hpp__ antlr-2.7.7/lib/cpp/antlr/IOException.hpp0000644000175000017500000000166210522211615020154 0ustar twernertwerner#ifndef INC_IOException_hpp__ #define INC_IOException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** Generic IOException used inside support code. (thrown by XML I/O routs) * basically this is something I'm using since a lot of compilers don't * support ios_base::failure. */ class ANTLR_API IOException : public ANTLRException { public: ANTLR_USE_NAMESPACE(std)exception io; IOException( ANTLR_USE_NAMESPACE(std)exception& e ) : ANTLRException(e.what()) { } IOException( const ANTLR_USE_NAMESPACE(std)string& mesg ) : ANTLRException(mesg) { } virtual ~IOException() throw() { } }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_IOException_hpp__ antlr-2.7.7/lib/cpp/antlr/CommonASTWithHiddenTokens.hpp0000644000175000017500000000301510522211615022714 0ustar twernertwerner#ifndef INC_CommonASTWithHiddenTokens_hpp__ #define INC_CommonASTWithHiddenTokens_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CommonASTWithHiddenTokens.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** A CommonAST whose initialization copies hidden token * information from the Token used to create a node. */ class ANTLR_API CommonASTWithHiddenTokens : public CommonAST { public: CommonASTWithHiddenTokens(); virtual ~CommonASTWithHiddenTokens(); virtual const char* typeName( void ) const { return CommonASTWithHiddenTokens::TYPE_NAME; } /// Clone this AST node. virtual RefAST clone( void ) const; // Borland C++ builder seems to need the decl's of the first two... 
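// Typical wiring runs a TokenStreamHiddenTokenFilter between lexer and parser so
// that hidden tokens (e.g. whitespace/comments) remain reachable from the AST via
// getHiddenBefore()/getHiddenAfter(). Sketch only; MyLexer/MyParser and the
// WS/COMMENT token types are assumed to come from the user's grammar, and the
// parser's ASTFactory must additionally be set up to build this node type:
//
//   MyLexer lexer( std::cin );
//   antlr::TokenStreamHiddenTokenFilter filter( lexer );
//   filter.hide( MyLexer::WS );
//   filter.hide( MyLexer::COMMENT );
//   MyParser parser( filter );
//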
virtual void initialize(int t,const ANTLR_USE_NAMESPACE(std)string& txt); virtual void initialize(RefAST t); virtual void initialize(RefToken t); virtual RefToken getHiddenAfter() const { return hiddenAfter; } virtual RefToken getHiddenBefore() const { return hiddenBefore; } static RefAST factory(); static const char* const TYPE_NAME; protected: RefToken hiddenBefore,hiddenAfter; // references to hidden tokens }; typedef ASTRefCount RefCommonASTWithHiddenTokens; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CommonASTWithHiddenTokens_hpp__ antlr-2.7.7/lib/cpp/antlr/BitSet.hpp0000644000175000017500000000347610522211615017165 0ustar twernertwerner#ifndef INC_BitSet_hpp__ #define INC_BitSet_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/BitSet.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** A BitSet to replace java.util.BitSet. * Primary differences are that most set operators return new sets * as opposed to oring and anding "in place". Further, a number of * operations were added. I cannot contain a BitSet because there * is no way to access the internal bits (which I need for speed) * and, because it is final, I cannot subclass to add functionality. * Consider defining set degree. Without access to the bits, I must * call a method n times to test the ith bit...ack! * * Also seems like or() from util is wrong when size of incoming set is bigger * than this.length. * * This is a C++ version of the Java class described above, with only * a handful of the methods implemented, because we don't need the * others at runtime. It's really just a wrapper around vector, * which should probably be changed to a wrapper around bitset, once * bitset is more widely available. * * @author Terence Parr, MageLang Institute * @author
Pete Wells */ class ANTLR_API BitSet { private: ANTLR_USE_NAMESPACE(std)vector storage; public: BitSet( unsigned int nbits=64 ); BitSet( const unsigned long* bits_, unsigned int nlongs); ~BitSet(); void add( unsigned int el ); bool member( unsigned int el ) const; ANTLR_USE_NAMESPACE(std)vector toArray() const; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_BitSet_hpp__ antlr-2.7.7/lib/cpp/antlr/BaseAST.hpp0000644000175000017500000001101710522211615017203 0ustar twernertwerner#ifndef INC_BaseAST_hpp__ #define INC_BaseAST_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/BaseAST.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API BaseAST; typedef ASTRefCount RefBaseAST; class ANTLR_API BaseAST : public AST { public: BaseAST() : AST() { } BaseAST(const BaseAST& other) : AST(other) { } virtual ~BaseAST() { } /// Return the class name virtual const char* typeName( void ) const = 0; /// Clone this AST node. virtual RefAST clone( void ) const = 0; /// Is node t equal to this in terms of token type and text? virtual bool equals(RefAST t) const; /** Is t an exact structural and equals() match of this tree. The * 'this' reference is considered the start of a sibling list. */ virtual bool equalsList(RefAST t) const; /** Is 't' a subtree of this list? The siblings of the root are NOT ignored. */ virtual bool equalsListPartial(RefAST t) const; /** Is tree rooted at 'this' equal to 't'? The siblings of 'this' are * ignored. */ virtual bool equalsTree(RefAST t) const; /** Is 't' a subtree of the tree rooted at 'this'? The siblings of * 'this' are ignored. */ virtual bool equalsTreePartial(RefAST t) const; /** Walk the tree looking for all exact subtree matches. Return * an ASTEnumerator that lets the caller walk the list * of subtree roots found herein. */ virtual ANTLR_USE_NAMESPACE(std)vector findAll(RefAST t); /** Walk the tree looking for all subtrees. Return * an ASTEnumerator that lets the caller walk the list * of subtree roots found herein. */ virtual ANTLR_USE_NAMESPACE(std)vector findAllPartial(RefAST t); /// Add a node to the end of the child list for this node virtual void addChild(RefAST c) { if( !c ) return; RefBaseAST tmp = down; if (tmp) { while (tmp->right) tmp = tmp->right; tmp->right = c; } else down = c; } /** Get the number of child nodes of this node (shallow e.g. not of the * whole tree it spans). */ virtual size_t getNumberOfChildren() const; /// Get the first child of this node; null if no children virtual RefAST getFirstChild() const { return RefAST(down); } /// Get the next sibling in line after this one virtual RefAST getNextSibling() const { return RefAST(right); } /// Get the token text for this node virtual ANTLR_USE_NAMESPACE(std)string getText() const { return ""; } /// Get the token type for this node virtual int getType() const { return 0; } /// Remove all children virtual void removeChildren() { down = static_cast(static_cast(nullAST)); } /// Set the first child of a node. virtual void setFirstChild(RefAST c) { down = static_cast(static_cast(c)); } /// Set the next sibling after this one. 
virtual void setNextSibling(RefAST n) { right = static_cast(static_cast(n)); } /// Set the token text for this node virtual void setText(const ANTLR_USE_NAMESPACE(std)string& txt) { } /// Set the token type for this node virtual void setType(int type) { } #ifdef ANTLR_SUPPORT_XML /** print attributes of this node to 'out'. Override to customize XML * output. * @param out the stream to write the AST attributes to. */ virtual bool attributesToStream( ANTLR_USE_NAMESPACE(std)ostream& out ) const; /** Write this subtree to a stream. Overload this one to customize the XML * output for AST derived AST-types * @param output stream */ virtual void toStream( ANTLR_USE_NAMESPACE(std)ostream &out ) const; #endif /// Return string representation for the AST virtual ANTLR_USE_NAMESPACE(std)string toString() const { return getText(); } /// Print out a child sibling tree in LISP notation virtual ANTLR_USE_NAMESPACE(std)string toStringList() const; virtual ANTLR_USE_NAMESPACE(std)string toStringTree() const; protected: RefBaseAST down; RefBaseAST right; private: void doWorkForFindAll(ANTLR_USE_NAMESPACE(std)vector& v, RefAST target, bool partialMatch); }; /** Is node t equal to this in terms of token type and text? */ inline bool BaseAST::equals(RefAST t) const { if (!t) return false; return ((getType() == t->getType()) && (getText() == t->getText())); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_BaseAST_hpp__ antlr-2.7.7/lib/cpp/antlr/CommonAST.hpp0000644000175000017500000000355610522211615017572 0ustar twernertwerner#ifndef INC_CommonAST_hpp__ #define INC_CommonAST_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CommonAST.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API CommonAST : public BaseAST { public: CommonAST() : BaseAST() , ttype( Token::INVALID_TYPE ) , text() { } CommonAST( RefToken t ) : BaseAST() , ttype( t->getType() ) , text( t->getText() ) { } CommonAST( const CommonAST& other ) : BaseAST(other) , ttype(other.ttype) , text(other.text) { } virtual ~CommonAST() { } virtual const char* typeName( void ) const { return CommonAST::TYPE_NAME; } /// Clone this AST node. 
virtual RefAST clone( void ) const { CommonAST *ast = new CommonAST( *this ); return RefAST(ast); } virtual ANTLR_USE_NAMESPACE(std)string getText() const { return text; } virtual int getType() const { return ttype; } virtual void initialize( int t, const ANTLR_USE_NAMESPACE(std)string& txt ) { setType(t); setText(txt); } virtual void initialize( RefAST t ) { setType(t->getType()); setText(t->getText()); } virtual void initialize( RefToken t ) { setType(t->getType()); setText(t->getText()); } #ifdef ANTLR_SUPPORT_XML virtual void initialize( ANTLR_USE_NAMESPACE(std)istream& in ); #endif virtual void setText( const ANTLR_USE_NAMESPACE(std)string& txt ) { text = txt; } virtual void setType( int type ) { ttype = type; } static RefAST factory(); static const char* const TYPE_NAME; protected: int ttype; ANTLR_USE_NAMESPACE(std)string text; }; typedef ASTRefCount RefCommonAST; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CommonAST_hpp__ antlr-2.7.7/lib/cpp/antlr/RecognitionException.hpp0000644000175000017500000000337010522211615022123 0ustar twernertwerner#ifndef INC_RecognitionException_hpp__ # define INC_RecognitionException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/RecognitionException.hpp#2 $ */ # include # include # ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { # endif class ANTLR_API RecognitionException : public ANTLRException { public: RecognitionException(); RecognitionException(const ANTLR_USE_NAMESPACE(std)string& s); RecognitionException(const ANTLR_USE_NAMESPACE(std)string& s, const ANTLR_USE_NAMESPACE(std)string& fileName, int line, int column ); virtual ~RecognitionException() throw() { } /// Return file where mishap occurred. virtual ANTLR_USE_NAMESPACE(std)string getFilename() const throw() { return fileName; } /** * @return the line number that this exception happened on. */ virtual int getLine() const throw() { return line; } /** * @return the column number that this exception happened on. */ virtual int getColumn() const throw() { return column; } /// Return complete error message with line/column number info (if present) virtual ANTLR_USE_NAMESPACE(std)string toString() const; /// See what file/line/column info is present and return it as a string virtual ANTLR_USE_NAMESPACE(std)string getFileLineColumnString() const; protected: ANTLR_USE_NAMESPACE(std)string fileName; // not used by treeparsers int line; // not used by treeparsers int column; // not used by treeparsers }; # ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } # endif #endif //INC_RecognitionException_hpp__ antlr-2.7.7/lib/cpp/antlr/CharBuffer.hpp0000644000175000017500000000267210522211615017777 0ustar twernertwerner#ifndef INC_CharBuffer_hpp__ #define INC_CharBuffer_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CharBuffer.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /**A Stream of characters fed to the lexer from a InputStream that can * be rewound via mark()/rewind() methods. *
* A dynamic array is used to buffer up all the input characters. Normally, * "k" characters are stored in the buffer. More characters may be stored * during guess mode (testing syntactic predicate), or when LT(i>k) is * referenced. * Consumption of characters is deferred. In other words, reading the next * character is not done by consume(), but deferred until needed by LA or LT. *
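 *
 * Sketch of the mark()/rewind() protocol inherited from InputBuffer
 * (illustrative only):
 *
 *   antlr::CharBuffer input( std::cin );
 *   unsigned int m = input.mark();   // remember the current position
 *   // ... speculatively LA()/consume() characters ...
 *   input.rewind( m );               // back up if the speculation fails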
* * @see antlr.CharQueue */ class ANTLR_API CharBuffer : public InputBuffer { public: /// Create a character buffer CharBuffer( ANTLR_USE_NAMESPACE(std)istream& input ); /// Get the next character from the stream int getChar(); protected: // character source ANTLR_USE_NAMESPACE(std)istream& input; private: // NOTE: Unimplemented CharBuffer(const CharBuffer& other); CharBuffer& operator=(const CharBuffer& other); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CharBuffer_hpp__ antlr-2.7.7/lib/cpp/antlr/MismatchedCharException.hpp0000644000175000017500000000370010522211615022514 0ustar twernertwerner#ifndef INC_MismatchedCharException_hpp__ #define INC_MismatchedCharException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/MismatchedCharException.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class CharScanner; class ANTLR_API MismatchedCharException : public RecognitionException { public: // Types of chars #ifndef NO_STATIC_CONSTS static const int CHAR = 1; static const int NOT_CHAR = 2; static const int RANGE = 3; static const int NOT_RANGE = 4; static const int SET = 5; static const int NOT_SET = 6; #else enum { CHAR = 1, NOT_CHAR = 2, RANGE = 3, NOT_RANGE = 4, SET = 5, NOT_SET = 6 }; #endif public: // One of the above int mismatchType; // what was found on the input stream int foundChar; // For CHAR/NOT_CHAR and RANGE/NOT_RANGE int expecting; // For RANGE/NOT_RANGE (expecting is lower bound of range) int upper; // For SET/NOT_SET BitSet set; protected: // who knows...they may want to ask scanner questions CharScanner* scanner; public: MismatchedCharException(); // Expected range / not range MismatchedCharException( int c, int lower, int upper_, bool matchNot, CharScanner* scanner_ ); // Expected token / not token MismatchedCharException( int c, int expecting_, bool matchNot, CharScanner* scanner_ ); // Expected BitSet / not BitSet MismatchedCharException( int c, BitSet set_, bool matchNot, CharScanner* scanner_ ); ~MismatchedCharException() throw() {} /** * Returns a clean error message (no line number/column information) */ ANTLR_USE_NAMESPACE(std)string getMessage() const; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_MismatchedCharException_hpp__ antlr-2.7.7/lib/cpp/antlr/CharStreamIOException.hpp0000644000175000017500000000144510522211615022125 0ustar twernertwerner#ifndef INC_CharStreamIOException_hpp__ #define INC_CharStreamIOException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CharStreamIOException.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API CharStreamIOException : public CharStreamException { public: ANTLR_USE_NAMESPACE(std)exception io; CharStreamIOException(ANTLR_USE_NAMESPACE(std)exception& e) : CharStreamException(e.what()), io(e) {} ~CharStreamIOException() throw() {} }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CharStreamIOException_hpp__ antlr-2.7.7/lib/cpp/antlr/ASTRefCount.hpp0000644000175000017500000000331410522211615020057 0ustar twernertwerner#ifndef INC_ASTRefCount_hpp__ # define INC_ASTRefCount_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * 
Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/ASTRefCount.hpp#2 $ */ # include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class AST; struct ANTLR_API ASTRef { AST* const ptr; unsigned int count; ASTRef(AST* p); ~ASTRef(); ASTRef* increment() { ++count; return this; } bool decrement() { return (--count==0); } static ASTRef* getRef(const AST* p); private: ASTRef( const ASTRef& ); ASTRef& operator=( const ASTRef& ); }; template class ANTLR_API ASTRefCount { private: ASTRef* ref; public: ASTRefCount(const AST* p=0) : ref(p ? ASTRef::getRef(p) : 0) { } ASTRefCount(const ASTRefCount& other) : ref(other.ref ? other.ref->increment() : 0) { } ~ASTRefCount() { if (ref && ref->decrement()) delete ref; } ASTRefCount& operator=(AST* other) { ASTRef* tmp = ASTRef::getRef(other); if (ref && ref->decrement()) delete ref; ref=tmp; return *this; } ASTRefCount& operator=(const ASTRefCount& other) { if( other.ref != ref ) { ASTRef* tmp = other.ref ? other.ref->increment() : 0; if (ref && ref->decrement()) delete ref; ref=tmp; } return *this; } operator T* () const { return ref ? static_cast(ref->ptr) : 0; } T* operator->() const { return ref ? static_cast(ref->ptr) : 0; } T* get() const { return ref ? static_cast(ref->ptr) : 0; } }; typedef ASTRefCount RefAST; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_ASTRefCount_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenStreamBasicFilter.hpp0000644000175000017500000000205110522211615022323 0ustar twernertwerner#ifndef INC_TokenStreamBasicFilter_hpp__ #define INC_TokenStreamBasicFilter_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenStreamBasicFilter.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** This object is a TokenStream that passes through all * tokens except for those that you tell it to discard. * There is no buffering of the tokens. */ class ANTLR_API TokenStreamBasicFilter : public TokenStream { /** The set of token types to discard */ protected: BitSet discardMask; /** The input stream */ protected: TokenStream* input; public: TokenStreamBasicFilter(TokenStream& input_); void discard(int ttype); void discard(const BitSet& mask); RefToken nextToken(); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenStreamBasicFilter_hpp__ antlr-2.7.7/lib/cpp/antlr/LexerSharedInputState.hpp0000644000175000017500000000662710522211615022223 0ustar twernertwerner#ifndef INC_LexerSharedInputState_hpp__ #define INC_LexerSharedInputState_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/LexerSharedInputState.hpp#2 $ */ #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** This object contains the data associated with an * input stream of characters. Multiple lexers * share a single LexerSharedInputState to lex * the same input stream. */ class ANTLR_API LexerInputState { public: /** Construct a new LexerInputState * @param inbuf the InputBuffer to read from. The object is deleted together * with the LexerInputState object. 
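 *
 * Minimal sketch (illustrative only; MyLexer stands for an ANTLR-generated
 * lexer class):
 *
 *   antlr::LexerSharedInputState state(
 *       new antlr::LexerInputState( new antlr::CharBuffer( std::cin ) ) );
 *   MyLexer lexer( state );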
*/ LexerInputState(InputBuffer* inbuf) : column(1) , line(1) , tokenStartColumn(1) , tokenStartLine(1) , guessing(0) , filename("") , input(inbuf) , inputResponsible(true) { } /** Construct a new LexerInputState * @param inbuf the InputBuffer to read from. */ LexerInputState(InputBuffer& inbuf) : column(1) , line(1) , tokenStartColumn(1) , tokenStartLine(1) , guessing(0) , filename("") , input(&inbuf) , inputResponsible(false) { } /** Construct a new LexerInputState * @param in an istream to read from. * @see antlr.CharBuffer */ LexerInputState(ANTLR_USE_NAMESPACE(std)istream& in) : column(1) , line(1) , tokenStartColumn(1) , tokenStartLine(1) , guessing(0) , filename("") , input(new CharBuffer(in)) , inputResponsible(true) { } /** Reset the LexerInputState with a specified stream and filename. * This method is a hack, dunno what I was thinking when I added it. * This should actually be done in a subclass. * @deprecated */ virtual void initialize( ANTLR_USE_NAMESPACE(std)istream& in, const char* file = "" ) { column = 1; line = 1; tokenStartColumn = 1; tokenStartLine = 1; guessing = 0; filename = file; if( input && inputResponsible ) delete input; input = new CharBuffer(in); inputResponsible = true; } /** Reset the LexerInputState to initial state. * The underlying InputBuffer is also reset. */ virtual void reset( void ) { column = 1; line = 1; tokenStartColumn = 1; tokenStartLine = 1; guessing = 0; input->reset(); } /** Set the file position of the SharedLexerInputState. * @param line_ line number to be set * @param column_ column number to be set */ void setPosition( int line_, int column_ ) { line = line_; column = column_; } virtual ~LexerInputState() { if (inputResponsible) delete input; } int column; int line; int tokenStartColumn; int tokenStartLine; int guessing; /** What file (if known) caused the problem? */ ANTLR_USE_NAMESPACE(std)string filename; InputBuffer& getInput(); private: /// Input buffer we use InputBuffer* input; /// Who is responsible for cleaning up the InputBuffer? bool inputResponsible; // we don't want these: LexerInputState(const LexerInputState&); LexerInputState& operator=(const LexerInputState&); }; inline InputBuffer& LexerInputState::getInput() { return *input; } /// A reference counted LexerInputState object typedef RefCount LexerSharedInputState; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_LexerSharedInputState_hpp__ antlr-2.7.7/lib/cpp/antlr/CharScanner.hpp0000644000175000017500000003265210522211615020160 0ustar twernertwerner#ifndef INC_CharScanner_hpp__ #define INC_CharScanner_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CharScanner.hpp#2 $ */ #include #include #ifdef HAS_NOT_CCTYPE_H #include #else #include #endif #if ( _MSC_VER == 1200 ) // VC6 seems to need this // note that this is not a standard C++ include file. # include #endif #include #include #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API CharScanner; ANTLR_C_USING(tolower) #ifdef ANTLR_REALLY_NO_STRCASECMP // Apparently, neither strcasecmp nor stricmp is standard, and Codewarrior // on the mac has neither... 
inline int strcasecmp(const char *s1, const char *s2) { while (true) { char c1 = tolower(*s1++), c2 = tolower(*s2++); if (c1 < c2) return -1; if (c1 > c2) return 1; if (c1 == 0) return 0; } } #else #ifdef NO_STRCASECMP ANTLR_C_USING(stricmp) #else ANTLR_C_USING(strcasecmp) #endif #endif /** Functor for the literals map */ class ANTLR_API CharScannerLiteralsLess : public ANTLR_USE_NAMESPACE(std)binary_function { private: const CharScanner* scanner; public: #ifdef NO_TEMPLATE_PARTS CharScannerLiteralsLess() {} // not really used, definition to appease MSVC #endif CharScannerLiteralsLess(const CharScanner* theScanner) : scanner(theScanner) { } bool operator() (const ANTLR_USE_NAMESPACE(std)string& x,const ANTLR_USE_NAMESPACE(std)string& y) const; // defaults are good enough.. // CharScannerLiteralsLess(const CharScannerLiteralsLess&); // CharScannerLiteralsLess& operator=(const CharScannerLiteralsLess&); }; /** Superclass of generated lexers */ class ANTLR_API CharScanner : public TokenStream { protected: typedef RefToken (*factory_type)(); public: CharScanner(InputBuffer& cb, bool case_sensitive ); CharScanner(InputBuffer* cb, bool case_sensitive ); CharScanner(const LexerSharedInputState& state, bool case_sensitive ); virtual ~CharScanner() { } virtual int LA(unsigned int i); virtual void append(char c) { if (saveConsumedInput) { size_t l = text.length(); if ((l%256) == 0) text.reserve(l+256); text.replace(l,0,&c,1); } } virtual void append(const ANTLR_USE_NAMESPACE(std)string& s) { if( saveConsumedInput ) text += s; } virtual void commit() { inputState->getInput().commit(); } /** called by the generated lexer to do error recovery, override to * customize the behaviour. */ virtual void recover(const RecognitionException& ex, const BitSet& tokenSet) { consume(); consumeUntil(tokenSet); } virtual void consume() { if (inputState->guessing == 0) { int c = LA(1); if (caseSensitive) { append(c); } else { // use input.LA(), not LA(), to get original case // CharScanner.LA() would toLower it. append(inputState->getInput().LA(1)); } // RK: in a sense I don't like this automatic handling. 
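// (a '\t' advances the column to the next tab stop via tab(); any other
// character just bumps the column by one)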
if (c == '\t') tab(); else inputState->column++; } inputState->getInput().consume(); } /** Consume chars until one matches the given char */ virtual void consumeUntil(int c) { for(;;) { int la_1 = LA(1); if( la_1 == EOF_CHAR || la_1 == c ) break; consume(); } } /** Consume chars until one matches the given set */ virtual void consumeUntil(const BitSet& set) { for(;;) { int la_1 = LA(1); if( la_1 == EOF_CHAR || set.member(la_1) ) break; consume(); } } /// Mark the current position and return a id for it virtual unsigned int mark() { return inputState->getInput().mark(); } /// Rewind the scanner to a previously marked position virtual void rewind(unsigned int pos) { inputState->getInput().rewind(pos); } /// See if input contains character 'c' throw MismatchedCharException if not virtual void match(int c) { int la_1 = LA(1); if ( la_1 != c ) throw MismatchedCharException(la_1, c, false, this); consume(); } /** See if input contains element from bitset b * throw MismatchedCharException if not */ virtual void match(const BitSet& b) { int la_1 = LA(1); if ( !b.member(la_1) ) throw MismatchedCharException( la_1, b, false, this ); consume(); } /** See if input contains string 's' throw MismatchedCharException if not * @note the string cannot match EOF */ virtual void match( const char* s ) { while( *s != '\0' ) { // the & 0xFF is here to prevent sign extension lateron int la_1 = LA(1), c = (*s++ & 0xFF); if ( la_1 != c ) throw MismatchedCharException(la_1, c, false, this); consume(); } } /** See if input contains string 's' throw MismatchedCharException if not * @note the string cannot match EOF */ virtual void match(const ANTLR_USE_NAMESPACE(std)string& s) { size_t len = s.length(); for (size_t i = 0; i < len; i++) { // the & 0xFF is here to prevent sign extension lateron int la_1 = LA(1), c = (s[i] & 0xFF); if ( la_1 != c ) throw MismatchedCharException(la_1, c, false, this); consume(); } } /** See if input does not contain character 'c' * throw MismatchedCharException if not */ virtual void matchNot(int c) { int la_1 = LA(1); if ( la_1 == c ) throw MismatchedCharException(la_1, c, true, this); consume(); } /** See if input contains character in range c1-c2 * throw MismatchedCharException if not */ virtual void matchRange(int c1, int c2) { int la_1 = LA(1); if ( la_1 < c1 || la_1 > c2 ) throw MismatchedCharException(la_1, c1, c2, false, this); consume(); } virtual bool getCaseSensitive() const { return caseSensitive; } virtual void setCaseSensitive(bool t) { caseSensitive = t; } virtual bool getCaseSensitiveLiterals() const=0; /// Get the line the scanner currently is in (starts at 1) virtual int getLine() const { return inputState->line; } /// set the line number virtual void setLine(int l) { inputState->line = l; } /// Get the column the scanner currently is in (starts at 1) virtual int getColumn() const { return inputState->column; } /// set the column number virtual void setColumn(int c) { inputState->column = c; } /// get the filename for the file currently used virtual const ANTLR_USE_NAMESPACE(std)string& getFilename() const { return inputState->filename; } /// Set the filename the scanner is using (used in error messages) virtual void setFilename(const ANTLR_USE_NAMESPACE(std)string& f) { inputState->filename = f; } virtual bool getCommitToPath() const { return commitToPath; } virtual void setCommitToPath(bool commit) { commitToPath = commit; } /** return a copy of the current text buffer */ virtual const ANTLR_USE_NAMESPACE(std)string& getText() const { return text; } virtual void 
setText(const ANTLR_USE_NAMESPACE(std)string& s) { text = s; } virtual void resetText() { text = ""; inputState->tokenStartColumn = inputState->column; inputState->tokenStartLine = inputState->line; } virtual RefToken getTokenObject() const { return _returnToken; } /** Used to keep track of line breaks, needs to be called from * within generated lexers when a \n \r is encountered. */ virtual void newline() { ++inputState->line; inputState->column = 1; } /** Advance the current column number by an appropriate amount according * to the tabsize. This method needs to be explicitly called from the * lexer rules encountering tabs. */ virtual void tab() { int c = getColumn(); int nc = ( ((c-1)/tabsize) + 1) * tabsize + 1; // calculate tab stop setColumn( nc ); } /// set the tabsize. Returns the old tabsize int setTabsize( int size ) { int oldsize = tabsize; tabsize = size; return oldsize; } /// Return the tabsize used by the scanner int getTabSize() const { return tabsize; } /** Report exception errors caught in nextToken() */ virtual void reportError(const RecognitionException& e); /** Parser error-reporting function can be overridden in subclass */ virtual void reportError(const ANTLR_USE_NAMESPACE(std)string& s); /** Parser warning-reporting function can be overridden in subclass */ virtual void reportWarning(const ANTLR_USE_NAMESPACE(std)string& s); virtual InputBuffer& getInputBuffer() { return inputState->getInput(); } virtual LexerSharedInputState getInputState() { return inputState; } /** set the input state for the lexer. * @note state is a reference counted object, hence no reference */ virtual void setInputState(LexerSharedInputState state) { inputState = state; } /// Set the factory for created tokens virtual void setTokenObjectFactory(factory_type factory) { tokenFactory = factory; } /** Test the token text against the literals table * Override this method to perform a different literals test */ virtual int testLiteralsTable(int ttype) const { ANTLR_USE_NAMESPACE(std)map::const_iterator i = literals.find(text); if (i != literals.end()) ttype = (*i).second; return ttype; } /** Test the text passed in against the literals table * Override this method to perform a different literals test * This is used primarily when you want to test a portion of * a token */ virtual int testLiteralsTable(const ANTLR_USE_NAMESPACE(std)string& txt,int ttype) const { ANTLR_USE_NAMESPACE(std)map::const_iterator i = literals.find(txt); if (i != literals.end()) ttype = (*i).second; return ttype; } /// Override this method to get more specific case handling virtual int toLower(int c) const { // test on EOF_CHAR for buggy (?) STLPort tolower (or HPUX tolower?) // also VC++ 6.0 does this. (see fix 422 (is reverted by this fix) // this one is more structural. Maybe make this configurable. return (c == EOF_CHAR ? EOF_CHAR : tolower(c)); } /** This method is called by YourLexer::nextToken() when the lexer has * hit EOF condition. EOF is NOT a character. * This method is not called if EOF is reached during * syntactic predicate evaluation or during evaluation * of normal lexical rules, which presumably would be * an IOException. This traps the "normal" EOF condition. * * uponEOF() is called after the complete evaluation of * the previous token and only if your parser asks * for another token beyond that last non-EOF token. * * You might want to throw token or char stream exceptions * like: "Heh, premature eof" or a retry stream exception * ("I found the end of this file, go back to referencing file"). 
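	 *
	 * An override might look like this (illustrative sketch only; the include
	 * stack and selectInput() helper are hypothetical, not part of this class):
	 *
	 *   void MyLexer::uponEOF()
	 *   {
	 *      if( !includeStack.empty() )
	 *      {
	 *         selectInput( includeStack.top() );         // go back to the including file
	 *         includeStack.pop();
	 *         throw antlr::TokenStreamRetryException();  // ask nextToken() to retry
	 *      }
	 *   }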
*/ virtual void uponEOF() { } /// Methods used to change tracing behavior virtual void traceIndent(); virtual void traceIn(const char* rname); virtual void traceOut(const char* rname); #ifndef NO_STATIC_CONSTS static const int EOF_CHAR = EOF; #else enum { EOF_CHAR = EOF }; #endif protected: ANTLR_USE_NAMESPACE(std)string text; ///< Text of current token /// flag indicating wether consume saves characters bool saveConsumedInput; factory_type tokenFactory; ///< Factory for tokens bool caseSensitive; ///< Is this lexer case sensitive ANTLR_USE_NAMESPACE(std)map literals; // set by subclass RefToken _returnToken; ///< used to return tokens w/o using return val /// Input state, gives access to input stream, shared among different lexers LexerSharedInputState inputState; /** Used during filter mode to indicate that path is desired. * A subsequent scan error will report an error as usual * if acceptPath=true; */ bool commitToPath; int tabsize; ///< tab size the scanner uses. /// Create a new RefToken of type t virtual RefToken makeToken(int t) { RefToken tok = tokenFactory(); tok->setType(t); tok->setColumn(inputState->tokenStartColumn); tok->setLine(inputState->tokenStartLine); return tok; } /** Tracer class, used when -traceLexer is passed to antlr */ class Tracer { private: CharScanner* parser; const char* text; Tracer(const Tracer& other); // undefined Tracer& operator=(const Tracer& other); // undefined public: Tracer( CharScanner* p,const char* t ) : parser(p), text(t) { parser->traceIn(text); } ~Tracer() { parser->traceOut(text); } }; int traceDepth; private: CharScanner( const CharScanner& other ); // undefined CharScanner& operator=( const CharScanner& other ); // undefined #ifndef NO_STATIC_CONSTS static const int NO_CHAR = 0; #else enum { NO_CHAR = 0 }; #endif }; inline int CharScanner::LA(unsigned int i) { int c = inputState->getInput().LA(i); if ( caseSensitive ) return c; else return toLower(c); // VC 6 tolower bug caught in toLower. 
} inline bool CharScannerLiteralsLess::operator() (const ANTLR_USE_NAMESPACE(std)string& x,const ANTLR_USE_NAMESPACE(std)string& y) const { if (scanner->getCaseSensitiveLiterals()) return ANTLR_USE_NAMESPACE(std)less()(x,y); else { #ifdef NO_STRCASECMP return (stricmp(x.c_str(),y.c_str())<0); #else return (strcasecmp(x.c_str(),y.c_str())<0); #endif } } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CharScanner_hpp__ antlr-2.7.7/lib/cpp/antlr/CommonToken.hpp0000644000175000017500000000324310522211615020214 0ustar twernertwerner#ifndef INC_CommonToken_hpp__ #define INC_CommonToken_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CommonToken.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API CommonToken : public Token { public: CommonToken(); CommonToken(int t, const ANTLR_USE_NAMESPACE(std)string& txt); CommonToken(const ANTLR_USE_NAMESPACE(std)string& s); /// return contents of token virtual ANTLR_USE_NAMESPACE(std)string getText() const { return text; } /// set contents of token virtual void setText(const ANTLR_USE_NAMESPACE(std)string& s) { text = s; } /** get the line the token is at (starting at 1) * @see CharScanner::newline() * @see CharScanner::tab() */ virtual int getLine() const { return line; } /** gt the column the token is at (starting at 1) * @see CharScanner::newline() * @see CharScanner::tab() */ virtual int getColumn() const { return col; } /// set line for token virtual void setLine(int l) { line = l; } /// set column for token virtual void setColumn(int c) { col = c; } virtual ANTLR_USE_NAMESPACE(std)string toString() const; static RefToken factory(); protected: // most tokens will want line and text information int line; int col; ANTLR_USE_NAMESPACE(std)string text; private: CommonToken(const CommonToken&); const CommonToken& operator=(const CommonToken&); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CommonToken_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenStreamRetryException.hpp0000644000175000017500000000130410522211615023120 0ustar twernertwerner#ifndef INC_TokenStreamRetryException_hpp__ #define INC_TokenStreamRetryException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenStreamRetryException.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class TokenStreamRetryException : public TokenStreamException { public: TokenStreamRetryException() {} ~TokenStreamRetryException() throw() {} }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenStreamRetryException_hpp__ antlr-2.7.7/lib/cpp/antlr/ASTArray.hpp0000644000175000017500000000160410522211615017410 0ustar twernertwerner#ifndef INC_ASTArray_hpp__ #define INC_ASTArray_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/ASTArray.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** ASTArray is a class that allows ANTLR to * generate code that can create and initialize an array * in one expression, like: * (new ASTArray(3))->add(x)->add(y)->add(z) */ class ANTLR_API ASTArray { 
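	// Note: 'capacity' passed to the constructor must be at least the number
	// of subsequent add() calls; add() writes array[size++] without a bounds
	// check, so over-filling the array is undefined behaviour.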
public: int size; // = 0; ANTLR_USE_NAMESPACE(std)vector array; ASTArray(int capacity) : size(0) , array(capacity) { } ASTArray* add(RefAST node) { array[size++] = node; return this; } }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_ASTArray_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenStream.hpp0000644000175000017500000000131610522211615020216 0ustar twernertwerner#ifndef INC_TokenStream_hpp__ #define INC_TokenStream_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenStream.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** This interface allows any object to pretend it is a stream * of tokens. * @author Terence Parr, MageLang Institute */ class ANTLR_API TokenStream { public: virtual RefToken nextToken()=0; virtual ~TokenStream() { } }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenStream_hpp__ antlr-2.7.7/lib/cpp/antlr/CircularQueue.hpp0000644000175000017500000000414710522211615020540 0ustar twernertwerner#ifndef INC_CircularQueue_hpp__ #define INC_CircularQueue_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CircularQueue.hpp#2 $ */ #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif // Resize every 5000 items #define OFFSET_MAX_RESIZE 5000 template class ANTLR_API CircularQueue { public: CircularQueue() : storage() , m_offset(0) { } ~CircularQueue() { } /// Clear the queue inline void clear( void ) { m_offset = 0; storage.clear(); } /// @todo this should use at or should have a check inline T elementAt( size_t idx ) const { return storage[idx+m_offset]; } void removeFirst() { if (m_offset >= OFFSET_MAX_RESIZE) { storage.erase( storage.begin(), storage.begin() + m_offset + 1 ); m_offset = 0; } else ++m_offset; } inline void removeItems( size_t nb ) { // it would be nice if we would not get called with nb > entries // (or to be precise when entries() == 0) // This case is possible when lexer/parser::recover() calls // consume+consumeUntil when the queue is empty. // In recover the consume says to prepare to read another // character/token. Then in the subsequent consumeUntil the // LA() call will trigger // syncConsume which calls this method *before* the same queue // has been sufficiently filled. 
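		// Clamp nb to what is actually queued, then either really erase the
		// consumed prefix (once m_offset has grown past OFFSET_MAX_RESIZE) or
		// just advance m_offset and defer the physical erase.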
if( nb > entries() ) nb = entries(); if (m_offset >= OFFSET_MAX_RESIZE) { storage.erase( storage.begin(), storage.begin() + m_offset + nb ); m_offset = 0; } else m_offset += nb; } inline void append(const T& t) { storage.push_back(t); } inline size_t entries() const { return storage.size() - m_offset; } private: ANTLR_USE_NAMESPACE(std)vector storage; size_t m_offset; CircularQueue(const CircularQueue&); const CircularQueue& operator=(const CircularQueue&); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CircularQueue_hpp__ antlr-2.7.7/lib/cpp/antlr/TreeParserSharedInputState.hpp0000644000175000017500000000223010522211615023202 0ustar twernertwerner#ifndef INC_TreeParserSharedInputState_hpp__ #define INC_TreeParserSharedInputState_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TreeParserSharedInputState.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** This object contains the data associated with an * input AST. Multiple parsers * share a single TreeParserSharedInputState to parse * the same tree or to have the parser walk multiple * trees. */ class ANTLR_API TreeParserInputState { public: TreeParserInputState() : guessing(0) {} virtual ~TreeParserInputState() {} public: /** Are we guessing (guessing>0)? */ int guessing; //= 0; private: // we don't want these: TreeParserInputState(const TreeParserInputState&); TreeParserInputState& operator=(const TreeParserInputState&); }; typedef RefCount TreeParserSharedInputState; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TreeParserSharedInputState_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenBuffer.hpp0000644000175000017500000000551710522211615020203 0ustar twernertwerner#ifndef INC_TokenBuffer_hpp__ #define INC_TokenBuffer_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenBuffer.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /**A Stream of Token objects fed to the parser from a TokenStream that can * be rewound via mark()/rewind() methods. *

 * A dynamic array is used to buffer up all the input tokens. Normally,
 * "k" tokens are stored in the buffer. More tokens may be stored during
 * guess mode (testing syntactic predicate), or when LT(i>k) is referenced.
 * Consumption of tokens is deferred. In other words, reading the next
 * token is not done by consume(), but deferred until needed by LA or LT.
 *
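 * A typical setup looks like this (sketch; MyLexer, MyParser and the
 * startRule() entry point are generated/user names, not part of this header):
 *
 *   MyLexer lexer( std::cin );
 *   antlr::TokenBuffer buffer( lexer );
 *   MyParser parser( buffer );
 *   parser.startRule();
 *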

* * @todo: see if we can integrate this one with InputBuffer into one template * or so. * * @see antlr.Token * @see antlr.TokenStream * @see antlr.TokenQueue */ class ANTLR_API TokenBuffer { public: /** Create a token buffer */ TokenBuffer(TokenStream& input_); virtual ~TokenBuffer(); /// Reset the input buffer to empty state inline void reset( void ) { nMarkers = 0; markerOffset = 0; numToConsume = 0; queue.clear(); } /** Get a lookahead token value */ int LA( unsigned int i ); /** Get a lookahead token */ RefToken LT( unsigned int i ); /** Return an integer marker that can be used to rewind the buffer to * its current state. */ unsigned int mark(); /**Rewind the token buffer to a marker. * @param mark Marker returned previously from mark() */ void rewind(unsigned int mark); /** Mark another token for deferred consumption */ inline void consume() { numToConsume++; } /// Return the number of entries in the TokenBuffer virtual unsigned int entries() const; private: /** Ensure that the token buffer is sufficiently full */ void fill(unsigned int amount); /** Sync up deferred consumption */ void syncConsume(); protected: /// Token source TokenStream& input; /// Number of active markers unsigned int nMarkers; /// Additional offset used when markers are active unsigned int markerOffset; /// Number of calls to consume() since last LA() or LT() call unsigned int numToConsume; /// Circular queue with Tokens CircularQueue queue; private: TokenBuffer(const TokenBuffer& other); const TokenBuffer& operator=(const TokenBuffer& other); }; /** Sync up deferred consumption */ inline void TokenBuffer::syncConsume() { if (numToConsume > 0) { if (nMarkers > 0) markerOffset += numToConsume; else queue.removeItems( numToConsume ); numToConsume = 0; } } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenBuffer_hpp__ antlr-2.7.7/lib/cpp/antlr/Parser.hpp0000644000175000017500000002125210522211615017217 0ustar twernertwerner#ifndef INC_Parser_hpp__ #define INC_Parser_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/Parser.hpp#2 $ */ #include #include #include #include #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif extern bool DEBUG_PARSER; /** A generic ANTLR parser (LL(k) for k>=1) containing a bunch of * utility routines useful at any lookahead depth. We distinguish between * the LL(1) and LL(k) parsers because of efficiency. This may not be * necessary in the near future. * * Each parser object contains the state of the parse including a lookahead * cache (the form of which is determined by the subclass), whether or * not the parser is in guess mode, where tokens come from, etc... * *

* During guess mode, the current lookahead token(s) and token type(s) * cache must be saved because the token stream may not have been informed * to save the token (via mark) before the try block. * Guessing is started by: *

    *
 *  1. saving the lookahead cache.
 *  2. marking the current position in the TokenBuffer.
 *  3. increasing the guessing level.
* * After guessing, the parser state is restored by: *
    *
 *  1. restoring the lookahead cache.
 *  2. rewinding the TokenBuffer.
 *  3. decreasing the guessing level.
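 *
 * In generated code this roughly corresponds to (illustrative sketch only):
 *
 *   unsigned int _m = mark();   // remember the TokenBuffer position
 *   inputState->guessing++;     // raise the guessing level
 *   // ... speculatively match the predicate ...
 *   rewind(_m);                 // rewind the TokenBuffer
 *   inputState->guessing--;     // lower the guessing level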
* * @see antlr.Token * @see antlr.TokenBuffer * @see antlr.TokenStream * @see antlr.LL1Parser * @see antlr.LLkParser * * @todo add constructors with ASTFactory. */ class ANTLR_API Parser { protected: Parser(TokenBuffer& input) : inputState(new ParserInputState(input)), astFactory(0), traceDepth(0) { } Parser(TokenBuffer* input) : inputState(new ParserInputState(input)), astFactory(0), traceDepth(0) { } Parser(const ParserSharedInputState& state) : inputState(state), astFactory(0), traceDepth(0) { } public: virtual ~Parser() { } /** Return the token type of the ith token of lookahead where i=1 * is the current token being examined by the parser (i.e., it * has not been matched yet). */ virtual int LA(unsigned int i)=0; /// Return the i-th token of lookahead virtual RefToken LT(unsigned int i)=0; /** DEPRECATED! Specify the factory to be used during tree building. (Compulsory) * Setting the factory is nowadays compulsory. * @see setASTFactory */ virtual void setASTNodeFactory( ASTFactory *factory ) { astFactory = factory; } /** Specify the factory to be used during tree building. (Compulsory) * Setting the factory is nowadays compulsory. */ virtual void setASTFactory( ASTFactory *factory ) { astFactory = factory; } /** Return a pointer to the ASTFactory used. * So you might use it in subsequent treewalkers or to reload AST's * from disk. */ virtual ASTFactory* getASTFactory() { return astFactory; } /** Get the root AST node of the generated AST. When using a custom AST type * or heterogenous AST's, you'll have to convert it to the right type * yourself. */ virtual RefAST getAST() = 0; /// Return the filename of the input file. virtual inline ANTLR_USE_NAMESPACE(std)string getFilename() const { return inputState->filename; } /// Set the filename of the input file (used for error reporting). virtual void setFilename(const ANTLR_USE_NAMESPACE(std)string& f) { inputState->filename = f; } virtual void setInputState(ParserSharedInputState state) { inputState = state; } virtual inline ParserSharedInputState getInputState() const { return inputState; } /// Get another token object from the token stream virtual void consume()=0; /// Consume tokens until one matches the given token virtual void consumeUntil(int tokenType) { while (LA(1) != Token::EOF_TYPE && LA(1) != tokenType) consume(); } /// Consume tokens until one matches the given token set virtual void consumeUntil(const BitSet& set) { while (LA(1) != Token::EOF_TYPE && !set.member(LA(1))) consume(); } /** Make sure current lookahead symbol matches token type t. * Throw an exception upon mismatch, which is catch by either the * error handler or by the syntactic predicate. 
*/ virtual void match(int t) { if ( DEBUG_PARSER ) { traceIndent(); ANTLR_USE_NAMESPACE(std)cout << "enter match(" << t << ") with LA(1)=" << LA(1) << ANTLR_USE_NAMESPACE(std)endl; } if ( LA(1) != t ) { if ( DEBUG_PARSER ) { traceIndent(); ANTLR_USE_NAMESPACE(std)cout << "token mismatch: " << LA(1) << "!=" << t << ANTLR_USE_NAMESPACE(std)endl; } throw MismatchedTokenException(getTokenNames(), getNumTokens(), LT(1), t, false, getFilename()); } else { // mark token as consumed -- fetch next token deferred until LA/LT consume(); } } virtual void matchNot(int t) { if ( LA(1)==t ) { // Throws inverted-sense exception throw MismatchedTokenException(getTokenNames(), getNumTokens(), LT(1), t, true, getFilename()); } else { // mark token as consumed -- fetch next token deferred until LA/LT consume(); } } /** Make sure current lookahead symbol matches the given set * Throw an exception upon mismatch, which is catch by either the * error handler or by the syntactic predicate. */ virtual void match(const BitSet& b) { if ( DEBUG_PARSER ) { traceIndent(); ANTLR_USE_NAMESPACE(std)cout << "enter match(" << "bitset" /*b.toString()*/ << ") with LA(1)=" << LA(1) << ANTLR_USE_NAMESPACE(std)endl; } if ( !b.member(LA(1)) ) { if ( DEBUG_PARSER ) { traceIndent(); ANTLR_USE_NAMESPACE(std)cout << "token mismatch: " << LA(1) << " not member of " << "bitset" /*b.toString()*/ << ANTLR_USE_NAMESPACE(std)endl; } throw MismatchedTokenException(getTokenNames(), getNumTokens(), LT(1), b, false, getFilename()); } else { // mark token as consumed -- fetch next token deferred until LA/LT consume(); } } /** Mark a spot in the input and return the position. * Forwarded to TokenBuffer. */ virtual inline unsigned int mark() { return inputState->getInput().mark(); } /// rewind to a previously marked position virtual inline void rewind(unsigned int pos) { inputState->getInput().rewind(pos); } /** called by the generated parser to do error recovery, override to * customize the behaviour. */ virtual void recover(const RecognitionException& ex, const BitSet& tokenSet) { consume(); consumeUntil(tokenSet); } /// Parser error-reporting function can be overridden in subclass virtual void reportError(const RecognitionException& ex); /// Parser error-reporting function can be overridden in subclass virtual void reportError(const ANTLR_USE_NAMESPACE(std)string& s); /// Parser warning-reporting function can be overridden in subclass virtual void reportWarning(const ANTLR_USE_NAMESPACE(std)string& s); /// get the token name for the token number 'num' virtual const char* getTokenName(int num) const = 0; /// get a vector with all token names virtual const char* const* getTokenNames() const = 0; /** Get the number of tokens defined. * This one should be overridden in subclasses. */ virtual int getNumTokens(void) const = 0; /** Set or change the input token buffer */ // void setTokenBuffer(TokenBuffer* t); virtual void traceIndent(); virtual void traceIn(const char* rname); virtual void traceOut(const char* rname); protected: // void setTokenNames(const char** tokenNames_); ParserSharedInputState inputState; // /// AST return value for a rule is squirreled away here // RefAST returnAST; /// AST support code; parser and treeparser delegate to this object ASTFactory *astFactory; // used to keep track of the indentation for the trace int traceDepth; /** Utility class which allows tracing to work even when exceptions are * thrown. 
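	 *
	 * Generated rule bodies simply put one on the stack, e.g. (sketch):
	 *
	 *   Tracer traceHelper( this, "expr" );  // traceIn("expr") now, traceOut("expr")
	 *                                        // in the destructor, even if the rule throws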
*/ class Tracer { /*{{{*/ private: Parser* parser; const char* text; public: Tracer(Parser* p,const char * t) : parser(p), text(t) { parser->traceIn(text); } ~Tracer() { #ifdef ANTLR_CXX_SUPPORTS_UNCAUGHT_EXCEPTION // Only give trace if there's no uncaught exception.. if(!ANTLR_USE_NAMESPACE(std)uncaught_exception()) #endif parser->traceOut(text); } private: Tracer(const Tracer&); // undefined const Tracer& operator=(const Tracer&); // undefined /*}}}*/ }; private: Parser(const Parser&); // undefined const Parser& operator=(const Parser&); // undefined }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_Parser_hpp__ antlr-2.7.7/lib/cpp/antlr/CharStreamException.hpp0000644000175000017500000000132510522211615021672 0ustar twernertwerner#ifndef INC_CharStreamException_hpp__ #define INC_CharStreamException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CharStreamException.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API CharStreamException : public ANTLRException { public: CharStreamException(const ANTLR_USE_NAMESPACE(std)string& s) : ANTLRException(s) {} ~CharStreamException() throw() {} }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CharStreamException_hpp__ antlr-2.7.7/lib/cpp/antlr/SemanticException.hpp0000644000175000017500000000165210522211615021407 0ustar twernertwerner#ifndef INC_SemanticException_hpp__ #define INC_SemanticException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/SemanticException.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API SemanticException : public RecognitionException { public: SemanticException(const ANTLR_USE_NAMESPACE(std)string& s) : RecognitionException(s) { } SemanticException(const ANTLR_USE_NAMESPACE(std)string& s, const ANTLR_USE_NAMESPACE(std)string& fileName_, int line_,int column_) : RecognitionException(s,fileName_,line_,column_) { } ~SemanticException() throw() { } }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_SemanticException_hpp__ antlr-2.7.7/lib/cpp/antlr/CharInputBuffer.hpp0000644000175000017500000000334410522211615021014 0ustar twernertwerner#ifndef INC_CharInputBuffer_hpp__ # define INC_CharInputBuffer_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ # include # include # ifdef HAS_NOT_CCTYPE_H # include # else # include # endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** CharInputBuffer.hpp provides an InputBuffer for plain character arrays (buffers). */ class CharInputBuffer : public InputBuffer { public: /** Construct a CharInputBuffer.hpp object with a char* buffer of 'size' * if 'owner' is true, then the buffer will be delete[]-ed on destruction. * @note it is assumed the buffer was allocated with new[]! */ CharInputBuffer( unsigned char* buf, size_t size, bool owner = false ) : buffer(buf) , ptr(buf) , end(buf + size) , delete_buffer(owner) { } /** Destructor * @note If you're using malloced data, then you probably need to change * this destructor. Or better use this class as template for your own. 
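	 *
	 * Ownership sketch (MyLexer is a generated lexer and an assumption here):
	 *
	 *   unsigned char* buf = new unsigned char[len];     // filled elsewhere
	 *   antlr::CharInputBuffer input( buf, len, true );  // owner=true: delete[]'d here
	 *   MyLexer lexer( input );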
*/ ~CharInputBuffer( void ) { if( delete_buffer && buffer ) delete [] buffer; } /** Reset the CharInputBuffer to initial state * Called from LexerInputState::reset. * @see LexerInputState */ virtual inline void reset( void ) { InputBuffer::reset(); ptr = buffer; } virtual int getChar( void ) { return (ptr < end) ? *ptr++ : EOF; } protected: unsigned char* buffer; ///< the buffer with data unsigned char* ptr; ///< position ptr into the buffer unsigned char* end; ///< end sentry for buffer bool delete_buffer; ///< flag signifying if we have to delete the buffer }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif antlr-2.7.7/lib/cpp/antlr/String.hpp0000644000175000017500000000134710522211615017234 0ustar twernertwerner#ifndef INC_String_hpp__ #define INC_String_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/String.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif ANTLR_API ANTLR_USE_NAMESPACE(std)string operator+( const ANTLR_USE_NAMESPACE(std)string& lhs, const int rhs ); ANTLR_API ANTLR_USE_NAMESPACE(std)string operator+( const ANTLR_USE_NAMESPACE(std)string& lhs, size_t rhs ); ANTLR_API ANTLR_USE_NAMESPACE(std)string charName( int ch ); #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_String_hpp__ antlr-2.7.7/lib/cpp/antlr/InputBuffer.hpp0000644000175000017500000000666510522211615020227 0ustar twernertwerner#ifndef INC_InputBuffer_hpp__ #define INC_InputBuffer_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/InputBuffer.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** A Stream of characters fed to the lexer from a InputStream that can * be rewound via mark()/rewind() methods. *

 * A dynamic array is used to buffer up all the input characters. Normally,
 * "k" characters are stored in the buffer. More characters may be stored during
 * guess mode (testing syntactic predicate), or when LT(i>k) is referenced.
 * Consumption of characters is deferred. In other words, reading the next
 * character is not done by consume(), but deferred until needed by LA or LT.
 *

* * @see antlr.CharQueue */ class ANTLR_API InputBuffer { public: /** Create a character buffer */ InputBuffer() : nMarkers(0) , markerOffset(0) , numToConsume(0) { } virtual ~InputBuffer() { } /// Reset the input buffer to empty state virtual inline void reset( void ) { nMarkers = 0; markerOffset = 0; numToConsume = 0; queue.clear(); } /** This method updates the state of the input buffer so that * the text matched since the most recent mark() is no longer * held by the buffer. So, you either do a mark/rewind for * failed predicate or mark/commit to keep on parsing without * rewinding the input. */ inline void commit( void ) { nMarkers--; } /** Mark another character for deferred consumption */ virtual inline void consume() { numToConsume++; } /** Ensure that the character buffer is sufficiently full */ virtual void fill(unsigned int amount); /** Override this in subclasses to get the next character */ virtual int getChar()=0; /** Get a lookahead character */ virtual inline int LA(unsigned int i) { fill(i); return queue.elementAt(markerOffset + i - 1); } /** Return an integer marker that can be used to rewind the buffer to * its current state. */ virtual unsigned int mark(); /// Are there any marks active in the InputBuffer virtual inline bool isMarked() const { return (nMarkers != 0); } /** Rewind the character buffer to a marker. * @param mark Marker returned previously from mark() */ virtual void rewind(unsigned int mark); /** Get the number of non-consumed characters */ virtual unsigned int entries() const; ANTLR_USE_NAMESPACE(std)string getLAChars() const; ANTLR_USE_NAMESPACE(std)string getMarkedChars() const; protected: // char source // leave to subclasses // Number of active markers unsigned int nMarkers; // = 0; // Additional offset used when markers are active unsigned int markerOffset; // = 0; // Number of calls to consume() since last LA() or LT() call unsigned int numToConsume; // = 0; // Circular queue CircularQueue queue; /** Sync up deferred consumption */ void syncConsume(); private: InputBuffer(const InputBuffer& other); InputBuffer& operator=(const InputBuffer& other); }; /** Sync up deferred consumption */ inline void InputBuffer::syncConsume() { if (numToConsume > 0) { if (nMarkers > 0) markerOffset += numToConsume; else queue.removeItems( numToConsume ); numToConsume = 0; } } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_InputBuffer_hpp__ antlr-2.7.7/lib/cpp/antlr/MismatchedTokenException.hpp0000644000175000017500000000613010522211615022717 0ustar twernertwerner#ifndef INC_MismatchedTokenException_hpp__ #define INC_MismatchedTokenException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/MismatchedTokenException.hpp#2 $ */ #include #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API MismatchedTokenException : public RecognitionException { public: MismatchedTokenException(); /// Expected range / not range MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefAST node_, int lower, int upper_, bool matchNot ); // Expected token / not token MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefAST node_, int expecting_, bool matchNot ); // Expected BitSet / not BitSet MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefAST node_, BitSet set_, 
bool matchNot ); // Expected range / not range MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefToken token_, int lower, int upper_, bool matchNot, const ANTLR_USE_NAMESPACE(std)string& fileName_ ); // Expected token / not token MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefToken token_, int expecting_, bool matchNot, const ANTLR_USE_NAMESPACE(std)string& fileName_ ); // Expected BitSet / not BitSet MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefToken token_, BitSet set_, bool matchNot, const ANTLR_USE_NAMESPACE(std)string& fileName_ ); ~MismatchedTokenException() throw() {} /** * Returns a clean error message (no line number/column information) */ ANTLR_USE_NAMESPACE(std)string getMessage() const; public: /// The token that was encountered const RefToken token; /// The offending AST node if tree walking const RefAST node; /// taken from node or token object ANTLR_USE_NAMESPACE(std)string tokenText; /// Types of tokens #ifndef NO_STATIC_CONSTS static const int TOKEN = 1; static const int NOT_TOKEN = 2; static const int RANGE = 3; static const int NOT_RANGE = 4; static const int SET = 5; static const int NOT_SET = 6; #else enum { TOKEN = 1, NOT_TOKEN = 2, RANGE = 3, NOT_RANGE = 4, SET = 5, NOT_SET = 6 }; #endif public: /// One of the above int mismatchType; /// For TOKEN/NOT_TOKEN and RANGE/NOT_RANGE int expecting; /// For RANGE/NOT_RANGE (expecting is lower bound of range) int upper; /// For SET/NOT_SET BitSet set; private: /// Token names array for formatting const char* const* tokenNames; /// Max number of tokens in tokenNames const int numTokens; /// Return token name for tokenType ANTLR_USE_NAMESPACE(std)string tokenName(int tokenType) const; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_MismatchedTokenException_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenStreamIOException.hpp0000644000175000017500000000156510522211615022333 0ustar twernertwerner#ifndef INC_TokenStreamIOException_hpp__ #define INC_TokenStreamIOException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenStreamIOException.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class TokenStreamIOException : public TokenStreamException { public: TokenStreamIOException() : TokenStreamException() { } TokenStreamIOException(const ANTLR_USE_NAMESPACE(std)exception& e) : TokenStreamException(e.what()) , io(e) { } ~TokenStreamIOException() throw() { } private: ANTLR_USE_NAMESPACE(std)exception io; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenStreamIOException_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenStreamRecognitionException.hpp0000644000175000017500000000252410522211615024300 0ustar twernertwerner#ifndef INC_TokenStreamRecognitionException_hpp__ #define INC_TokenStreamRecognitionException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenStreamRecognitionException.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** Exception thrown from generated lexers when there's no default error * handler specified. 
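 *
 * A driver will therefore usually wrap the parse in a try block (sketch;
 * MyParser and its startRule() entry point are assumptions):
 *
 *   try { parser.startRule(); }
 *   catch( antlr::TokenStreamRecognitionException& e ) {
 *      std::cerr << e.toString() << std::endl;
 *   }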
* @see TokenStream */ class TokenStreamRecognitionException : public TokenStreamException { public: TokenStreamRecognitionException(RecognitionException& re) : TokenStreamException(re.getMessage()) , recog(re) { } virtual ~TokenStreamRecognitionException() throw() { } virtual ANTLR_USE_NAMESPACE(std)string toString() const { return recog.getFileLineColumnString()+getMessage(); } virtual ANTLR_USE_NAMESPACE(std)string getFilename() const throw() { return recog.getFilename(); } virtual int getLine() const throw() { return recog.getLine(); } virtual int getColumn() const throw() { return recog.getColumn(); } private: RecognitionException recog; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenStreamRecognitionException_hpp__ antlr-2.7.7/lib/cpp/antlr/ASTFactory.hpp0000644000175000017500000001317410522211615017746 0ustar twernertwerner#ifndef INC_ASTFactory_hpp__ #define INC_ASTFactory_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/ASTFactory.hpp#2 $ */ #include #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif // Using these extra types to appease MSVC typedef RefAST (*factory_type_)(); typedef ANTLR_USE_NAMESPACE(std)pair< const char*, factory_type_ > factory_descriptor_; typedef ANTLR_USE_NAMESPACE(std)vector< factory_descriptor_* > factory_descriptor_list_; /** AST Super Factory shared by TreeParser and Parser. * This super factory maintains a map of all AST node types to their respective * AST factories. One instance should be shared among a parser/treeparser * chain. * * @todo check all this code for possible use of references in * stead of RefAST's. */ class ANTLR_API ASTFactory { public: typedef factory_type_ factory_type; typedef factory_descriptor_ factory_descriptor; typedef factory_descriptor_list_ factory_descriptor_list; protected: /* The mapping of AST node type to factory.. */ factory_descriptor default_factory_descriptor; factory_descriptor_list nodeFactories; public: /// Make new factory. Per default (Ref)CommonAST instances are generated. ASTFactory(); /** Initialize factory with a non default node type. * factory_node_name should be the name of the AST node type the factory * generates. (should exist during the existance of this ASTFactory * instance) */ ASTFactory( const char* factory_node_name, factory_type factory ); /// Destroy factory virtual ~ASTFactory(); /// Register a node factory for the node type type with name ast_name void registerFactory( int type, const char* ast_name, factory_type factory ); /// Set the maximum node (AST) type this factory may encounter void setMaxNodeType( int type ); /// Add a child to the current AST void addASTChild(ASTPair& currentAST, RefAST child); /// Create new empty AST node. The right default type shou virtual RefAST create(); /// Create AST node of the right type for 'type' RefAST create(int type); /// Create AST node of the right type for 'type' and initialize with txt RefAST create(int type, const ANTLR_USE_NAMESPACE(std)string& txt); /// Create duplicate of tr RefAST create(RefAST tr); /// Create new AST node and initialize contents from a token. RefAST create(RefToken tok); /// Create new AST node and initialize contents from a stream. RefAST create(const ANTLR_USE_NAMESPACE(std)string& txt, ANTLR_USE_NAMESPACE(std)istream& infile ); /** Deep copy a single node. 
This function the new clone() methods in the * AST interface. Returns a new RefAST(nullASTptr) if t is null. */ RefAST dup(RefAST t); /// Duplicate tree including siblings of root. RefAST dupList(RefAST t); /** Duplicate a tree, assuming this is a root node of a tree-- * duplicate that node and what's below; ignore siblings of root node. */ RefAST dupTree(RefAST t); /** Make a tree from a list of nodes. The first element in the * array is the root. If the root is null, then the tree is * a simple list not a tree. Handles null children nodes correctly. * For example, make(a, b, null, c) yields tree (a b c). make(null,a,b) * yields tree (nil a b). */ RefAST make(ANTLR_USE_NAMESPACE(std)vector& nodes); /** Make a tree from a list of nodes, where the nodes are contained * in an ASTArray object. The ASTArray is deleted after use. * @todo FIXME! I have a feeling we can get rid of this ugly ASTArray thing */ RefAST make(ASTArray* nodes); /// Make an AST the root of current AST void makeASTRoot(ASTPair& currentAST, RefAST root); /** Set a new default AST type. * factory_node_name should be the name of the AST node type the factory * generates. (should exist during the existance of this ASTFactory * instance). * Only change factory between parser runs. You might get unexpected results * otherwise. */ void setASTNodeFactory( const char* factory_node_name, factory_type factory ); #ifdef ANTLR_SUPPORT_XML /** Load a XML AST from stream. Make sure you have all the factories * registered before use. * @note this 'XML' stuff is quite rough still. YMMV. */ RefAST LoadAST( ANTLR_USE_NAMESPACE(std)istream& infile ); #endif protected: void loadChildren( ANTLR_USE_NAMESPACE(std)istream& infile, RefAST current ); void loadSiblings( ANTLR_USE_NAMESPACE(std)istream& infile, RefAST current ); bool checkCloseTag( ANTLR_USE_NAMESPACE(std)istream& infile ); #ifdef ANTLR_VECTOR_HAS_AT /// construct a node of 'type' inline RefAST getNodeOfType( unsigned int type ) { return RefAST(nodeFactories.at(type)->second()); } /// get the name of the node 'type' const char* getASTNodeType( unsigned int type ) { return nodeFactories.at(type)->first; } /// get the factory used for node 'type' factory_type getASTNodeFactory( unsigned int type ) { return nodeFactories.at(type)->second; } #else inline RefAST getNodeOfType( unsigned int type ) { return RefAST(nodeFactories[type]->second()); } /// get the name of the node 'type' const char* getASTNodeType( unsigned int type ) { return nodeFactories[type]->first; } factory_type getASTNodeFactory( unsigned int type ) { return nodeFactories[type]->second; } #endif private: // no copying and such.. ASTFactory( const ASTFactory& ); ASTFactory& operator=( const ASTFactory& ); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_ASTFactory_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenStreamHiddenTokenFilter.hpp0000644000175000017500000000437610522211615023512 0ustar twernertwerner#ifndef INC_TokenStreamHiddenTokenFilter_hpp__ #define INC_TokenStreamHiddenTokenFilter_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/TokenStreamHiddenTokenFilter.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /**This object filters a token stream coming from a lexer * or another TokenStream so that only certain token channels * get transmitted to the parser. 
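 * For example (sketch; WS and COMMENT are token types from a generated
 * lexer and MyParser is a generated parser -- both are assumptions here):
 *
 *   antlr::TokenStreamHiddenTokenFilter filter( lexer );
 *   filter.hide( MyLexer::WS );
 *   filter.hide( MyLexer::COMMENT );
 *   MyParser parser( filter );   // the parser only sees unhidden tokens; hidden
 *                                // ones stay reachable via getHiddenBefore()/getHiddenAfter()
 *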
* * Any of the channels can be filtered off as "hidden" channels whose * tokens can be accessed from the parser. */ class ANTLR_API TokenStreamHiddenTokenFilter : public TokenStreamBasicFilter { // protected BitSet discardMask; protected: BitSet hideMask; private: RefToken nextMonitoredToken; protected: /** track tail of hidden list emanating from previous * monitored token */ RefToken lastHiddenToken; RefToken firstHidden; // = null; public: TokenStreamHiddenTokenFilter(TokenStream& input); protected: void consume(); private: void consumeFirst(); public: BitSet getDiscardMask() const; /** Return a ptr to the hidden token appearing immediately after * token t in the input stream. */ RefToken getHiddenAfter(RefToken t); /** Return a ptr to the hidden token appearing immediately before * token t in the input stream. */ RefToken getHiddenBefore(RefToken t); BitSet getHideMask() const; /** Return the first hidden token if one appears * before any monitored token. */ RefToken getInitialHiddenToken(); void hide(int m); void hide(const BitSet& mask); protected: RefToken LA(int i); public: /** Return the next monitored token. * Test the token following the monitored token. * If following is another monitored token, save it * for the next invocation of nextToken (like a single * lookahead token) and return it then. * If following is unmonitored, nondiscarded (hidden) * channel token, add it to the monitored token. * * Note: EOF must be a monitored Token. */ RefToken nextToken(); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenStreamHiddenTokenFilter_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenRefCount.hpp0000644000175000017500000000330310522211615020506 0ustar twernertwerner#ifndef INC_TokenRefCount_hpp__ # define INC_TokenRefCount_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ # include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class Token; struct ANTLR_API TokenRef { Token* const ptr; unsigned int count; TokenRef(Token* p); ~TokenRef(); TokenRef* increment() { ++count; return this; } bool decrement() { return (--count==0); } static TokenRef* getRef(const Token* p); private: TokenRef( const TokenRef& ); TokenRef& operator=( const TokenRef& ); }; template class ANTLR_API TokenRefCount { private: TokenRef* ref; public: TokenRefCount(const Token* p=0) : ref(p ? TokenRef::getRef(p) : 0) { } TokenRefCount(const TokenRefCount& other) : ref(other.ref ? other.ref->increment() : 0) { } ~TokenRefCount() { if (ref && ref->decrement()) delete ref; } TokenRefCount& operator=(Token* other) { TokenRef* tmp = TokenRef::getRef(other); if (ref && ref->decrement()) delete ref; ref=tmp; return *this; } TokenRefCount& operator=(const TokenRefCount& other) { if( other.ref != ref ) { TokenRef* tmp = other.ref ? other.ref->increment() : 0; if (ref && ref->decrement()) delete ref; ref=tmp; } return *this; } operator T* () const { return ref ? static_cast(ref->ptr) : 0; } T* operator->() const { return ref ? static_cast(ref->ptr) : 0; } T* get() const { return ref ? 
static_cast(ref->ptr) : 0; } }; typedef TokenRefCount RefToken; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_TokenRefCount_hpp__ antlr-2.7.7/lib/cpp/antlr/CommonHiddenStreamToken.hpp0000644000175000017500000000175610522211615022513 0ustar twernertwerner#ifndef INC_CommonHiddenStreamToken_hpp__ #define INC_CommonHiddenStreamToken_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/CommonHiddenStreamToken.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API CommonHiddenStreamToken : public CommonToken { protected: RefToken hiddenBefore; RefToken hiddenAfter; public: CommonHiddenStreamToken(); CommonHiddenStreamToken(int t, const ANTLR_USE_NAMESPACE(std)string& txt); CommonHiddenStreamToken(const ANTLR_USE_NAMESPACE(std)string& s); RefToken getHiddenAfter(); RefToken getHiddenBefore(); static RefToken factory(); void setHiddenAfter(RefToken t); void setHiddenBefore(RefToken t); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_CommonHiddenStreamToken_hpp__ antlr-2.7.7/lib/cpp/antlr/ANTLRException.hpp0000644000175000017500000000264610522211615020530 0ustar twernertwerner#ifndef INC_ANTLRException_hpp__ #define INC_ANTLRException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/ANTLRException.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API ANTLRException { public: /// Create ANTLR base exception without error message ANTLRException() : text("") { } /// Create ANTLR base exception with error message ANTLRException(const ANTLR_USE_NAMESPACE(std)string& s) : text(s) { } virtual ~ANTLRException() throw() { } /** Return complete error message with line/column number info (if present) * @note for your own exceptions override this one. Call getMessage from * here to get the 'clean' error message stored in the text attribute. */ virtual ANTLR_USE_NAMESPACE(std)string toString() const { return text; } /** Return error message without additional info (if present) * @note when making your own exceptions classes override toString * and call in toString getMessage which relays the text attribute * from here. 
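 *
 * A custom exception following that advice might look like (sketch):
 *
 *   class MyToolException : public antlr::ANTLRException {
 *   public:
 *      MyToolException( const std::string& s ) : ANTLRException(s) {}
 *      virtual std::string toString() const
 *      {
 *         return "[mytool] " + getMessage();
 *      }
 *   };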
*/ virtual ANTLR_USE_NAMESPACE(std)string getMessage() const { return text; } private: ANTLR_USE_NAMESPACE(std)string text; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_ANTLRException_hpp__ antlr-2.7.7/lib/cpp/antlr/NoViableAltException.hpp0000644000175000017500000000201510522211615021776 0ustar twernertwerner#ifndef INC_NoViableAltException_hpp__ #define INC_NoViableAltException_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/NoViableAltException.hpp#2 $ */ #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif class ANTLR_API NoViableAltException : public RecognitionException { public: const RefToken token; const RefAST node; // handles parsing and treeparsing NoViableAltException(RefAST t); NoViableAltException(RefToken t,const ANTLR_USE_NAMESPACE(std)string& fileName_); ~NoViableAltException() throw() {} /** * Returns a clean error message (no line number/column information) */ ANTLR_USE_NAMESPACE(std)string getMessage() const; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_NoViableAltException_hpp__ antlr-2.7.7/lib/cpp/antlr/TokenStreamRewriteEngine.hpp0000644000175000017500000003045410522211615022713 0ustar twernertwerner#ifndef INC_TokenStreamRewriteEngine_hpp__ #define INC_TokenStreamRewriteEngine_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html */ #include #include #include #include #include #include #include #include #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** This token stream tracks the *entire* token stream coming from * a lexer, but does not pass on the whitespace (or whatever else * you want to discard) to the parser. * * This class can then be asked for the ith token in the input stream. * Useful for dumping out the input stream exactly after doing some * augmentation or other manipulations. Tokens are index from 0..n-1 * * You can insert stuff, replace, and delete chunks. Note that the * operations are done lazily--only if you convert the buffer to a * String. This is very efficient because you are not moving data around * all the time. As the buffer of tokens is converted to strings, the * toString() method(s) check to see if there is an operation at the * current index. If so, the operation is done and then normal String * rendering continues on the buffer. This is like having multiple Turing * machine instruction streams (programs) operating on a single input tape. :) * * Since the operations are done lazily at toString-time, operations do not * screw up the token index values. That is, an insert operation at token * index i does not change the index values for tokens i+1..n-1. * * Because operations never actually alter the buffer, you may always get * the original token stream back without undoing anything. Since * the instructions are queued up, you can easily simulate transactions and * roll back any changes if there is an error just by removing instructions. * For example, * * TokenStreamRewriteEngine rewriteEngine = * new TokenStreamRewriteEngine(lexer); * JavaRecognizer parser = new JavaRecognizer(rewriteEngine); * ... 
* rewriteEngine.insertAfter("pass1", t, "foobar");} * rewriteEngine.insertAfter("pass2", u, "start");} * System.out.println(rewriteEngine.toString("pass1")); * System.out.println(rewriteEngine.toString("pass2")); * * You can also have multiple "instruction streams" and get multiple * rewrites from a single pass over the input. Just name the instruction * streams and use that name again when printing the buffer. This could be * useful for generating a C file and also its header file--all from the * same buffer. * * If you don't use named rewrite streams, a "default" stream is used. * * Terence Parr, parrt@cs.usfca.edu * University of San Francisco * February 2004 */ class TokenStreamRewriteEngine : public TokenStream { public: typedef ANTLR_USE_NAMESPACE(std)vector token_list; static const char* DEFAULT_PROGRAM_NAME; #ifndef NO_STATIC_CONSTS static const size_t MIN_TOKEN_INDEX; static const int PROGRAM_INIT_SIZE; #else enum { MIN_TOKEN_INDEX = 0, PROGRAM_INIT_SIZE = 100 }; #endif struct tokenToStream { tokenToStream( ANTLR_USE_NAMESPACE(std)ostream& o ) : out(o) {} template void operator() ( const T& t ) { out << t->getText(); } ANTLR_USE_NAMESPACE(std)ostream& out; }; class RewriteOperation { protected: RewriteOperation( size_t idx, const ANTLR_USE_NAMESPACE(std)string& txt ) : index(idx), text(txt) { } public: virtual ~RewriteOperation() { } /** Execute the rewrite operation by possibly adding to the buffer. * Return the index of the next token to operate on. */ virtual size_t execute( ANTLR_USE_NAMESPACE(std)ostream& /* out */ ) { return index; } virtual size_t getIndex() const { return index; } virtual const char* type() const { return "RewriteOperation"; } protected: size_t index; ANTLR_USE_NAMESPACE(std)string text; }; struct executeOperation { ANTLR_USE_NAMESPACE(std)ostream& out; executeOperation( ANTLR_USE_NAMESPACE(std)ostream& s ) : out(s) {} void operator () ( RewriteOperation* t ) { t->execute(out); } }; /// list of rewrite operations typedef ANTLR_USE_NAMESPACE(std)list operation_list; /// map program name to tuple typedef ANTLR_USE_NAMESPACE(std)map program_map; class InsertBeforeOp : public RewriteOperation { public: InsertBeforeOp( size_t index, const ANTLR_USE_NAMESPACE(std)string& text ) : RewriteOperation(index, text) { } virtual ~InsertBeforeOp() {} virtual size_t execute( ANTLR_USE_NAMESPACE(std)ostream& out ) { out << text; return index; } virtual const char* type() const { return "InsertBeforeOp"; } }; class ReplaceOp : public RewriteOperation { public: ReplaceOp(size_t from, size_t to, ANTLR_USE_NAMESPACE(std)string text) : RewriteOperation(from,text) , lastIndex(to) { } virtual ~ReplaceOp() {} virtual size_t execute( ANTLR_USE_NAMESPACE(std)ostream& out ) { out << text; return lastIndex+1; } virtual const char* type() const { return "ReplaceOp"; } protected: size_t lastIndex; }; class DeleteOp : public ReplaceOp { public: DeleteOp(size_t from, size_t to) : ReplaceOp(from,to,"") { } virtual const char* type() const { return "DeleteOp"; } }; TokenStreamRewriteEngine(TokenStream& upstream); TokenStreamRewriteEngine(TokenStream& upstream, size_t initialSize); RefToken nextToken( void ); void rollback(size_t instructionIndex) { rollback(DEFAULT_PROGRAM_NAME, instructionIndex); } /** Rollback the instruction stream for a program so that * the indicated instruction (via instructionIndex) is no * longer in the stream. UNTESTED! 
*/ void rollback(const ANTLR_USE_NAMESPACE(std)string& programName, size_t instructionIndex ); void deleteProgram() { deleteProgram(DEFAULT_PROGRAM_NAME); } /** Reset the program so that no instructions exist */ void deleteProgram(const ANTLR_USE_NAMESPACE(std)string& programName) { rollback(programName, MIN_TOKEN_INDEX); } void insertAfter( RefTokenWithIndex t, const ANTLR_USE_NAMESPACE(std)string& text ) { insertAfter(DEFAULT_PROGRAM_NAME, t, text); } void insertAfter(size_t index, const ANTLR_USE_NAMESPACE(std)string& text) { insertAfter(DEFAULT_PROGRAM_NAME, index, text); } void insertAfter( const ANTLR_USE_NAMESPACE(std)string& programName, RefTokenWithIndex t, const ANTLR_USE_NAMESPACE(std)string& text ) { insertAfter(programName, t->getIndex(), text); } void insertAfter( const ANTLR_USE_NAMESPACE(std)string& programName, size_t index, const ANTLR_USE_NAMESPACE(std)string& text ) { // to insert after, just insert before next index (even if past end) insertBefore(programName,index+1, text); } void insertBefore( RefTokenWithIndex t, const ANTLR_USE_NAMESPACE(std)string& text ) { // std::cout << "insertBefore index " << t->getIndex() << " " << text << std::endl; insertBefore(DEFAULT_PROGRAM_NAME, t, text); } void insertBefore(size_t index, const ANTLR_USE_NAMESPACE(std)string& text) { insertBefore(DEFAULT_PROGRAM_NAME, index, text); } void insertBefore( const ANTLR_USE_NAMESPACE(std)string& programName, RefTokenWithIndex t, const ANTLR_USE_NAMESPACE(std)string& text ) { insertBefore(programName, t->getIndex(), text); } void insertBefore( const ANTLR_USE_NAMESPACE(std)string& programName, size_t index, const ANTLR_USE_NAMESPACE(std)string& text ) { addToSortedRewriteList(programName, new InsertBeforeOp(index,text)); } void replace(size_t index, const ANTLR_USE_NAMESPACE(std)string& text) { replace(DEFAULT_PROGRAM_NAME, index, index, text); } void replace( size_t from, size_t to, const ANTLR_USE_NAMESPACE(std)string& text) { replace(DEFAULT_PROGRAM_NAME, from, to, text); } void replace( RefTokenWithIndex indexT, const ANTLR_USE_NAMESPACE(std)string& text ) { replace(DEFAULT_PROGRAM_NAME, indexT->getIndex(), indexT->getIndex(), text); } void replace( RefTokenWithIndex from, RefTokenWithIndex to, const ANTLR_USE_NAMESPACE(std)string& text ) { replace(DEFAULT_PROGRAM_NAME, from, to, text); } void replace(const ANTLR_USE_NAMESPACE(std)string& programName, size_t from, size_t to, const ANTLR_USE_NAMESPACE(std)string& text ) { addToSortedRewriteList(programName,new ReplaceOp(from, to, text)); } void replace( const ANTLR_USE_NAMESPACE(std)string& programName, RefTokenWithIndex from, RefTokenWithIndex to, const ANTLR_USE_NAMESPACE(std)string& text ) { replace(programName, from->getIndex(), to->getIndex(), text); } void remove(size_t index) { remove(DEFAULT_PROGRAM_NAME, index, index); } void remove(size_t from, size_t to) { remove(DEFAULT_PROGRAM_NAME, from, to); } void remove(RefTokenWithIndex indexT) { remove(DEFAULT_PROGRAM_NAME, indexT, indexT); } void remove(RefTokenWithIndex from, RefTokenWithIndex to) { remove(DEFAULT_PROGRAM_NAME, from, to); } void remove( const ANTLR_USE_NAMESPACE(std)string& programName, size_t from, size_t to) { replace(programName,from,to,""); } void remove( const ANTLR_USE_NAMESPACE(std)string& programName, RefTokenWithIndex from, RefTokenWithIndex to ) { replace(programName,from,to,""); } void discard(int ttype) { discardMask.add(ttype); } RefToken getToken( size_t i ) { return RefToken(tokens.at(i)); } size_t getTokenStreamSize() const { return tokens.size(); } 
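	// Usage sketch: CalcLexer, CalcParser and startRule() are hypothetical
	// ANTLR-generated names; only the rewrite-engine calls below come from
	// this interface.
	//
	//    CalcLexer lexer( std::cin );
	//    TokenStreamRewriteEngine rewriter( lexer );
	//    CalcParser parser( rewriter );
	//    parser.startRule();                          // parse, buffering all tokens
	//    rewriter.insertBefore( 0, "/* edited */ " ); // queue an edit, nothing moves yet
	//    rewriter.replace( 3, 5, "replacement" );     // still lazy
	//    rewriter.toStream( std::cout );              // render original tokens plus edits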
void originalToStream( ANTLR_USE_NAMESPACE(std)ostream& out ) const { ANTLR_USE_NAMESPACE(std)for_each( tokens.begin(), tokens.end(), tokenToStream(out) ); } void originalToStream( ANTLR_USE_NAMESPACE(std)ostream& out, size_t start, size_t end ) const; void toStream( ANTLR_USE_NAMESPACE(std)ostream& out ) const { toStream( out, MIN_TOKEN_INDEX, getTokenStreamSize()); } void toStream( ANTLR_USE_NAMESPACE(std)ostream& out, const ANTLR_USE_NAMESPACE(std)string& programName ) const { toStream( out, programName, MIN_TOKEN_INDEX, getTokenStreamSize()); } void toStream( ANTLR_USE_NAMESPACE(std)ostream& out, size_t start, size_t end ) const { toStream(out, DEFAULT_PROGRAM_NAME, start, end); } void toStream( ANTLR_USE_NAMESPACE(std)ostream& out, const ANTLR_USE_NAMESPACE(std)string& programName, size_t firstToken, size_t lastToken ) const; void toDebugStream( ANTLR_USE_NAMESPACE(std)ostream& out ) const { toDebugStream( out, MIN_TOKEN_INDEX, getTokenStreamSize()); } void toDebugStream( ANTLR_USE_NAMESPACE(std)ostream& out, size_t start, size_t end ) const; size_t getLastRewriteTokenIndex() const { return getLastRewriteTokenIndex(DEFAULT_PROGRAM_NAME); } /** Return the last index for the program named programName * return 0 if the program does not exist or the program is empty. * (Note this is different from the java implementation that returns -1) */ size_t getLastRewriteTokenIndex(const ANTLR_USE_NAMESPACE(std)string& programName) const { program_map::const_iterator rewrites = programs.find(programName); if( rewrites == programs.end() ) return 0; const operation_list& prog = rewrites->second; if( !prog.empty() ) { operation_list::const_iterator last = prog.end(); --last; return (*last)->getIndex(); } return 0; } protected: /** If op.index > lastRewriteTokenIndexes, just add to the end. * Otherwise, do linear */ void addToSortedRewriteList(RewriteOperation* op) { addToSortedRewriteList(DEFAULT_PROGRAM_NAME, op); } void addToSortedRewriteList( const ANTLR_USE_NAMESPACE(std)string& programName, RewriteOperation* op ); protected: /** Who do we suck tokens from? */ TokenStream& stream; /** track index of tokens */ size_t index; /** Track the incoming list of tokens */ token_list tokens; /** You may have multiple, named streams of rewrite operations. * I'm calling these things "programs." * Maps String (name) -> rewrite (List) */ program_map programs; /** Which (whitespace) token(s) to throw out */ BitSet discardMask; }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif antlr-2.7.7/lib/cpp/antlr/Token.hpp0000644000175000017500000000431010522211615017037 0ustar twernertwerner#ifndef INC_Token_hpp__ #define INC_Token_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/Token.hpp#2 $ */ #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif struct TokenRef; /** A token is minimally a token type. Subclasses can add the text matched * for the token and line info. 
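 * In practice tokens are handled through the reference-counted RefToken
 * wrapper; a concrete CommonToken (declared in CommonToken.hpp) can be
 * built as in this sketch, where ID stands for a user token type:
 *
 *     RefToken tok( new CommonToken( ID, "someText" ) );
 *     tok->setLine( 1 );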
*/ class ANTLR_API Token { public: // constants #ifndef NO_STATIC_CONSTS static const int MIN_USER_TYPE = 4; static const int NULL_TREE_LOOKAHEAD = 3; static const int INVALID_TYPE = 0; static const int EOF_TYPE = 1; static const int SKIP = -1; #else enum { MIN_USER_TYPE = 4, NULL_TREE_LOOKAHEAD = 3, INVALID_TYPE = 0, EOF_TYPE = 1, SKIP = -1 }; #endif Token() : ref(0) , type(INVALID_TYPE) { } Token(int t) : ref(0) , type(t) { } Token(int t, const ANTLR_USE_NAMESPACE(std)string& txt) : ref(0) , type(t) { setText(txt); } virtual ~Token() { } virtual int getColumn() const; virtual int getLine() const; virtual ANTLR_USE_NAMESPACE(std)string getText() const; virtual const ANTLR_USE_NAMESPACE(std)string& getFilename() const; virtual int getType() const; virtual void setColumn(int c); virtual void setLine(int l); virtual void setText(const ANTLR_USE_NAMESPACE(std)string& t); virtual void setType(int t); virtual void setFilename( const std::string& file ); virtual ANTLR_USE_NAMESPACE(std)string toString() const; private: friend struct TokenRef; TokenRef* ref; int type; ///< the type of the token Token(RefToken other); Token& operator=(const Token& other); Token& operator=(RefToken other); Token(const Token&); }; extern ANTLR_API RefToken nullToken; #ifdef NEEDS_OPERATOR_LESS_THAN // RK: Added after 2.7.2 previously it was undefined. // AL: what to return if l and/or r point to nullToken??? inline bool operator<( RefToken l, RefToken r ) { return nullToken == l ? ( nullToken == r ? false : true ) : l->getType() < r->getType(); } #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_Token_hpp__ antlr-2.7.7/lib/cpp/antlr/LLkParser.hpp0000644000175000017500000000307610522211615017626 0ustar twernertwerner#ifndef INC_LLkParser_hpp__ #define INC_LLkParser_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/LLkParser.hpp#2 $ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /**An LL(k) parser. * * @see antlr.Token * @see antlr.TokenBuffer * @see antlr.LL1Parser */ class ANTLR_API LLkParser : public Parser { public: LLkParser(const ParserSharedInputState& lexer, int k_); LLkParser(TokenBuffer& tokenBuf, int k_); LLkParser(TokenStream& lexer, int k_); /** Consume another token from the input stream. Can only write sequentially! * If you need 3 tokens ahead, you must consume() 3 times. *
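 *  An illustrative hand-written fragment (ID is a hypothetical token type;
 *  LA(), LT() and consume() are the members declared below):
 *
 *      while( LA(1) == ID ) {       // peek at the next token's type
 *          RefToken name = LT(1);   // fetch it without consuming
 *          consume();               // then advance one token
 *      }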

* Note that it is possible to overwrite tokens that have not been matched. * For example, calling consume() 3 times when k=2, means that the first token * consumed will be overwritten with the 3rd. */ virtual inline void consume() { inputState->getInput().consume(); } virtual inline int LA(unsigned int i) { return inputState->getInput().LA(i); } virtual inline RefToken LT(unsigned int i) { return inputState->getInput().LT(i); } protected: /// the lookahead this LL(k) parser is using. int k; private: void trace(const char* ee, const char* rname); public: virtual void traceIn(const char* rname); virtual void traceOut(const char* rname); }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_LLkParser_hpp__ antlr-2.7.7/lib/cpp/antlr/ANTLRUtil.hpp0000644000175000017500000000320710522211615017501 0ustar twernertwerner#ifndef INC_ANTLRUtil_hpp__ #define INC_ANTLRUtil_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** Eat whitespace from the input stream * @param is the stream to read from */ ANTLR_USE_NAMESPACE(std)istream& eatwhite( ANTLR_USE_NAMESPACE(std)istream& is ); /** Read a string enclosed by '"' from a stream. Also handles escaping of \". * Skips leading whitespace. * @param in the istream to read from. * @returns the string read from file exclusive the '"' * @throws ios_base::failure if string is badly formatted */ ANTLR_USE_NAMESPACE(std)string read_string( ANTLR_USE_NAMESPACE(std)istream& in ); /* Read a ([A-Z][0-9][a-z]_)* kindoff thing. Skips leading whitespace. * @param in the istream to read from. */ ANTLR_USE_NAMESPACE(std)string read_identifier( ANTLR_USE_NAMESPACE(std)istream& in ); /** Read a attribute="value" thing. Leading whitespace is skipped. * Between attribute and '=' no whitespace is allowed. After the '=' it is * permitted. * @param in the istream to read from. * @param attribute string the attribute name is put in * @param value string the value of the attribute is put in * @throws ios_base::failure if something is fishy. E.g. malformed quoting * or missing '=' */ void read_AttributeNValue( ANTLR_USE_NAMESPACE(std)istream& in, ANTLR_USE_NAMESPACE(std)string& attribute, ANTLR_USE_NAMESPACE(std)string& value ); #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif antlr-2.7.7/lib/cpp/antlr/config.hpp0000644000175000017500000002065110522211615017232 0ustar twernertwerner#ifndef INC_config_hpp__ #define INC_config_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/config.hpp#2 $ */ /* * Just a simple configuration file to differentiate between the * various compilers used and reconfigure stuff for any oddities of the * compiler in question. * * These are the defaults. Per compiler these are amended. */ #define ANTLR_USE_NAMESPACE(_x_) _x_:: #define ANTLR_USING_NAMESPACE(_x_) using namespace _x_; #define ANTLR_CXX_SUPPORTS_NAMESPACE 1 #define ANTLR_C_USING(_x_) #define ANTLR_API #ifndef CUSTOM_API # define CUSTOM_API #endif #define ANTLR_IOS_BASE ios_base /** define if cctype functions/macros need a std:: prefix. A lot of compilers * define these as macros, in which case something barfs. 
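 * When it is defined the library calls e.g. ANTLR_USE_NAMESPACE(std)isspace(c)
 * rather than plain isspace(c); ANTLRUtil.cpp and String.cpp switch on it.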
*/ #define ANTLR_CCTYPE_NEEDS_STD /// Define if C++ compiler supports std::uncaught_exception #define ANTLR_CXX_SUPPORTS_UNCAUGHT_EXCEPTION #define ANTLR_ATOI_IN_STD /******************************************************************************/ /*{{{ Microsoft Visual C++ */ // NOTE: If you provide patches for a specific MSVC version guard them for // the specific version!!!! // _MSC_VER == 1100 for Microsoft Visual C++ 5.0 // _MSC_VER == 1200 for Microsoft Visual C++ 6.0 // _MSC_VER == 1300 for Microsoft Visual C++ 7.0 #if defined(_MSC_VER) # if _MSC_VER < 1300 # define NOMINMAX # pragma warning(disable : 4786) # define min _cpp_min # endif // This warning really gets on my nerves. // It's the one about symbol longer than 256 chars, and it happens // all the time with STL. # pragma warning( disable : 4786 4231 ) // this shuts up some DLL interface warnings for STL # pragma warning( disable : 4251 ) # ifdef ANTLR_CXX_USE_STLPORT # undef ANTLR_CXX_SUPPORTS_UNCAUGHT_EXCEPTION # endif # if ( _MSC_VER < 1300 ) && ( defined(ANTLR_EXPORTS) || defined(ANTLR_IMPORTS) ) # error "DLL Build not supported on these MSVC versions." // see comment in lib/cpp/src/dll.cpp # endif // For the DLL support originally contributed by Stephen Naughton // If you are building statically leave ANTLR_EXPORTS/ANTLR_IMPORTS undefined // If you are building the DLL define ANTLR_EXPORTS // If you are compiling code to be used with the DLL define ANTLR_IMPORTS # ifdef ANTLR_EXPORTS # undef ANTLR_API # define ANTLR_API __declspec(dllexport) # endif # ifdef ANTLR_IMPORTS # undef ANTLR_API # define ANTLR_API __declspec(dllimport) # endif # if ( _MSC_VER < 1200 ) // supposedly only for MSVC5 and before... // Using vector requires operator<(X,X) to be defined # define NEEDS_OPERATOR_LESS_THAN # endif // VC6 # if ( _MSC_VER == 1200 ) # undef ANTLR_ATOI_IN_STD # endif # if ( _MSC_VER < 1310 ) // Supposedly only for MSVC7 and before... // Not allowed to put 'static const int XXX=20;' in a class definition # define NO_STATIC_CONSTS # define NO_TEMPLATE_PARTS # endif // No strcasecmp in the C library (so use stricmp instead) // - Anyone know which is in which standard? # define NO_STRCASECMP # undef ANTLR_CCTYPE_NEEDS_STD # define NO_STATIC_CONSTS #endif // End of Microsoft Visual C++ /*}}}*/ /******************************************************************************/ /*{{{ SunPro Compiler (Using OBJECTSPACE STL) *****************************************************************************/ #ifdef __SUNPRO_CC # if (__SUNPRO_CC >= 0x500) # define NEEDS_OPERATOR_LESS_THAN # define NO_TEMPLATE_PARTS # else # undef namespace # define namespace # if (__SUNPRO_CC == 0x420) /* This code is specif to SunWspro Compiler 4.2, and will compile with the objectspace 2.1 toolkit for Solaris2.6 */ # define HAS_NOT_CASSERT_H # define HAS_NOT_CSTRING_H # define HAS_NOT_CCTYPE_H # define HAS_NOT_CSTDIO_H # define HAS_OSTREAM_H /* #define OS_SOLARIS_2_6 #define OS_NO_WSTRING #define OS_NO_ALLOCATORS #define OS_MULTI_THREADED #define OS_SOLARIS_NATIVE #define OS_REALTIME #define __OSVERSION__=5 #define SVR4 */ // ObjectSpace + some specific templates constructions with stl. /* #define OS_NO_ALLOCATOR */ // This great compiler does not have the namespace feature. 
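// (Illustrative expansion, using the default definitions at the top of this
//  file: with namespace support ANTLR_USE_NAMESPACE(std)string expands to
//  std::string, while after the redefinition below it expands to plain
//  string, so the same sources compile on this pre-namespace compiler.)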
# undef ANTLR_USE_NAMESPACE # define ANTLR_USE_NAMESPACE(_x_) # undef ANTLR_USING_NAMESPACE # define ANTLR_USING_NAMESPACE(_x_) # undef ANTLR_CXX_SUPPORTS_NAMESPACE # endif // End __SUNPRO_CC == 0x420 # undef explicit # define explicit # define exception os_exception # define bad_exception os_bad_exception // Not allowed to put 'static const int XXX=20;' in a class definition # define NO_STATIC_CONSTS // Using vector requires operator<(X,X) to be defined # define NEEDS_OPERATOR_LESS_THAN # endif # undef ANTLR_CCTYPE_NEEDS_STD #endif // end __SUNPRO_CC /*}}}*/ /*****************************************************************************/ /*{{{ Inprise C++ Builder 3.0 *****************************************************************************/ #ifdef __BCPLUSPLUS__ # define NO_TEMPLATE_PARTS # define NO_STRCASECMP # undef ANTLR_CCTYPE_NEEDS_STD #endif // End of C++ Builder 3.0 /*}}}*/ /*****************************************************************************/ /*{{{ IBM VisualAge C++ ( which includes the Dinkumware C++ Library ) *****************************************************************************/ #ifdef __IBMCPP__ // No strcasecmp in the C library (so use stricmp instead) // - Anyone know which is in which standard? #if (defined(_AIX) && (__IBMCPP__ >= 600)) # define NO_STATIC_CONSTS #else # define NO_STRCASECMP # undef ANTLR_CCTYPE_NEEDS_STD #endif #endif // end IBM VisualAge C++ /*}}}*/ /*****************************************************************************/ /*{{{ Metrowerks Codewarrior *****************************************************************************/ #ifdef __MWERKS__ # if (__MWERKS__ <= 0x2201) # define NO_TEMPLATE_PARTS # endif // CW 6.0 and 7.0 still do not have it. # define ANTLR_REALLY_NO_STRCASECMP # undef ANTLR_C_USING # define ANTLR_C_USING(_x_) using std:: ## _x_; # define ANTLR_CCTYPE_NEEDS_STD # undef ANTLR_CXX_SUPPORTS_UNCAUGHT_EXCEPTION #endif // End of Metrowerks Codewarrior /*}}}*/ /*****************************************************************************/ /*{{{ SGI Irix 6.5.10 MIPSPro compiler *****************************************************************************/ // (contributed by Anna Winkler) // Note: you can't compile ANTLR with the MIPSPro compiler on // anything < 6.5.10 because SGI just fixed a big bug dealing with // namespaces in that release. #ifdef __sgi # define HAS_NOT_CCTYPE_H # define HAS_NOT_CSTRING_H # define HAS_NOT_CSTDIO_H # undef ANTLR_CCTYPE_NEEDS_STD #endif // End IRIX MIPSPro /*}}}*/ /*****************************************************************************/ /*{{{ G++ in various incarnations *****************************************************************************/ // With the gcc-2.95 and 3.0 being in the near future we should start handling // incompatabilities between the various libstdc++'s. #if defined(__GNUC__) || defined(__GNUG__) // gcc 2 branch.. # if (__GNUC__ == 2 ) # if (__GNUC_MINOR__ <= 8 ) # undef ANTLR_USE_NAMESPACE # define ANTLR_USE_NAMESPACE(_x_) # undef ANTLR_USING_NAMESPACE # define ANTLR_USING_NAMESPACE(_x_) # undef ANTLR_CXX_SUPPORTS_NAMESPACE # endif # if (__GNUC_MINOR__ > 8 && __GNUC_MINOR__ <= 95 ) # undef ANTLR_IOS_BASE # define ANTLR_IOS_BASE ios # undef ANTLR_CCTYPE_NEEDS_STD // compiling with -ansi ? # ifdef __STRICT_ANSI__ # undef ANTLR_REALLY_NO_STRCASECMP # define ANTLR_REALLY_NO_STRCASECMP # endif # else // experimental .96 .97 branches.. # undef ANTLR_CCTYPE_NEEDS_STD # endif # endif #endif // ! 
__GNUC__ /*}}}*/ /*****************************************************************************/ /*{{{ Digital CXX (Tru64) *****************************************************************************/ #ifdef __DECCXX #define __USE_STD_IOSTREAM #endif /*}}}*/ /*****************************************************************************/ #ifdef __BORLANDC__ # if __BORLANDC__ >= 560 # include # include # define ANTLR_CCTYPE_NEEDS_STD # else # error "sorry, compiler is too old - consider an update." # endif #endif // Redefine these for backwards compatability.. #undef ANTLR_BEGIN_NAMESPACE #undef ANTLR_END_NAMESPACE #if ANTLR_CXX_SUPPORTS_NAMESPACE == 1 # define ANTLR_BEGIN_NAMESPACE(_x_) namespace _x_ { # define ANTLR_END_NAMESPACE } #else # define ANTLR_BEGIN_NAMESPACE(_x_) # define ANTLR_END_NAMESPACE #endif #endif //INC_config_hpp__ antlr-2.7.7/lib/cpp/antlr/RefCount.hpp0000644000175000017500000000264010522211615017510 0ustar twernertwerner#ifndef INC_RefCount_hpp__ #define INC_RefCount_hpp__ /* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/antlr/RefCount.hpp#2 $ */ #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif template class ANTLR_API RefCount { private: struct Ref { T* const ptr; unsigned int count; Ref(T* p) : ptr(p), count(1) {} ~Ref() {delete ptr;} Ref* increment() {++count;return this;} bool decrement() {return (--count==0);} private: Ref(const Ref&); Ref& operator=(const Ref&); }* ref; public: explicit RefCount(T* p = 0) : ref(p ? new Ref(p) : 0) { } RefCount(const RefCount& other) : ref(other.ref ? other.ref->increment() : 0) { } ~RefCount() { if (ref && ref->decrement()) delete ref; } RefCount& operator=(const RefCount& other) { Ref* tmp = other.ref ? other.ref->increment() : 0; if (ref && ref->decrement()) delete ref; ref = tmp; return *this; } operator T* () const { return ref ? ref->ptr : 0; } T* operator->() const { return ref ? ref->ptr : 0; } T* get() const { return ref ? ref->ptr : 0; } template operator RefCount() { return RefCount(ref); } }; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif #endif //INC_RefCount_hpp__ antlr-2.7.7/lib/cpp/src/0000755000175000017500000000000010522211615014717 5ustar twernertwernerantlr-2.7.7/lib/cpp/src/TokenStreamHiddenTokenFilter.cpp0000644000175000017500000001030310522211615023137 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/TokenStreamHiddenTokenFilter.cpp#2 $ */ #include "antlr/TokenStreamHiddenTokenFilter.hpp" #include "antlr/CommonHiddenStreamToken.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /**This object filters a token stream coming from a lexer * or another TokenStream so that only certain token channels * get transmitted to the parser. * * Any of the channels can be filtered off as "hidden" channels whose * tokens can be accessed from the parser. 
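 * A typical setup sketch (MyLexer, MyParser, the WS token type and someToken
 * are hypothetical; hide() and getHiddenBefore() are defined in this class):
 *
 *    MyLexer lexer( std::cin );
 *    TokenStreamHiddenTokenFilter filter( lexer );
 *    filter.hide( MyLexerTokenTypes::WS );   // keep WS, but hide it from the parser
 *    MyParser parser( filter );
 *    parser.startRule();
 *    RefToken ws = filter.getHiddenBefore( someToken );  // whitespace before a token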
*/ TokenStreamHiddenTokenFilter::TokenStreamHiddenTokenFilter(TokenStream& input) : TokenStreamBasicFilter(input) { } void TokenStreamHiddenTokenFilter::consume() { nextMonitoredToken = input->nextToken(); } void TokenStreamHiddenTokenFilter::consumeFirst() { consume(); // Handle situation where hidden or discarded tokens // appear first in input stream RefToken p; // while hidden or discarded scarf tokens while ( hideMask.member(LA(1)->getType()) || discardMask.member(LA(1)->getType()) ) { if ( hideMask.member(LA(1)->getType()) ) { if ( !p ) { p = LA(1); } else { static_cast(p.get())->setHiddenAfter(LA(1)); static_cast(LA(1).get())->setHiddenBefore(p); // double-link p = LA(1); } lastHiddenToken = p; if (!firstHidden) firstHidden = p; // record hidden token if first } consume(); } } BitSet TokenStreamHiddenTokenFilter::getDiscardMask() const { return discardMask; } /** Return a ptr to the hidden token appearing immediately after * token t in the input stream. */ RefToken TokenStreamHiddenTokenFilter::getHiddenAfter(RefToken t) { return static_cast(t.get())->getHiddenAfter(); } /** Return a ptr to the hidden token appearing immediately before * token t in the input stream. */ RefToken TokenStreamHiddenTokenFilter::getHiddenBefore(RefToken t) { return static_cast(t.get())->getHiddenBefore(); } BitSet TokenStreamHiddenTokenFilter::getHideMask() const { return hideMask; } /** Return the first hidden token if one appears * before any monitored token. */ RefToken TokenStreamHiddenTokenFilter::getInitialHiddenToken() { return firstHidden; } void TokenStreamHiddenTokenFilter::hide(int m) { hideMask.add(m); } void TokenStreamHiddenTokenFilter::hide(const BitSet& mask) { hideMask = mask; } RefToken TokenStreamHiddenTokenFilter::LA(int) { return nextMonitoredToken; } /** Return the next monitored token. * Test the token following the monitored token. * If following is another monitored token, save it * for the next invocation of nextToken (like a single * lookahead token) and return it then. * If following is unmonitored, nondiscarded (hidden) * channel token, add it to the monitored token. * * Note: EOF must be a monitored Token. */ RefToken TokenStreamHiddenTokenFilter::nextToken() { // handle an initial condition; don't want to get lookahead // token of this splitter until first call to nextToken if ( !LA(1) ) { consumeFirst(); } // we always consume hidden tokens after monitored, thus, // upon entry LA(1) is a monitored token. RefToken monitored = LA(1); // point to hidden tokens found during last invocation static_cast(monitored.get())->setHiddenBefore(lastHiddenToken); lastHiddenToken = nullToken; // Look for hidden tokens, hook them into list emanating // from the monitored tokens. 
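	// (The loop below chains each hidden token onto the monitored token:
	//  forward links via setHiddenAfter(), backward links via setHiddenBefore(),
	//  which is what getHiddenAfter()/getHiddenBefore() later traverse.)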
consume(); RefToken p = monitored; // while hidden or discarded scarf tokens while ( hideMask.member(LA(1)->getType()) || discardMask.member(LA(1)->getType()) ) { if ( hideMask.member(LA(1)->getType()) ) { // attach the hidden token to the monitored in a chain // link forwards static_cast(p.get())->setHiddenAfter(LA(1)); // link backwards if (p != monitored) { //hidden cannot point to monitored tokens static_cast(LA(1).get())->setHiddenBefore(p); } p = lastHiddenToken = LA(1); } consume(); } return monitored; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/LLkParser.cpp0000644000175000017500000000262510522211615017267 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/LLkParser.cpp#2 $ */ #include "antlr/LLkParser.hpp" #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif ANTLR_USING_NAMESPACE(std) /**An LL(k) parser. * * @see antlr.Token * @see antlr.TokenBuffer * @see antlr.LL1Parser */ // LLkParser(int k_); LLkParser::LLkParser(const ParserSharedInputState& state, int k_) : Parser(state), k(k_) { } LLkParser::LLkParser(TokenBuffer& tokenBuf, int k_) : Parser(tokenBuf), k(k_) { } LLkParser::LLkParser(TokenStream& lexer, int k_) : Parser(new TokenBuffer(lexer)), k(k_) { } void LLkParser::trace(const char* ee, const char* rname) { traceIndent(); cout << ee << rname << ((inputState->guessing>0)?"; [guessing]":"; "); for (int i = 1; i <= k; i++) { if (i != 1) { cout << ", "; } cout << "LA(" << i << ")=="; string temp; try { temp = LT(i)->getText().c_str(); } catch( ANTLRException& ae ) { temp = "[error: "; temp += ae.toString(); temp += ']'; } cout << temp; } cout << endl; } void LLkParser::traceIn(const char* rname) { traceDepth++; trace("> ",rname); } void LLkParser::traceOut(const char* rname) { trace("< ",rname); traceDepth--; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/NoViableAltException.cpp0000644000175000017500000000243210522211615021443 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/NoViableAltException.cpp#2 $ */ #include "antlr/NoViableAltException.hpp" #include "antlr/String.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif ANTLR_USING_NAMESPACE(std) NoViableAltException::NoViableAltException(RefAST t) : RecognitionException("NoViableAlt","",-1,-1), token(0), node(t) { } NoViableAltException::NoViableAltException( RefToken t, const ANTLR_USE_NAMESPACE(std)string& fileName_ ) : RecognitionException("NoViableAlt",fileName_,t->getLine(),t->getColumn()), token(t), node(nullASTptr) { } ANTLR_USE_NAMESPACE(std)string NoViableAltException::getMessage() const { if (token) { if( token->getType() == Token::EOF_TYPE ) return string("unexpected end of file"); else if( token->getType() == Token::NULL_TREE_LOOKAHEAD ) return string("unexpected end of tree"); else return string("unexpected token: ")+token->getText(); } // must a tree parser error if token==null if (!node) return "unexpected end of subtree"; return string("unexpected AST node: ")+node->toString(); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/TokenStreamSelector.cpp0000644000175000017500000000500110522211615021354 0ustar twernertwerner/* ANTLR Translator Generator * Project led by 
Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/TokenStreamSelector.cpp#2 $ */ #include "antlr/TokenStreamSelector.hpp" #include "antlr/TokenStreamRetryException.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** A token stream MUX (multiplexor) knows about n token streams * and can multiplex them onto the same channel for use by token * stream consumer like a parser. This is a way to have multiple * lexers break up the same input stream for a single parser. * Or, you can have multiple instances of the same lexer handle * multiple input streams; this works great for includes. */ TokenStreamSelector::TokenStreamSelector() : input(0) { } TokenStreamSelector::~TokenStreamSelector() { } void TokenStreamSelector::addInputStream(TokenStream* stream, const ANTLR_USE_NAMESPACE(std)string& key) { inputStreamNames[key] = stream; } TokenStream* TokenStreamSelector::getCurrentStream() const { return input; } TokenStream* TokenStreamSelector::getStream(const ANTLR_USE_NAMESPACE(std)string& sname) const { inputStreamNames_coll::const_iterator i = inputStreamNames.find(sname); if (i == inputStreamNames.end()) { throw ANTLR_USE_NAMESPACE(std)string("TokenStream ")+sname+" not found"; } return (*i).second; } RefToken TokenStreamSelector::nextToken() { // keep looking for a token until you don't // get a retry exception for (;;) { try { return input->nextToken(); } catch (TokenStreamRetryException&) { // just retry "forever" } } } TokenStream* TokenStreamSelector::pop() { TokenStream* stream = streamStack.top(); streamStack.pop(); select(stream); return stream; } void TokenStreamSelector::push(TokenStream* stream) { streamStack.push(input); select(stream); } void TokenStreamSelector::push(const ANTLR_USE_NAMESPACE(std)string& sname) { streamStack.push(input); select(sname); } void TokenStreamSelector::retry() { throw TokenStreamRetryException(); } /** Set the stream without pushing old stream */ void TokenStreamSelector::select(TokenStream* stream) { input = stream; } void TokenStreamSelector::select(const ANTLR_USE_NAMESPACE(std)string& sname) { inputStreamNames_coll::const_iterator i = inputStreamNames.find(sname); if (i == inputStreamNames.end()) { throw ANTLR_USE_NAMESPACE(std)string("TokenStream ")+sname+" not found"; } input = (*i).second; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/Makefile.in0000755000175000017500000001176210522211615016776 0ustar twernertwerner############################################################################## # $Id:$ ############################################################################### ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stdvars@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ## do not change this value subdir=lib/cpp/src all : lib lib: @ANTLR_LIB@ compile: $(antlr_obj_FILES) antlr_cxx_FILES = \ @abs_top_srcdir@/lib/cpp/src/ANTLRUtil.cpp \ @abs_top_srcdir@/lib/cpp/src/ASTFactory.cpp \ @abs_top_srcdir@/lib/cpp/src/ASTNULLType.cpp \ @abs_top_srcdir@/lib/cpp/src/ASTRefCount.cpp \ @abs_top_srcdir@/lib/cpp/src/BaseAST.cpp \ @abs_top_srcdir@/lib/cpp/src/BitSet.cpp \ @abs_top_srcdir@/lib/cpp/src/CharBuffer.cpp \ @abs_top_srcdir@/lib/cpp/src/CharScanner.cpp \ @abs_top_srcdir@/lib/cpp/src/CommonAST.cpp \ @abs_top_srcdir@/lib/cpp/src/CommonASTWithHiddenTokens.cpp \ @abs_top_srcdir@/lib/cpp/src/CommonHiddenStreamToken.cpp \ @abs_top_srcdir@/lib/cpp/src/CommonToken.cpp \ 
@abs_top_srcdir@/lib/cpp/src/InputBuffer.cpp \ @abs_top_srcdir@/lib/cpp/src/LLkParser.cpp \ @abs_top_srcdir@/lib/cpp/src/MismatchedCharException.cpp \ @abs_top_srcdir@/lib/cpp/src/MismatchedTokenException.cpp \ @abs_top_srcdir@/lib/cpp/src/NoViableAltException.cpp \ @abs_top_srcdir@/lib/cpp/src/NoViableAltForCharException.cpp \ @abs_top_srcdir@/lib/cpp/src/Parser.cpp \ @abs_top_srcdir@/lib/cpp/src/RecognitionException.cpp \ @abs_top_srcdir@/lib/cpp/src/String.cpp \ @abs_top_srcdir@/lib/cpp/src/Token.cpp \ @abs_top_srcdir@/lib/cpp/src/TokenBuffer.cpp \ @abs_top_srcdir@/lib/cpp/src/TokenStreamBasicFilter.cpp \ @abs_top_srcdir@/lib/cpp/src/TokenStreamHiddenTokenFilter.cpp \ @abs_top_srcdir@/lib/cpp/src/TokenStreamSelector.cpp \ @abs_top_srcdir@/lib/cpp/src/TokenStreamRewriteEngine.cpp \ @abs_top_srcdir@/lib/cpp/src/TreeParser.cpp \ @abs_top_srcdir@/lib/cpp/src/TokenRefCount.cpp \ $(eol) ## contents of this varialbe could also be processed by ## some advanced GNU make 'scripting' features. This may ## simplify maintenance but makes Makefile far less read- ## able and non-portable. antlr_obj_FILES = \ @abs_this_builddir@/lib/cpp/src/ANTLRUtil@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/ASTFactory@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/ASTNULLType@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/ASTRefCount@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/BaseAST@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/BitSet@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/CharBuffer@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/CharScanner@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/CommonAST@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/CommonASTWithHiddenTokens@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/CommonHiddenStreamToken@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/CommonToken@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/InputBuffer@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/LLkParser@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/MismatchedCharException@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/MismatchedTokenException@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/NoViableAltException@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/NoViableAltForCharException@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/Parser@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/RecognitionException@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/String@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/Token@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/TokenBuffer@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/TokenStreamBasicFilter@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/TokenStreamHiddenTokenFilter@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/TokenStreamSelector@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/TokenStreamRewriteEngine@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/TreeParser@OBJEXT@ \ @abs_this_builddir@/lib/cpp/src/TokenRefCount@OBJEXT@ \ $(eol) ## GNU make - how to make object file @abs_this_builddir@/lib/cpp/src/%@OBJEXT@ : @abs_top_srcdir@/lib/cpp/src/%.cpp @ @CXX_COMPILE_CMD@ $< ## Build static library. How the library is build as well as ## libraries's name is hidden by our script, ie. configured. @ANTLR_LIB@ :: $(antlr_obj_FILES) @ @CXX_LIB_CMD@ $(antlr_obj_FILES) clean: @RMF@ *.obj *.o *.a *.lib *.so *.dll *~ @ANTLR_LIB@ ## use this target if you just want to rebuild the lib without ## compiling again. clean-lib: @RMF@ @ANTLR_LIB@ distclean: clean @RMF@ Makefile test: install: this-install antlr_lib_FILES = \ @ANTLR_LIB@ \ $(eol) this-install: @ANTLR_LIB@ @$(MKDIR) -p "$(libdir)" @@ECHO@ "install C++ core files .. 
" @for f in $(antlr_lib_FILES) ; do \ @ECHO@ "install $${f}" ; \ if test -f "$${f}" ; then \ $(INSTALL) -m 444 "$${f}" "$(libdir)" ; \ fi ;\ done .PHONY: all clean distclean compile lib install test clean-lib this-install ## dependencies $(antlr_obj_FILES) : @abs_this_builddir@/scripts/cxx.sh @ANTLR_LIB@ :: @abs_this_builddir@/scripts/lib.sh ## other dependencies to be listed below ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx @stddeps@ ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx antlr-2.7.7/lib/cpp/src/ANTLRUtil.cpp0000644000175000017500000000707410522211615017151 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** Eat whitespace from the input stream * @param is the stream to read from */ ANTLR_USE_NAMESPACE(std)istream& eatwhite( ANTLR_USE_NAMESPACE(std)istream& is ) { char c; while( is.get(c) ) { #ifdef ANTLR_CCTYPE_NEEDS_STD if( !ANTLR_USE_NAMESPACE(std)isspace(c) ) #else if( !isspace(c) ) #endif { is.putback(c); break; } } return is; } /** Read a string enclosed by '"' from a stream. Also handles escaping of \". * Skips leading whitespace. * @param in the istream to read from. * @returns the string read from file exclusive the '"' * @throws IOException if string is badly formatted */ ANTLR_USE_NAMESPACE(std)string read_string( ANTLR_USE_NAMESPACE(std)istream& in ) { char ch; ANTLR_USE_NAMESPACE(std)string ret(""); // States for a simple state machine... enum { START, READING, ESCAPE, FINISHED }; int state = START; eatwhite(in); while( state != FINISHED && in.get(ch) ) { switch( state ) { case START: // start state: check wether starting with " then switch to READING if( ch != '"' ) throw IOException("string must start with '\"'"); state = READING; continue; case READING: // reading state: look out for escape sequences and closing " if( ch == '\\' ) // got escape sequence { state = ESCAPE; continue; } if( ch == '"' ) // close quote -> stop { state = FINISHED; continue; } ret += ch; // else append... continue; case ESCAPE: switch(ch) { case '\\': ret += ch; state = READING; continue; case '"': ret += ch; state = READING; continue; case '0': ret += '\0'; state = READING; continue; default: // unrecognized escape is not mapped ret += '\\'; ret += ch; state = READING; continue; } } } if( state != FINISHED ) throw IOException("badly formatted string: "+ret); return ret; } /* Read a ([A-Z][0-9][a-z]_)* kindoff thing. Skips leading whitespace. * @param in the istream to read from. */ ANTLR_USE_NAMESPACE(std)string read_identifier( ANTLR_USE_NAMESPACE(std)istream& in ) { char ch; ANTLR_USE_NAMESPACE(std)string ret(""); eatwhite(in); while( in.get(ch) ) { #ifdef ANTLR_CCTYPE_NEEDS_STD if( ANTLR_USE_NAMESPACE(std)isupper(ch) || ANTLR_USE_NAMESPACE(std)islower(ch) || ANTLR_USE_NAMESPACE(std)isdigit(ch) || ch == '_' ) #else if( isupper(ch) || islower(ch) || isdigit(ch) || ch == '_' ) #endif ret += ch; else { in.putback(ch); break; } } return ret; } /** Read a attribute="value" thing. Leading whitespace is skipped. * Between attribute and '=' no whitespace is allowed. After the '=' it is * permitted. * @param in the istream to read from. * @param attribute string the attribute name is put in * @param value string the value of the attribute is put in * @throws IOException if something is fishy. E.g. 
malformed quoting * or missing '=' */ void read_AttributeNValue( ANTLR_USE_NAMESPACE(std)istream& in, ANTLR_USE_NAMESPACE(std)string& attribute, ANTLR_USE_NAMESPACE(std)string& value ) { attribute = read_identifier(in); char ch; if( in.get(ch) && ch == '=' ) value = read_string(in); else throw IOException("invalid attribute=value thing "+attribute); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/ASTRefCount.cpp0000644000175000017500000000127410522211615017524 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/ASTRefCount.cpp#2 $ */ #include "antlr/ASTRefCount.hpp" #include "antlr/AST.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif ASTRef::ASTRef(AST* p) : ptr(p), count(1) { if (p && !p->ref) p->ref = this; } ASTRef::~ASTRef() { delete ptr; } ASTRef* ASTRef::getRef(const AST* p) { if (p) { AST* pp = const_cast(p); if (pp->ref) return pp->ref->increment(); else return new ASTRef(pp); } else return 0; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/TokenBuffer.cpp0000644000175000017500000000423210522211615017636 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/TokenBuffer.cpp#2 $ */ #include "antlr/TokenBuffer.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /**A Stream of Token objects fed to the parser from a TokenStream that can * be rewound via mark()/rewind() methods. *

 * A dynamic array is used to buffer up all the input tokens. Normally, * "k" tokens are stored in the buffer. More tokens may be stored during * guess mode (testing syntactic predicate), or when LT(i>k) is referenced. * Consumption of tokens is deferred. In other words, reading the next * token is not done by consume(), but deferred until needed by LA or LT. *

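 * A rough sketch of the mark/rewind cycle used while guessing (ID and LPAREN
 * are hypothetical token types; mark(), LA() and rewind() are defined below):
 *
 *    unsigned int m = buffer.mark();    // remember the current position
 *    bool looksLikeCall = ( buffer.LA(1) == ID && buffer.LA(2) == LPAREN );
 *    buffer.rewind( m );                // put the speculative lookahead back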
* * @see antlr.Token * @see antlr.TokenStream * @see antlr.TokenQueue */ /** Create a token buffer */ TokenBuffer::TokenBuffer( TokenStream& inp ) : input(inp) , nMarkers(0) , markerOffset(0) , numToConsume(0) { } TokenBuffer::~TokenBuffer( void ) { } /** Ensure that the token buffer is sufficiently full */ void TokenBuffer::fill(unsigned int amount) { syncConsume(); // Fill the buffer sufficiently to hold needed tokens while (queue.entries() < (amount + markerOffset)) { // Append the next token queue.append(input.nextToken()); } } /** Get a lookahead token value */ int TokenBuffer::LA(unsigned int i) { fill(i); return queue.elementAt(markerOffset+i-1)->getType(); } /** Get a lookahead token */ RefToken TokenBuffer::LT(unsigned int i) { fill(i); return queue.elementAt(markerOffset+i-1); } /** Return an integer marker that can be used to rewind the buffer to * its current state. */ unsigned int TokenBuffer::mark() { syncConsume(); nMarkers++; return markerOffset; } /**Rewind the token buffer to a marker. * @param mark Marker returned previously from mark() */ void TokenBuffer::rewind(unsigned int mark) { syncConsume(); markerOffset=mark; nMarkers--; } /// Get number of non-consumed tokens unsigned int TokenBuffer::entries() const { return queue.entries() - markerOffset; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/InputBuffer.cpp0000644000175000017500000000346710522211615017666 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/InputBuffer.cpp#2 $ */ #include "antlr/config.hpp" #include "antlr/InputBuffer.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** Ensure that the character buffer is sufficiently full */ void InputBuffer::fill(unsigned int amount) { syncConsume(); // Fill the buffer sufficiently to hold needed characters while (queue.entries() < amount + markerOffset) { // Append the next character queue.append(getChar()); } } /** get the current lookahead characters as a string * @warning it may treat 0 and EOF values wrong */ ANTLR_USE_NAMESPACE(std)string InputBuffer::getLAChars( void ) const { ANTLR_USE_NAMESPACE(std)string ret; for(unsigned int i = markerOffset; i < queue.entries(); i++) ret += queue.elementAt(i); return ret; } /** get the current marked characters as a string * @warning it may treat 0 and EOF values wrong */ ANTLR_USE_NAMESPACE(std)string InputBuffer::getMarkedChars( void ) const { ANTLR_USE_NAMESPACE(std)string ret; for(unsigned int i = 0; i < markerOffset; i++) ret += queue.elementAt(i); return ret; } /** Return an integer marker that can be used to rewind the buffer to * its current state. */ unsigned int InputBuffer::mark() { syncConsume(); nMarkers++; return markerOffset; } /** Rewind the character buffer to a marker. 
* @param mark Marker returned previously from mark() */ void InputBuffer::rewind(unsigned int mark) { syncConsume(); markerOffset = mark; nMarkers--; } unsigned int InputBuffer::entries() const { //assert(queue.entries() >= markerOffset); return queue.entries() - markerOffset; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/CommonAST.cpp0000644000175000017500000000176410522211615017233 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/CommonAST.cpp#2 $ */ #include "antlr/config.hpp" #include #include #include "antlr/CommonAST.hpp" #include "antlr/ANTLRUtil.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif const char* const CommonAST::TYPE_NAME = "CommonAST"; #ifdef ANTLR_SUPPORT_XML void CommonAST::initialize( ANTLR_USE_NAMESPACE(std)istream& in ) { ANTLR_USE_NAMESPACE(std)string t1, t2, text; // text read_AttributeNValue( in, t1, text ); read_AttributeNValue( in, t1, t2 ); #ifdef ANTLR_ATOI_IN_STD int type = ANTLR_USE_NAMESPACE(std)atoi(t2.c_str()); #else int type = atoi(t2.c_str()); #endif // initialize first part of AST. this->initialize( type, text ); } #endif RefAST CommonAST::factory() { return RefAST(new CommonAST); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/BitSet.cpp0000644000175000017500000000227010522211615016616 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/BitSet.cpp#2 $ */ #include "antlr/BitSet.hpp" #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif BitSet::BitSet(unsigned int nbits) : storage(nbits) { for (unsigned int i = 0; i < nbits ; i++ ) storage[i] = false; } BitSet::BitSet( const unsigned long* bits_, unsigned int nlongs ) : storage(nlongs*32) { for ( unsigned int i = 0 ; i < (nlongs * 32); i++) storage[i] = (bits_[i>>5] & (1UL << (i&31))) ? 
true : false; } BitSet::~BitSet() { } void BitSet::add(unsigned int el) { if( el >= storage.size() ) storage.resize( el+1, false ); storage[el] = true; } bool BitSet::member(unsigned int el) const { if ( el >= storage.size()) return false; return storage[el]; } ANTLR_USE_NAMESPACE(std)vector BitSet::toArray() const { ANTLR_USE_NAMESPACE(std)vector elems; for (unsigned int i = 0; i < storage.size(); i++) { if (storage[i]) elems.push_back(i); } return elems; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/TokenRefCount.cpp0000644000175000017500000000121410522211615020147 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ #include "antlr/TokenRefCount.hpp" #include "antlr/Token.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif TokenRef::TokenRef(Token* p) : ptr(p), count(1) { if (p && !p->ref) p->ref = this; } TokenRef::~TokenRef() { delete ptr; } TokenRef* TokenRef::getRef(const Token* p) { if (p) { Token* pp = const_cast(p); if (pp->ref) return pp->ref->increment(); else return new TokenRef(pp); } else return 0; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/RecognitionException.cpp0000644000175000017500000000326110522211615021564 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/RecognitionException.cpp#2 $ */ #include "antlr/RecognitionException.hpp" #include "antlr/String.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif RecognitionException::RecognitionException() : ANTLRException("parsing error") , line(-1) , column(-1) { } RecognitionException::RecognitionException(const ANTLR_USE_NAMESPACE(std)string& s) : ANTLRException(s) , line(-1) , column(-1) { } RecognitionException::RecognitionException(const ANTLR_USE_NAMESPACE(std)string& s, const ANTLR_USE_NAMESPACE(std)string& fileName_, int line_,int column_) : ANTLRException(s) , fileName(fileName_) , line(line_) , column(column_) { } ANTLR_USE_NAMESPACE(std)string RecognitionException::getFileLineColumnString() const { ANTLR_USE_NAMESPACE(std)string fileLineColumnString; if ( fileName.length() > 0 ) fileLineColumnString = fileName + ":"; if ( line != -1 ) { if ( fileName.length() == 0 ) fileLineColumnString = fileLineColumnString + "line "; fileLineColumnString = fileLineColumnString + line; if ( column != -1 ) fileLineColumnString = fileLineColumnString + ":" + column; fileLineColumnString = fileLineColumnString + ":"; } fileLineColumnString = fileLineColumnString + " "; return fileLineColumnString; } ANTLR_USE_NAMESPACE(std)string RecognitionException::toString() const { return getFileLineColumnString()+getMessage(); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/TreeParser.cpp0000644000175000017500000000405010522211615017476 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/TreeParser.cpp#2 $ */ #include "antlr/TreeParser.hpp" #include "antlr/ASTNULLType.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** The AST Null object; the parsing cursor is set to this when * it is found to be null. 
This way, we can test the * token type of a node without having to have tests for null * everywhere. */ RefAST TreeParser::ASTNULL(new ASTNULLType); /** Parser error-reporting function can be overridden in subclass */ void TreeParser::reportError(const RecognitionException& ex) { ANTLR_USE_NAMESPACE(std)cerr << ex.toString().c_str() << ANTLR_USE_NAMESPACE(std)endl; } /** Parser error-reporting function can be overridden in subclass */ void TreeParser::reportError(const ANTLR_USE_NAMESPACE(std)string& s) { ANTLR_USE_NAMESPACE(std)cerr << "error: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; } /** Parser warning-reporting function can be overridden in subclass */ void TreeParser::reportWarning(const ANTLR_USE_NAMESPACE(std)string& s) { ANTLR_USE_NAMESPACE(std)cerr << "warning: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; } /** Procedure to write out an indent for traceIn and traceOut */ void TreeParser::traceIndent() { for( int i = 0; i < traceDepth; i++ ) ANTLR_USE_NAMESPACE(std)cout << " "; } void TreeParser::traceIn(const char* rname, RefAST t) { traceDepth++; traceIndent(); ANTLR_USE_NAMESPACE(std)cout << "> " << rname << "(" << (t ? t->toString().c_str() : "null") << ")" << ((inputState->guessing>0)?" [guessing]":"") << ANTLR_USE_NAMESPACE(std)endl; } void TreeParser::traceOut(const char* rname, RefAST t) { traceIndent(); ANTLR_USE_NAMESPACE(std)cout << "< " << rname << "(" << (t ? t->toString().c_str() : "null") << ")" << ((inputState->guessing>0)?" [guessing]":"") << ANTLR_USE_NAMESPACE(std)endl; traceDepth--; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/String.cpp0000644000175000017500000000311310522211615016667 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/String.cpp#2 $ */ #include "antlr/String.hpp" #include #ifdef HAS_NOT_CSTDIO_H #include #else #include #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif // wh: hack for Borland C++ 5.6 #if __BORLANDC__ using std::sprintf; #endif // RK: should be using snprintf actually... (or stringstream) ANTLR_C_USING(sprintf) ANTLR_USE_NAMESPACE(std)string operator+( const ANTLR_USE_NAMESPACE(std)string& lhs, const int rhs ) { char tmp[100]; sprintf(tmp,"%d",rhs); return lhs+tmp; } ANTLR_USE_NAMESPACE(std)string operator+( const ANTLR_USE_NAMESPACE(std)string& lhs, size_t rhs ) { char tmp[100]; sprintf(tmp,"%u",rhs); return lhs+tmp; } /** Convert character to readable string */ ANTLR_USE_NAMESPACE(std)string charName(int ch) { if (ch == EOF) return "EOF"; else { ANTLR_USE_NAMESPACE(std)string s; // when you think you've seen it all.. an isprint that crashes... 
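		// Mask to 8 bits first; a printable character is quoted as-is below,
		// anything else becomes a two-digit hex escape in which each nibble
		// maps to '0'..'9' via |0x30 and to 'A'..'F' via +0x37.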
ch = ch & 0xFF; #ifdef ANTLR_CCTYPE_NEEDS_STD if( ANTLR_USE_NAMESPACE(std)isprint( ch ) ) #else if( isprint( ch ) ) #endif { s.append("'"); s += ch; s.append("'"); // s += "'"+ch+"'"; } else { s += "0x"; unsigned int t = ch >> 4; if( t < 10 ) s += t | 0x30; else s += t + 0x37; t = ch & 0xF; if( t < 10 ) s += t | 0x30; else s += t + 0x37; } return s; } } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/BaseAST.cpp0000644000175000017500000001510710522211615016651 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/BaseAST.cpp#2 $ */ #include "antlr/config.hpp" #include #include "antlr/AST.hpp" #include "antlr/BaseAST.hpp" ANTLR_USING_NAMESPACE(std) #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif size_t BaseAST::getNumberOfChildren() const { RefBaseAST t = this->down; size_t n = 0; if( t ) { n = 1; while( t->right ) { t = t->right; n++; } return n; } return n; } void BaseAST::doWorkForFindAll( ANTLR_USE_NAMESPACE(std)vector& v, RefAST target,bool partialMatch) { // Start walking sibling lists, looking for matches. for (RefAST sibling=this; sibling; sibling=sibling->getNextSibling()) { if ( (partialMatch && sibling->equalsTreePartial(target)) || (!partialMatch && sibling->equalsTree(target)) ) { v.push_back(sibling); } // regardless of match or not, check any children for matches if ( sibling->getFirstChild() ) { RefBaseAST(sibling->getFirstChild())->doWorkForFindAll(v, target, partialMatch); } } } /** Is t an exact structural and equals() match of this tree. The * 'this' reference is considered the start of a sibling list. */ bool BaseAST::equalsList(RefAST t) const { // the empty tree is not a match of any non-null tree. if (!t) return false; // Otherwise, start walking sibling lists. First mismatch, return false. RefAST sibling=this; for (;sibling && t; sibling=sibling->getNextSibling(), t=t->getNextSibling()) { // as a quick optimization, check roots first. if (!sibling->equals(t)) return false; // if roots match, do full list match test on children. if (sibling->getFirstChild()) { if (!sibling->getFirstChild()->equalsList(t->getFirstChild())) return false; } // sibling has no kids, make sure t doesn't either else if (t->getFirstChild()) return false; } if (!sibling && !t) return true; // one sibling list has more than the other return false; } /** Is 'sub' a subtree of this list? * The siblings of the root are NOT ignored. */ bool BaseAST::equalsListPartial(RefAST sub) const { // the empty tree is always a subset of any tree. if (!sub) return true; // Otherwise, start walking sibling lists. First mismatch, return false. RefAST sibling=this; for (;sibling && sub; sibling=sibling->getNextSibling(), sub=sub->getNextSibling()) { // as a quick optimization, check roots first. if (!sibling->equals(sub)) return false; // if roots match, do partial list match test on children. if (sibling->getFirstChild()) if (!sibling->getFirstChild()->equalsListPartial(sub->getFirstChild())) return false; } if (!sibling && sub) // nothing left to match in this tree, but subtree has more return false; // either both are null or sibling has more, but subtree doesn't return true; } /** Is tree rooted at 'this' equal to 't'? The siblings * of 'this' are ignored. */ bool BaseAST::equalsTree(RefAST t) const { // check roots first if (!equals(t)) return false; // if roots match, do full list match test on children. 
if (getFirstChild()) { if (!getFirstChild()->equalsList(t->getFirstChild())) return false; } // sibling has no kids, make sure t doesn't either else if (t->getFirstChild()) return false; return true; } /** Is 'sub' a subtree of the tree rooted at 'this'? The siblings * of 'this' are ignored. */ bool BaseAST::equalsTreePartial(RefAST sub) const { // the empty tree is always a subset of any tree. if (!sub) return true; // check roots first if (!equals(sub)) return false; // if roots match, do full list partial match test on children. if (getFirstChild()) if (!getFirstChild()->equalsListPartial(sub->getFirstChild())) return false; return true; } /** Walk the tree looking for all exact subtree matches. Return * an ASTEnumerator that lets the caller walk the list * of subtree roots found herein. */ ANTLR_USE_NAMESPACE(std)vector BaseAST::findAll(RefAST target) { ANTLR_USE_NAMESPACE(std)vector roots; // the empty tree cannot result in an enumeration if (target) { doWorkForFindAll(roots,target,false); // find all matches recursively } return roots; } /** Walk the tree looking for all subtrees. Return * an ASTEnumerator that lets the caller walk the list * of subtree roots found herein. */ ANTLR_USE_NAMESPACE(std)vector BaseAST::findAllPartial(RefAST target) { ANTLR_USE_NAMESPACE(std)vector roots; // the empty tree cannot result in an enumeration if (target) doWorkForFindAll(roots,target,true); // find all matches recursively return roots; } ANTLR_USE_NAMESPACE(std)string BaseAST::toStringList() const { ANTLR_USE_NAMESPACE(std)string ts=""; if (getFirstChild()) { ts+=" ( "; ts+=toString(); ts+=getFirstChild()->toStringList(); ts+=" )"; } else { ts+=" "; ts+=toString(); } if (getNextSibling()) ts+=getNextSibling()->toStringList(); return ts; } ANTLR_USE_NAMESPACE(std)string BaseAST::toStringTree() const { ANTLR_USE_NAMESPACE(std)string ts = ""; if (getFirstChild()) { ts+=" ( "; ts+=toString(); ts+=getFirstChild()->toStringList(); ts+=" )"; } else { ts+=" "; ts+=toString(); } return ts; } #ifdef ANTLR_SUPPORT_XML /* This whole XML output stuff needs a little bit more thought * I'd like to store extra XML data in the node. e.g. for custom ast's * with for instance symboltable references. This * should be more pluggable.. * @returns boolean value indicating wether a closetag should be produced. */ bool BaseAST::attributesToStream( ANTLR_USE_NAMESPACE(std)ostream& out ) const { out << "text=\"" << this->getText() << "\" type=\"" << this->getType() << "\""; return false; } void BaseAST::toStream( ANTLR_USE_NAMESPACE(std)ostream& out ) const { for( RefAST node = this; node != 0; node = node->getNextSibling() ) { out << "<" << this->typeName() << " "; // Write out attributes and if there is extra data... bool need_close_tag = node->attributesToStream( out ); if( need_close_tag ) { // got children so write them... if( node->getFirstChild() != 0 ) node->getFirstChild()->toStream( out ); // and a closing tag.. 
out << "typeName() << ">" << endl; } } } #endif // this is nasty, but it makes the code generation easier ANTLR_API RefAST nullAST; #if defined(_MSC_VER) && !defined(__ICL) // Microsoft Visual C++ extern ANTLR_API AST* const nullASTptr = 0; #else ANTLR_API AST* const nullASTptr = 0; #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/CommonHiddenStreamToken.cpp0000644000175000017500000000213610522211615022146 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/CommonHiddenStreamToken.cpp#2 $ */ #include "antlr/CommonHiddenStreamToken.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif CommonHiddenStreamToken::CommonHiddenStreamToken() : CommonToken() { } CommonHiddenStreamToken::CommonHiddenStreamToken(int t, const ANTLR_USE_NAMESPACE(std)string& txt) : CommonToken(t,txt) { } CommonHiddenStreamToken::CommonHiddenStreamToken(const ANTLR_USE_NAMESPACE(std)string& s) : CommonToken(s) { } RefToken CommonHiddenStreamToken::getHiddenAfter() { return hiddenAfter; } RefToken CommonHiddenStreamToken::getHiddenBefore() { return hiddenBefore; } RefToken CommonHiddenStreamToken::factory() { return RefToken(new CommonHiddenStreamToken); } void CommonHiddenStreamToken::setHiddenAfter(RefToken t) { hiddenAfter = t; } void CommonHiddenStreamToken::setHiddenBefore(RefToken t) { hiddenBefore = t; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/MismatchedCharException.cpp0000644000175000017500000000621510522211615022162 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/MismatchedCharException.cpp#2 $ */ #include "antlr/CharScanner.hpp" #include "antlr/MismatchedCharException.hpp" #include "antlr/String.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif MismatchedCharException::MismatchedCharException() : RecognitionException("Mismatched char") {} // Expected range / not range MismatchedCharException::MismatchedCharException( int c, int lower, int upper_, bool matchNot, CharScanner* scanner_ ) : RecognitionException("Mismatched char", scanner_->getFilename(), scanner_->getLine(), scanner_->getColumn()) , mismatchType(matchNot ? NOT_RANGE : RANGE) , foundChar(c) , expecting(lower) , upper(upper_) , scanner(scanner_) { } // Expected token / not token MismatchedCharException::MismatchedCharException( int c, int expecting_, bool matchNot, CharScanner* scanner_ ) : RecognitionException("Mismatched char", scanner_->getFilename(), scanner_->getLine(), scanner_->getColumn()) , mismatchType(matchNot ? NOT_CHAR : CHAR) , foundChar(c) , expecting(expecting_) , scanner(scanner_) { } // Expected BitSet / not BitSet MismatchedCharException::MismatchedCharException( int c, BitSet set_, bool matchNot, CharScanner* scanner_ ) : RecognitionException("Mismatched char", scanner_->getFilename(), scanner_->getLine(), scanner_->getColumn()) , mismatchType(matchNot ? 
NOT_SET : SET) , foundChar(c) , set(set_) , scanner(scanner_) { } ANTLR_USE_NAMESPACE(std)string MismatchedCharException::getMessage() const { ANTLR_USE_NAMESPACE(std)string s; switch (mismatchType) { case CHAR : s += "expecting '" + charName(expecting) + "', found '" + charName(foundChar) + "'"; break; case NOT_CHAR : s += "expecting anything but '" + charName(expecting) + "'; got it anyway"; break; case RANGE : s += "expecting token in range: '" + charName(expecting) + "'..'" + charName(upper) + "', found '" + charName(foundChar) + "'"; break; case NOT_RANGE : s += "expecting token NOT in range: " + charName(expecting) + "'..'" + charName(upper) + "', found '" + charName(foundChar) + "'"; break; case SET : case NOT_SET : { s += ANTLR_USE_NAMESPACE(std)string("expecting ") + (mismatchType == NOT_SET ? "NOT " : "") + "one of ("; ANTLR_USE_NAMESPACE(std)vector elems = set.toArray(); for ( unsigned int i = 0; i < elems.size(); i++ ) { s += " '"; s += charName(elems[i]); s += "'"; } s += "), found '" + charName(foundChar) + "'"; } break; default : s += RecognitionException::getMessage(); break; } return s; } #ifndef NO_STATIC_CONSTS const int MismatchedCharException::CHAR; const int MismatchedCharException::NOT_CHAR; const int MismatchedCharException::RANGE; const int MismatchedCharException::NOT_RANGE; const int MismatchedCharException::SET; const int MismatchedCharException::NOT_SET; #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/Token.cpp0000644000175000017500000000241710522211615016507 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/Token.cpp#2 $ */ #include "antlr/Token.hpp" #include "antlr/String.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif int Token::getColumn() const { return 0; } int Token::getLine() const { return 0; } ANTLR_USE_NAMESPACE(std)string Token::getText() const { return ""; } int Token::getType() const { return type; } void Token::setColumn(int) { } void Token::setLine(int) { } void Token::setText(const ANTLR_USE_NAMESPACE(std)string&) { } void Token::setType(int t) { type = t; } void Token::setFilename(const ANTLR_USE_NAMESPACE(std)string&) { } ANTLR_USE_NAMESPACE(std)string emptyString(""); const ANTLR_USE_NAMESPACE(std)string& Token::getFilename() const { return emptyString; } ANTLR_USE_NAMESPACE(std)string Token::toString() const { return "[\""+getText()+"\",<"+type+">]"; } ANTLR_API RefToken nullToken; #ifndef NO_STATIC_CONSTS const int Token::MIN_USER_TYPE; const int Token::NULL_TREE_LOOKAHEAD; const int Token::INVALID_TYPE; const int Token::EOF_TYPE; const int Token::SKIP; #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/TokenStreamRewriteEngine.cpp0000644000175000017500000001310710522211615022351 0ustar twernertwerner#include #include #include #include #include #include #include #include #include #include #include #include #include #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif #ifndef NO_STATIC_CONSTS const size_t TokenStreamRewriteEngine::MIN_TOKEN_INDEX = 0; const int TokenStreamRewriteEngine::PROGRAM_INIT_SIZE = 100; #endif const char* TokenStreamRewriteEngine::DEFAULT_PROGRAM_NAME = "default"; namespace { struct compareOperationIndex { typedef TokenStreamRewriteEngine::RewriteOperation RewriteOperation; bool operator() ( const RewriteOperation* a, const RewriteOperation* b ) const { return 
a->getIndex() < b->getIndex(); } }; struct dumpTokenWithIndex { dumpTokenWithIndex( ANTLR_USE_NAMESPACE(std)ostream& o ) : out(o) {} void operator() ( const RefTokenWithIndex& t ) { out << "[txt='" << t->getText() << "' tp=" << t->getType() << " idx=" << t->getIndex() << "]\n"; } ANTLR_USE_NAMESPACE(std)ostream& out; }; } TokenStreamRewriteEngine::TokenStreamRewriteEngine(TokenStream& upstream) : stream(upstream) , index(MIN_TOKEN_INDEX) , tokens() , programs() , discardMask() { } TokenStreamRewriteEngine::TokenStreamRewriteEngine(TokenStream& upstream, size_t initialSize ) : stream(upstream) , index(MIN_TOKEN_INDEX) , tokens(initialSize) , programs() , discardMask() { } RefToken TokenStreamRewriteEngine::nextToken( void ) { RefTokenWithIndex t; // suck tokens until end of stream or we find a non-discarded token do { t = RefTokenWithIndex(stream.nextToken()); if ( t ) { t->setIndex(index); // what is t's index in list? if ( t->getType() != Token::EOF_TYPE ) { tokens.push_back(t); // track all tokens except EOF } index++; // move to next position } } while ( t && discardMask.member(t->getType()) ); return RefToken(t); } void TokenStreamRewriteEngine::rollback( const std::string& programName, size_t instructionIndex ) { program_map::iterator rewrite = programs.find(programName); if( rewrite != programs.end() ) { operation_list& prog = rewrite->second; operation_list::iterator j = prog.begin(), end = prog.end(); std::advance(j,instructionIndex); if( j != end ) prog.erase(j, end); } } void TokenStreamRewriteEngine::originalToStream( std::ostream& out, size_t start, size_t end ) const { token_list::const_iterator s = tokens.begin(); std::advance( s, start ); token_list::const_iterator e = s; std::advance( e, end-start ); std::for_each( s, e, tokenToStream(out) ); } void TokenStreamRewriteEngine::toStream( std::ostream& out, const std::string& programName, size_t firstToken, size_t lastToken ) const { if( tokens.size() == 0 ) return; program_map::const_iterator rewriter = programs.find(programName); if ( rewriter == programs.end() ) return; // get the prog and some iterators in it... const operation_list& prog = rewriter->second; operation_list::const_iterator rewriteOpIndex = prog.begin(), rewriteOpEnd = prog.end(); size_t tokenCursor = firstToken; // make sure we don't run out of the tokens we have... 
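// The loop below interleaves two cursors: tokenCursor over the original token
// text and rewriteOpIndex over the index-sorted operation list. Original text
// is copied verbatim until the next operation's index is reached, every
// operation registered at that index is executed, and operations past the
// last requested token (typically appends) are flushed after the loop.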
if( lastToken > (tokens.size() - 1) ) lastToken = tokens.size() - 1; while ( tokenCursor <= lastToken ) { // std::cout << "tokenCursor = " << tokenCursor << " first prog index = " << (*rewriteOpIndex)->getIndex() << std::endl; if( rewriteOpIndex != rewriteOpEnd ) { size_t up_to_here = std::min(lastToken,(*rewriteOpIndex)->getIndex()); while( tokenCursor < up_to_here ) out << tokens[tokenCursor++]->getText(); } while ( rewriteOpIndex != rewriteOpEnd && tokenCursor == (*rewriteOpIndex)->getIndex() && tokenCursor <= lastToken ) { tokenCursor = (*rewriteOpIndex)->execute(out); ++rewriteOpIndex; } if( tokenCursor <= lastToken ) out << tokens[tokenCursor++]->getText(); } // std::cout << "Handling tail operations # left = " << std::distance(rewriteOpIndex,rewriteOpEnd) << std::endl; // now see if there are operations (append) beyond last token index std::for_each( rewriteOpIndex, rewriteOpEnd, executeOperation(out) ); rewriteOpIndex = rewriteOpEnd; } void TokenStreamRewriteEngine::toDebugStream( std::ostream& out, size_t start, size_t end ) const { token_list::const_iterator s = tokens.begin(); std::advance( s, start ); token_list::const_iterator e = s; std::advance( e, end-start ); std::for_each( s, e, dumpTokenWithIndex(out) ); } void TokenStreamRewriteEngine::addToSortedRewriteList( const std::string& programName, RewriteOperation* op ) { program_map::iterator rewrites = programs.find(programName); // check if we got the program already.. if ( rewrites == programs.end() ) { // no prog make a new one... operation_list ops; ops.push_back(op); programs.insert(std::make_pair(programName,ops)); return; } operation_list& prog = rewrites->second; if( prog.empty() ) { prog.push_back(op); return; } operation_list::iterator i, end = prog.end(); i = end; --i; // if at or beyond last op's index, just append if ( op->getIndex() >= (*i)->getIndex() ) { prog.push_back(op); // append to list of operations return; } i = prog.begin(); if( i != end ) { operation_list::iterator pos = std::upper_bound( i, end, op, compareOperationIndex() ); prog.insert(pos,op); } else prog.push_back(op); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/CommonASTWithHiddenTokens.cpp0000644000175000017500000000346610522211615022370 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/CommonASTWithHiddenTokens.cpp#2 $ */ #include "antlr/config.hpp" #include "antlr/AST.hpp" #include "antlr/BaseAST.hpp" #include "antlr/CommonAST.hpp" #include "antlr/CommonASTWithHiddenTokens.hpp" #include "antlr/CommonHiddenStreamToken.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif const char* const CommonASTWithHiddenTokens::TYPE_NAME = "CommonASTWithHiddenTokens"; // RK: Do not put constructor and destructor into the header file here.. // this triggers something very obscure in gcc 2.95.3 (and 3.0) // missing vtables and stuff. // Although this may be a problem with with binutils. 
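// A typical setup is sketched below; it assumes the grammar uses the standard
// ANTLR 2 hidden-token machinery (TokenStreamHiddenTokenFilter) so that
// hiddenBefore/hiddenAfter actually get populated:
//
//   ASTFactory factory("CommonASTWithHiddenTokens",
//                      &CommonASTWithHiddenTokens::factory);
//   parser.setASTFactory(&factory);
//
// Nodes built this way expose getHiddenBefore()/getHiddenAfter() links to the
// whitespace and comment tokens that surrounded their originating token.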
CommonASTWithHiddenTokens::CommonASTWithHiddenTokens() : CommonAST() { } CommonASTWithHiddenTokens::~CommonASTWithHiddenTokens() { } void CommonASTWithHiddenTokens::initialize(int t,const ANTLR_USE_NAMESPACE(std)string& txt) { CommonAST::initialize(t,txt); } void CommonASTWithHiddenTokens::initialize(RefAST t) { CommonAST::initialize(t); hiddenBefore = RefCommonASTWithHiddenTokens(t)->getHiddenBefore(); hiddenAfter = RefCommonASTWithHiddenTokens(t)->getHiddenAfter(); } void CommonASTWithHiddenTokens::initialize(RefToken t) { CommonAST::initialize(t); hiddenBefore = static_cast(t.get())->getHiddenBefore(); hiddenAfter = static_cast(t.get())->getHiddenAfter(); } RefAST CommonASTWithHiddenTokens::factory() { return RefAST(new CommonASTWithHiddenTokens); } RefAST CommonASTWithHiddenTokens::clone( void ) const { CommonASTWithHiddenTokens *ast = new CommonASTWithHiddenTokens( *this ); return RefAST(ast); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/CommonToken.cpp0000644000175000017500000000163510522211615017661 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/CommonToken.cpp#2 $ */ #include "antlr/CommonToken.hpp" #include "antlr/String.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif CommonToken::CommonToken() : Token(), line(1), col(1), text("") {} CommonToken::CommonToken(int t, const ANTLR_USE_NAMESPACE(std)string& txt) : Token(t) , line(1) , col(1) , text(txt) {} CommonToken::CommonToken(const ANTLR_USE_NAMESPACE(std)string& s) : Token() , line(1) , col(1) , text(s) {} ANTLR_USE_NAMESPACE(std)string CommonToken::toString() const { return "[\""+getText()+"\",<"+getType()+">,line="+getLine()+",column="+getColumn()+"]"; } RefToken CommonToken::factory() { return RefToken(new CommonToken); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/MismatchedTokenException.cpp0000644000175000017500000001246510522211615022371 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/MismatchedTokenException.cpp#2 $ */ #include "antlr/MismatchedTokenException.hpp" #include "antlr/String.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif MismatchedTokenException::MismatchedTokenException() : RecognitionException("Mismatched Token: expecting any AST node","",-1,-1) , token(0) , node(nullASTptr) , tokenNames(0) , numTokens(0) { } // Expected range / not range MismatchedTokenException::MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefAST node_, int lower, int upper_, bool matchNot ) : RecognitionException("Mismatched Token","",-1,-1) , token(0) , node(node_) , tokenText( (node_ ? node_->toString(): ANTLR_USE_NAMESPACE(std)string("")) ) , mismatchType(matchNot ? NOT_RANGE : RANGE) , expecting(lower) , upper(upper_) , tokenNames(tokenNames_) , numTokens(numTokens_) { } // Expected token / not token MismatchedTokenException::MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefAST node_, int expecting_, bool matchNot ) : RecognitionException("Mismatched Token","",-1,-1) , token(0) , node(node_) , tokenText( (node_ ? node_->toString(): ANTLR_USE_NAMESPACE(std)string("")) ) , mismatchType(matchNot ? 
NOT_TOKEN : TOKEN) , expecting(expecting_) , tokenNames(tokenNames_) , numTokens(numTokens_) { } // Expected BitSet / not BitSet MismatchedTokenException::MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefAST node_, BitSet set_, bool matchNot ) : RecognitionException("Mismatched Token","",-1,-1) , token(0) , node(node_) , tokenText( (node_ ? node_->toString(): ANTLR_USE_NAMESPACE(std)string("")) ) , mismatchType(matchNot ? NOT_SET : SET) , set(set_) , tokenNames(tokenNames_) , numTokens(numTokens_) { } // Expected range / not range MismatchedTokenException::MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefToken token_, int lower, int upper_, bool matchNot, const ANTLR_USE_NAMESPACE(std)string& fileName_ ) : RecognitionException("Mismatched Token",fileName_,token_->getLine(),token_->getColumn()) , token(token_) , node(nullASTptr) , tokenText(token_->getText()) , mismatchType(matchNot ? NOT_RANGE : RANGE) , expecting(lower) , upper(upper_) , tokenNames(tokenNames_) , numTokens(numTokens_) { } // Expected token / not token MismatchedTokenException::MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefToken token_, int expecting_, bool matchNot, const ANTLR_USE_NAMESPACE(std)string& fileName_ ) : RecognitionException("Mismatched Token",fileName_,token_->getLine(),token_->getColumn()) , token(token_) , node(nullASTptr) , tokenText(token_->getText()) , mismatchType(matchNot ? NOT_TOKEN : TOKEN) , expecting(expecting_) , tokenNames(tokenNames_) , numTokens(numTokens_) { } // Expected BitSet / not BitSet MismatchedTokenException::MismatchedTokenException( const char* const* tokenNames_, const int numTokens_, RefToken token_, BitSet set_, bool matchNot, const ANTLR_USE_NAMESPACE(std)string& fileName_ ) : RecognitionException("Mismatched Token",fileName_,token_->getLine(),token_->getColumn()) , token(token_) , node(nullASTptr) , tokenText(token_->getText()) , mismatchType(matchNot ? NOT_SET : SET) , set(set_) , tokenNames(tokenNames_) , numTokens(numTokens_) { } ANTLR_USE_NAMESPACE(std)string MismatchedTokenException::getMessage() const { ANTLR_USE_NAMESPACE(std)string s; switch (mismatchType) { case TOKEN: s += "expecting " + tokenName(expecting) + ", found '" + tokenText + "'"; break; case NOT_TOKEN: s += "expecting anything but " + tokenName(expecting) + "; got it anyway"; break; case RANGE: s += "expecting token in range: " + tokenName(expecting) + ".." + tokenName(upper) + ", found '" + tokenText + "'"; break; case NOT_RANGE: s += "expecting token NOT in range: " + tokenName(expecting) + ".." + tokenName(upper) + ", found '" + tokenText + "'"; break; case SET: case NOT_SET: { s += ANTLR_USE_NAMESPACE(std)string("expecting ") + (mismatchType == NOT_SET ? 
"NOT " : "") + "one of ("; ANTLR_USE_NAMESPACE(std)vector elems = set.toArray(); for ( unsigned int i = 0; i < elems.size(); i++ ) { s += " "; s += tokenName(elems[i]); } s += "), found '" + tokenText + "'"; } break; default: s = RecognitionException::getMessage(); break; } return s; } ANTLR_USE_NAMESPACE(std)string MismatchedTokenException::tokenName(int tokenType) const { if (tokenType == Token::INVALID_TYPE) return ""; else if (tokenType < 0 || tokenType >= numTokens) return ANTLR_USE_NAMESPACE(std)string("<") + tokenType + ">"; else return tokenNames[tokenType]; } #ifndef NO_STATIC_CONSTS const int MismatchedTokenException::TOKEN; const int MismatchedTokenException::NOT_TOKEN; const int MismatchedTokenException::RANGE; const int MismatchedTokenException::NOT_RANGE; const int MismatchedTokenException::SET; const int MismatchedTokenException::NOT_SET; #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/Parser.cpp0000644000175000017500000000652210522211615016664 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/Parser.cpp#2 $ */ #include "antlr/Parser.hpp" #include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** A generic ANTLR parser (LL(k) for k>=1) containing a bunch of * utility routines useful at any lookahead depth. We distinguish between * the LL(1) and LL(k) parsers because of efficiency. This may not be * necessary in the near future. * * Each parser object contains the state of the parse including a lookahead * cache (the form of which is determined by the subclass), whether or * not the parser is in guess mode, where tokens come from, etc... * *

 * During guess mode, the current lookahead token(s) and token type(s)
 * cache must be saved because the token stream may not have been informed
 * to save the token (via mark) before the try block.
 * Guessing is started by:
 *   1. saving the lookahead cache.
 *   2. marking the current position in the TokenBuffer.
 *   3. increasing the guessing level.
 *
 * After guessing, the parser state is restored by:
 *   1. restoring the lookahead cache.
 *   2. rewinding the TokenBuffer.
 *   3. decreasing the guessing level.
* * @see antlr.Token * @see antlr.TokenBuffer * @see antlr.TokenStream * @see antlr.LL1Parser * @see antlr.LLkParser */ bool DEBUG_PARSER = false; /** Parser error-reporting function can be overridden in subclass */ void Parser::reportError(const RecognitionException& ex) { ANTLR_USE_NAMESPACE(std)cerr << ex.toString().c_str() << ANTLR_USE_NAMESPACE(std)endl; } /** Parser error-reporting function can be overridden in subclass */ void Parser::reportError(const ANTLR_USE_NAMESPACE(std)string& s) { if ( getFilename()=="" ) ANTLR_USE_NAMESPACE(std)cerr << "error: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; else ANTLR_USE_NAMESPACE(std)cerr << getFilename().c_str() << ": error: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; } /** Parser warning-reporting function can be overridden in subclass */ void Parser::reportWarning(const ANTLR_USE_NAMESPACE(std)string& s) { if ( getFilename()=="" ) ANTLR_USE_NAMESPACE(std)cerr << "warning: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; else ANTLR_USE_NAMESPACE(std)cerr << getFilename().c_str() << ": warning: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; } /** Set or change the input token buffer */ // void setTokenBuffer(TokenBuffer* t); void Parser::traceIndent() { for( int i = 0; i < traceDepth; i++ ) ANTLR_USE_NAMESPACE(std)cout << " "; } void Parser::traceIn(const char* rname) { traceDepth++; for( int i = 0; i < traceDepth; i++ ) ANTLR_USE_NAMESPACE(std)cout << " "; ANTLR_USE_NAMESPACE(std)cout << "> " << rname << "; LA(1)==" << LT(1)->getText().c_str() << ((inputState->guessing>0)?" [guessing]":"") << ANTLR_USE_NAMESPACE(std)endl; } void Parser::traceOut(const char* rname) { for( int i = 0; i < traceDepth; i++ ) ANTLR_USE_NAMESPACE(std)cout << " "; ANTLR_USE_NAMESPACE(std)cout << "< " << rname << "; LA(1)==" << LT(1)->getText().c_str() << ((inputState->guessing>0)?" [guessing]":"") << ANTLR_USE_NAMESPACE(std)endl; traceDepth--; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/dll.cpp0000644000175000017500000002370510522211615016205 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ /* * DLL stub for MSVC++. Based upon versions of Stephen Naughton and Michael * T. Richter */ // RK: Uncommented by instruction of Alexander Lenski //#if _MSC_VER > 1000 //# pragma once //#endif // _MSC_VER > 1000 // Exclude rarely-used stuff from Windows headers #define WIN32_LEAN_AND_MEAN #include #if defined( _MSC_VER ) && ( _MSC_VER < 1300 ) # error "DLL Build not supported on old MSVC's" // Ok it seems to be possible with STLPort in stead of the vanilla MSVC STL // implementation. This needs some work though. 
(and don't try it if you're // not that familiar with compilers/building C++ DLL's in windows) #endif #include #include "antlr/config.hpp" #include "antlr/Token.hpp" #include "antlr/CircularQueue.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif // Take care of necessary implicit instantiations of templates from STL // This should take care of MSVC 7.0 #if defined( _MSC_VER ) && ( _MSC_VER == 1300 ) // these come from AST.hpp template class ANTLR_API ASTRefCount< AST >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< RefAST >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)vector< RefAST >; //template ANTLR_API int operator<( ASTRefCount< AST >, ASTRefCount< AST > ); // ASTFactory.hpp template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< factory_descriptor_* >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const char*, factory_type_ > >; template struct ANTLR_API ANTLR_USE_NAMESPACE(std)pair< const char*, factory_type_ >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Vector_val< factory_descriptor_*, ANTLR_USE_NAMESPACE(std)allocator< factory_descriptor_* > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)vector< factory_descriptor_* >; // BitSet.hpp template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< bool >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Vector_val< bool, ANTLR_USE_NAMESPACE(std)allocator< bool > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)vector< bool >; // CharScanner.hpp template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< ANTLR_USE_NAMESPACE(std)string, int > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, int > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)_Tree_nod< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, int, CharScannerLiteralsLess, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, int > >, false > >::_Node >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)_Tree_ptr< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, int, CharScannerLiteralsLess, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, int > >, false > >::_Nodeptr >; template struct ANTLR_API ANTLR_USE_NAMESPACE(std)pair< ANTLR_USE_NAMESPACE(std)string, int >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, int, CharScannerLiteralsLess, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, int > >,false >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tree_nod< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, int, CharScannerLiteralsLess, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, int > >,false > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tree_ptr< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, int, CharScannerLiteralsLess, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, int > >,false > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tree_val< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, int, CharScannerLiteralsLess, ANTLR_USE_NAMESPACE(std)allocator< 
ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, int > >,false > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tree< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, int, CharScannerLiteralsLess, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, int > >,false > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)map< ANTLR_USE_NAMESPACE(std)string, int, CharScannerLiteralsLess >; // CircularQueue.hpp // RK: it might well be that a load of these ints need to be unsigned ints // (made some more stuff unsigned) template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< int >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Vector_val< int, ANTLR_USE_NAMESPACE(std)allocator< int > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)vector< int >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)vector< int, ANTLR_USE_NAMESPACE(std)allocator< int > >; // template ANTLR_API inline int CircularQueue< int >::entries() const; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< RefToken >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Vector_val< RefToken, ANTLR_USE_NAMESPACE(std)allocator< RefToken > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)vector< RefToken >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)vector< RefToken, ANTLR_USE_NAMESPACE(std)allocator< RefToken > >; // template ANTLR_API inline int CircularQueue< RefToken >::entries() const; // CommonAST.hpp template class ANTLR_API ASTRefCount< CommonAST >; // CommonASTWithHiddenTokenTypes.hpp template class ANTLR_API ASTRefCount< CommonASTWithHiddenTokens >; // LexerSharedInputState.hpp template class ANTLR_API RefCount< LexerInputState >; // ParserSharedInputState.hpp template class ANTLR_API RefCount< ParserInputState >; // TokenStreamSelector.hpp template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< ANTLR_USE_NAMESPACE(std)string, TokenStream* > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, TokenStream* > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)_Tree_nod< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, TokenStream*, ANTLR_USE_NAMESPACE(std)less< ANTLR_USE_NAMESPACE(std)string >, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, TokenStream* > >, false > >::_Node >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)_Tree_ptr< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, TokenStream*, ANTLR_USE_NAMESPACE(std)less< ANTLR_USE_NAMESPACE(std)string >, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, TokenStream* > >, false > >::_Nodeptr >; template struct ANTLR_API ANTLR_USE_NAMESPACE(std)pair< ANTLR_USE_NAMESPACE(std)string, TokenStream* >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, TokenStream*, ANTLR_USE_NAMESPACE(std)less< ANTLR_USE_NAMESPACE(std)string >, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, TokenStream* > >,false >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tree_nod< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, TokenStream*, ANTLR_USE_NAMESPACE(std)less< ANTLR_USE_NAMESPACE(std)string >, 
ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, TokenStream* > >,false > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tree_ptr< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, TokenStream*, ANTLR_USE_NAMESPACE(std)less< ANTLR_USE_NAMESPACE(std)string >, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, TokenStream* > >,false > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tree_val< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, TokenStream*, ANTLR_USE_NAMESPACE(std)less< ANTLR_USE_NAMESPACE(std)string >, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, TokenStream* > >,false > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Tree< ANTLR_USE_NAMESPACE(std)_Tmap_traits< ANTLR_USE_NAMESPACE(std)string, TokenStream*, ANTLR_USE_NAMESPACE(std)less< ANTLR_USE_NAMESPACE(std)string >, ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)pair< const ANTLR_USE_NAMESPACE(std)string, TokenStream* > >,false > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)map< ANTLR_USE_NAMESPACE(std)string, TokenStream* >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< TokenStream* >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)allocator< ANTLR_USE_NAMESPACE(std)_Deque_map< TokenStream* , ANTLR_USE_NAMESPACE(std)allocator< TokenStream* > >::_Tptr >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Deque_map< TokenStream*, ANTLR_USE_NAMESPACE(std)allocator< TokenStream* > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)_Deque_val< TokenStream*, ANTLR_USE_NAMESPACE(std)allocator< TokenStream* > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)deque< TokenStream*, ANTLR_USE_NAMESPACE(std)allocator< TokenStream* > >; template class ANTLR_API ANTLR_USE_NAMESPACE(std)stack< TokenStream*, ANTLR_USE_NAMESPACE(std)deque >; #elif defined( _MSC_VER ) && ( _MSC_VER == 1310 ) // Instantiations for MSVC 7.1 template class ANTLR_API CircularQueue< int >; template class ANTLR_API CircularQueue< RefToken >; // #else future msvc's #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif BOOL APIENTRY DllMain( HANDLE hModule, DWORD ul_reason_for_call, LPVOID lpReserved ) { return TRUE; } antlr-2.7.7/lib/cpp/src/ASTNULLType.cpp0000644000175000017500000000421610522211615017412 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id:$ */ #include "antlr/config.hpp" #include "antlr/AST.hpp" #include "antlr/ASTNULLType.hpp" #include ANTLR_USING_NAMESPACE(std) #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif RefAST ASTNULLType::clone( void ) const { return RefAST(this); } void ASTNULLType::addChild( RefAST ) { } size_t ASTNULLType::getNumberOfChildren() const { return 0; } bool ASTNULLType::equals( RefAST ) const { return false; } bool ASTNULLType::equalsList( RefAST ) const { return false; } bool ASTNULLType::equalsListPartial( RefAST ) const { return false; } bool ASTNULLType::equalsTree( RefAST ) const { return false; } bool ASTNULLType::equalsTreePartial( RefAST ) const { return false; } vector ASTNULLType::findAll( RefAST ) { return vector(); } vector ASTNULLType::findAllPartial( RefAST ) { return vector(); } RefAST ASTNULLType::getFirstChild() const { return this; } RefAST ASTNULLType::getNextSibling() const { return this; } string ASTNULLType::getText() const { 
return ""; } int ASTNULLType::getType() const { return Token::NULL_TREE_LOOKAHEAD; } void ASTNULLType::initialize( int, const string& ) { } void ASTNULLType::initialize( RefAST ) { } void ASTNULLType::initialize( RefToken ) { } #ifdef ANTLR_SUPPORT_XML void ASTNULLType::initialize( istream& ) { } #endif void ASTNULLType::setFirstChild( RefAST ) { } void ASTNULLType::setNextSibling( RefAST ) { } void ASTNULLType::setText( const string& ) { } void ASTNULLType::setType( int ) { } string ASTNULLType::toString() const { return getText(); } string ASTNULLType::toStringList() const { return getText(); } string ASTNULLType::toStringTree() const { return getText(); } #ifdef ANTLR_SUPPORT_XML bool ASTNULLType::attributesToStream( ostream& ) const { return false; } void ASTNULLType::toStream( ostream& out ) const { out << "" << endl; } #endif const char* ASTNULLType::typeName( void ) const { return "ASTNULLType"; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/CharBuffer.cpp0000644000175000017500000000261210522211615017433 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/CharBuffer.cpp#2 $ */ #include "antlr/CharBuffer.hpp" #include //#include #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /* RK: Per default istream does not throw exceptions. This can be * enabled with: * stream.exceptions(ios_base::badbit|ios_base::failbit|ios_base::eofbit); * * We could try catching the bad/fail stuff. But handling eof via this is * not a good idea. EOF is best handled as a 'normal' character. * * So this does not work yet with gcc... Comment it until I get to a platform * that does.. */ /** Create a character buffer. Enable fail and bad exceptions, if supported * by platform. */ CharBuffer::CharBuffer(ANTLR_USE_NAMESPACE(std)istream& input_) : input(input_) { // input.exceptions(ANTLR_USE_NAMESPACE(std)ios_base::badbit| // ANTLR_USE_NAMESPACE(std)ios_base::failbit); } /** Get the next character from the stream. May throw CharStreamIOException * when something bad happens (not EOF) (if supported by platform). */ int CharBuffer::getChar() { // try { return input.get(); // } // catch (ANTLR_USE_NAMESPACE(std)ios_base::failure& e) { // throw CharStreamIOException(e); // } } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/ASTFactory.cpp0000644000175000017500000003010510522211615017401 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/ASTFactory.cpp#2 $ */ #include "antlr/CommonAST.hpp" #include "antlr/ANTLRException.hpp" #include "antlr/IOException.hpp" #include "antlr/ASTFactory.hpp" #include "antlr/ANTLRUtil.hpp" #include #include using namespace std; #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** AST Support code shared by TreeParser and Parser. * We use delegation to share code (and have only one * bit of code to maintain) rather than subclassing * or superclassing (forces AST support code to be * loaded even when you don't want to do AST stuff). * * This class collects all factories of AST types used inside the code. * New AST node types are registered with the registerFactory method. * On creation of an ASTFactory object a default AST node factory may be * specified. 
* * When registering types gaps between different types are filled with entries * for the default factory. */ /// Initialize factory ASTFactory::ASTFactory() : default_factory_descriptor(ANTLR_USE_NAMESPACE(std)make_pair(CommonAST::TYPE_NAME,&CommonAST::factory)) { nodeFactories.resize( Token::MIN_USER_TYPE, &default_factory_descriptor ); } /** Initialize factory with a non default node type. * factory_node_name should be the name of the AST node type the factory * generates. (should exist during the existance of this ASTFactory instance) */ ASTFactory::ASTFactory( const char* factory_node_name, factory_type fact ) : default_factory_descriptor(ANTLR_USE_NAMESPACE(std)make_pair(factory_node_name, fact)) { nodeFactories.resize( Token::MIN_USER_TYPE, &default_factory_descriptor ); } /// Delete ASTFactory ASTFactory::~ASTFactory() { factory_descriptor_list::iterator i = nodeFactories.begin(); while( i != nodeFactories.end() ) { if( *i != &default_factory_descriptor ) delete *i; i++; } } /// Register a factory for a given AST type void ASTFactory::registerFactory( int type, const char* ast_name, factory_type factory ) { // check validity of arguments... if( type < Token::MIN_USER_TYPE ) throw ANTLRException("Internal parser error invalid type passed to RegisterFactory"); if( factory == 0 ) throw ANTLRException("Internal parser error 0 factory passed to RegisterFactory"); // resize up to and including 'type' and initalize any gaps to default // factory. if( nodeFactories.size() < (static_cast(type)+1) ) nodeFactories.resize( type+1, &default_factory_descriptor ); // And add new thing.. nodeFactories[type] = new ANTLR_USE_NAMESPACE(std)pair( ast_name, factory ); } void ASTFactory::setMaxNodeType( int type ) { if( nodeFactories.size() < (static_cast(type)+1) ) nodeFactories.resize( type+1, &default_factory_descriptor ); } /** Create a new empty AST node; if the user did not specify * an AST node type, then create a default one: CommonAST. */ RefAST ASTFactory::create() { RefAST node = nodeFactories[0]->second(); node->setType(Token::INVALID_TYPE); return node; } RefAST ASTFactory::create(int type) { RefAST t = nodeFactories[type]->second(); t->initialize(type,""); return t; } RefAST ASTFactory::create(int type, const ANTLR_USE_NAMESPACE(std)string& txt) { RefAST t = nodeFactories[type]->second(); t->initialize(type,txt); return t; } #ifdef ANTLR_SUPPORT_XML RefAST ASTFactory::create(const ANTLR_USE_NAMESPACE(std)string& type_name, ANTLR_USE_NAMESPACE(std)istream& infile ) { factory_descriptor_list::iterator fact = nodeFactories.begin(); while( fact != nodeFactories.end() ) { if( type_name == (*fact)->first ) { RefAST t = (*fact)->second(); t->initialize(infile); return t; } fact++; } string error = "ASTFactory::create: Unknown AST type '" + type_name + "'"; throw ANTLRException(error); } #endif /** Create a new empty AST node; if the user did not specify * an AST node type, then create a default one: CommonAST. 
*/ RefAST ASTFactory::create(RefAST tr) { if (!tr) return nullAST; // cout << "create(tr)" << endl; RefAST t = nodeFactories[tr->getType()]->second(); t->initialize(tr); return t; } RefAST ASTFactory::create(RefToken tok) { // cout << "create( tok="<< tok->getType() << ", " << tok->getText() << ")" << nodeFactories.size() << endl; RefAST t = nodeFactories[tok->getType()]->second(); t->initialize(tok); return t; } /** Add a child to the current AST */ void ASTFactory::addASTChild(ASTPair& currentAST, RefAST child) { if (child) { if (!currentAST.root) { // Make new child the current root currentAST.root = child; } else { if (!currentAST.child) { // Add new child to current root currentAST.root->setFirstChild(child); } else { currentAST.child->setNextSibling(child); } } // Make new child the current child currentAST.child = child; currentAST.advanceChildToEnd(); } } /** Deep copy a single node. This function the new clone() methods in the AST * interface. Returns nullAST if t is null. */ RefAST ASTFactory::dup(RefAST t) { if( t ) return t->clone(); else return RefAST(nullASTptr); } /** Duplicate tree including siblings of root. */ RefAST ASTFactory::dupList(RefAST t) { RefAST result = dupTree(t); // if t == null, then result==null RefAST nt = result; while( t ) { // for each sibling of the root t = t->getNextSibling(); nt->setNextSibling(dupTree(t)); // dup each subtree, building new tree nt = nt->getNextSibling(); } return result; } /** Duplicate a tree, assuming this is a root node of a tree * duplicate that node and what's below; ignore siblings of root node. */ RefAST ASTFactory::dupTree(RefAST t) { RefAST result = dup(t); // make copy of root // copy all children of root. if( t ) result->setFirstChild( dupList(t->getFirstChild()) ); return result; } /** Make a tree from a list of nodes. The first element in the * array is the root. If the root is null, then the tree is * a simple list not a tree. Handles null children nodes correctly. * For example, make(a, b, null, c) yields tree (a b c). make(null,a,b) * yields tree (nil a b). */ RefAST ASTFactory::make(ANTLR_USE_NAMESPACE(std)vector& nodes) { if ( nodes.size() == 0 ) return RefAST(nullASTptr); RefAST root = nodes[0]; RefAST tail = RefAST(nullASTptr); if( root ) root->setFirstChild(RefAST(nullASTptr)); // don't leave any old pointers set // link in children; for( unsigned int i = 1; i < nodes.size(); i++ ) { if ( nodes[i] == 0 ) // ignore null nodes continue; if ( root == 0 ) // Set the root and set it up for a flat list root = tail = nodes[i]; else if ( tail == 0 ) { root->setFirstChild(nodes[i]); tail = root->getFirstChild(); } else { tail->setNextSibling(nodes[i]); tail = tail->getNextSibling(); } if( tail ) // RK: I cannot fathom why this missing check didn't bite anyone else... 
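// (a node supplied in 'nodes' may already have siblings attached; after
//  linking it in, 'tail' has to be advanced to the true end of that sibling
//  chain so the next node is appended in the right place)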
{ // Chase tail to last sibling while (tail->getNextSibling()) tail = tail->getNextSibling(); } } return root; } /** Make a tree from a list of nodes, where the nodes are contained * in an ASTArray object */ RefAST ASTFactory::make(ASTArray* nodes) { RefAST ret = make(nodes->array); delete nodes; return ret; } /// Make an AST the root of current AST void ASTFactory::makeASTRoot( ASTPair& currentAST, RefAST root ) { if (root) { // Add the current root as a child of new root root->addChild(currentAST.root); // The new current child is the last sibling of the old root currentAST.child = currentAST.root; currentAST.advanceChildToEnd(); // Set the new root currentAST.root = root; } } void ASTFactory::setASTNodeFactory( const char* factory_node_name, factory_type factory ) { default_factory_descriptor.first = factory_node_name; default_factory_descriptor.second = factory; } #ifdef ANTLR_SUPPORT_XML bool ASTFactory::checkCloseTag( ANTLR_USE_NAMESPACE(std)istream& in ) { char ch; if( in.get(ch) ) { if( ch == '<' ) { char ch2; if( in.get(ch2) ) { if( ch2 == '/' ) { in.putback(ch2); in.putback(ch); return true; } in.putback(ch2); in.putback(ch); return false; } } in.putback(ch); return false; } return false; } void ASTFactory::loadChildren( ANTLR_USE_NAMESPACE(std)istream& infile, RefAST current ) { char ch; for(;;) // for all children of this node.... { eatwhite(infile); infile.get(ch); // '<' if( ch != '<' ) { string error = "Invalid XML file... no '<' found ("; error += ch + ")"; throw IOException(error); } infile.get(ch); // / or text.... if( ch == '/' ) // check for close tag... { string temp; // read until '>' and see if it matches the open tag... if not trouble temp = read_identifier( infile ); if( strcmp(temp.c_str(), current->typeName() ) != 0 ) { string error = "Invalid XML file... close tag does not match start tag: "; error += current->typeName(); error += " closed by " + temp; throw IOException(error); } infile.get(ch); // must be a '>' if( ch != '>' ) { string error = "Invalid XML file... no '>' found ("; error += ch + ")"; throw IOException(error); } // close tag => exit loop break; } // put our 'look ahead' back where it came from infile.putback(ch); infile.putback('<'); // and recurse into the tree... RefAST child = LoadAST(infile); current->addChild( child ); } } void ASTFactory::loadSiblings(ANTLR_USE_NAMESPACE(std)istream& infile, RefAST current ) { for(;;) { eatwhite(infile); if( infile.eof() ) break; if( checkCloseTag(infile) ) break; RefAST sibling = LoadAST(infile); current->setNextSibling(sibling); } } RefAST ASTFactory::LoadAST( ANTLR_USE_NAMESPACE(std)istream& infile ) { RefAST current = nullAST; char ch; eatwhite(infile); if( !infile.get(ch) ) return nullAST; if( ch != '<' ) { string error = "Invalid XML file... no '<' found ("; error += ch + ")"; throw IOException(error); } string ast_type = read_identifier(infile); // create the ast of type 'ast_type' current = create( ast_type, infile ); if( current == nullAST ) { string error = "Unsuported AST type: " + ast_type; throw IOException(error); } eatwhite(infile); infile.get(ch); // now if we have a '/' here it's a single node. If it's a '>' we get // a tree with children if( ch == '/' ) { infile.get(ch); // get the closing '>' if( ch != '>' ) { string error = "Invalid XML file... no '>' found after '/' ("; error += ch + ")"; throw IOException(error); } // get the rest on this level loadSiblings( infile, current ); return current; } // and finaly see if we got the close tag... 
if( ch != '>' ) { string error = "Invalid XML file... no '>' found ("; error += ch + ")"; throw IOException(error); } // handle the ones below this level.. loadChildren( infile, current ); // load the rest on this level... loadSiblings( infile, current ); return current; } #endif // ANTLR_SUPPORT_XML #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif /* Heterogeneous AST/XML-I/O ramblings... * * So there is some heterogeneous AST support.... * basically in the code generators a new custom ast is generated without * going throug the factory. It also expects the RefXAST to be defined. * * Is it maybe better to register all AST types with the ASTFactory class * together with the respective factory methods. * * More and more I get the impression that hetero ast was a kindoff hack * on top of ANTLR's normal AST system. * * The heteroast stuff will generate trouble for all astFactory.create( ... ) * invocations. Most of this is handled via getASTCreateString methods in the * codegenerator. At the moment getASTCreateString(GrammarAtom, String) has * slightly to little info to do it's job (ok the hack that is in now * works, but it's an ugly hack) * * An extra caveat is the 'nice' action.g thing. Which also judiciously calls * getASTCreateString methods because it handles the #( ... ) syntax. * And converts that to ASTFactory calls. * * */ antlr-2.7.7/lib/cpp/src/TokenStreamBasicFilter.cpp0000644000175000017500000000173410522211615021774 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/TokenStreamBasicFilter.cpp#2 $ */ #include "antlr/TokenStreamBasicFilter.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif /** This object is a TokenStream that passes through all * tokens except for those that you tell it to discard. * There is no buffering of the tokens. 
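 * For example, a hypothetical grammar's whitespace token type can be dropped
 * before it ever reaches the parser with filter.discard(WS), or several types
 * at once by passing a BitSet to discard().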
*/ TokenStreamBasicFilter::TokenStreamBasicFilter(TokenStream& input_) : input(&input_) { } void TokenStreamBasicFilter::discard(int ttype) { discardMask.add(ttype); } void TokenStreamBasicFilter::discard(const BitSet& mask) { discardMask = mask; } RefToken TokenStreamBasicFilter::nextToken() { RefToken tok = input->nextToken(); while ( tok && discardMask.member(tok->getType()) ) { tok = input->nextToken(); } return tok; } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/src/CharScanner.cpp0000644000175000017500000000576010522211615017622 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/CharScanner.cpp#2 $ */ #include #include "antlr/CharScanner.hpp" #include "antlr/CommonToken.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif ANTLR_C_USING(exit) CharScanner::CharScanner(InputBuffer& cb, bool case_sensitive ) : saveConsumedInput(true) //, caseSensitiveLiterals(true) , caseSensitive(case_sensitive) , literals(CharScannerLiteralsLess(this)) , inputState(new LexerInputState(cb)) , commitToPath(false) , tabsize(8) , traceDepth(0) { setTokenObjectFactory(&CommonToken::factory); } CharScanner::CharScanner(InputBuffer* cb, bool case_sensitive ) : saveConsumedInput(true) //, caseSensitiveLiterals(true) , caseSensitive(case_sensitive) , literals(CharScannerLiteralsLess(this)) , inputState(new LexerInputState(cb)) , commitToPath(false) , tabsize(8) , traceDepth(0) { setTokenObjectFactory(&CommonToken::factory); } CharScanner::CharScanner( const LexerSharedInputState& state, bool case_sensitive ) : saveConsumedInput(true) //, caseSensitiveLiterals(true) , caseSensitive(case_sensitive) , literals(CharScannerLiteralsLess(this)) , inputState(state) , commitToPath(false) , tabsize(8) , traceDepth(0) { setTokenObjectFactory(&CommonToken::factory); } /** Report exception errors caught in nextToken() */ void CharScanner::reportError(const RecognitionException& ex) { ANTLR_USE_NAMESPACE(std)cerr << ex.toString().c_str() << ANTLR_USE_NAMESPACE(std)endl; } /** Parser error-reporting function can be overridden in subclass */ void CharScanner::reportError(const ANTLR_USE_NAMESPACE(std)string& s) { if (getFilename() == "") ANTLR_USE_NAMESPACE(std)cerr << "error: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; else ANTLR_USE_NAMESPACE(std)cerr << getFilename().c_str() << ": error: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; } /** Parser warning-reporting function can be overridden in subclass */ void CharScanner::reportWarning(const ANTLR_USE_NAMESPACE(std)string& s) { if (getFilename() == "") ANTLR_USE_NAMESPACE(std)cerr << "warning: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; else ANTLR_USE_NAMESPACE(std)cerr << getFilename().c_str() << ": warning: " << s.c_str() << ANTLR_USE_NAMESPACE(std)endl; } void CharScanner::traceIndent() { for( int i = 0; i < traceDepth; i++ ) ANTLR_USE_NAMESPACE(std)cout << " "; } void CharScanner::traceIn(const char* rname) { traceDepth++; traceIndent(); ANTLR_USE_NAMESPACE(std)cout << "> lexer " << rname << "; c==" << LA(1) << ANTLR_USE_NAMESPACE(std)endl; } void CharScanner::traceOut(const char* rname) { traceIndent(); ANTLR_USE_NAMESPACE(std)cout << "< lexer " << rname << "; c==" << LA(1) << ANTLR_USE_NAMESPACE(std)endl; traceDepth--; } #ifndef NO_STATIC_CONSTS const int CharScanner::NO_CHAR; const int CharScanner::EOF_CHAR; #endif #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } 
#endif antlr-2.7.7/lib/cpp/src/NoViableAltForCharException.cpp0000644000175000017500000000210610522211615022706 0ustar twernertwerner/* ANTLR Translator Generator * Project led by Terence Parr at http://www.jGuru.com * Software rights: http://www.antlr.org/license.html * * $Id: //depot/code/org.antlr/release/antlr-2.7.7/lib/cpp/src/NoViableAltForCharException.cpp#2 $ */ #include "antlr/NoViableAltForCharException.hpp" #include "antlr/String.hpp" #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE namespace antlr { #endif NoViableAltForCharException::NoViableAltForCharException(int c, CharScanner* scanner) : RecognitionException("NoViableAlt", scanner->getFilename(), scanner->getLine(),scanner->getColumn()), foundChar(c) { } NoViableAltForCharException::NoViableAltForCharException( int c, const ANTLR_USE_NAMESPACE(std)string& fileName_, int line_, int column_) : RecognitionException("NoViableAlt",fileName_,line_,column_), foundChar(c) { } ANTLR_USE_NAMESPACE(std)string NoViableAltForCharException::getMessage() const { return ANTLR_USE_NAMESPACE(std)string("unexpected char: ")+charName(foundChar); } #ifdef ANTLR_CXX_SUPPORTS_NAMESPACE } #endif antlr-2.7.7/lib/cpp/scripts/0000755000175000017500000000000010522211615015617 5ustar twernertwernerantlr-2.7.7/lib/cpp/scripts/make_change_log.tcl0000755000175000017500000000227410522211615021416 0ustar twernertwerner#!/bin/sh # the next line restarts using tclsh \ exec tclsh8.3 $0 $* # # Sort the (C++) changes recorded in the repository by change number and # print them to stdout # set depots {//depot/code/org.antlr/dev/klaren.dev //depot/code/org.antlr/main/main } set files { /lib/cpp/... /antlr/... } set filespec "" foreach depot $depots { foreach file $files { append filespec "$depot$file " } } puts stderr "Gettting changes from: $filespec" catch {set file [open "|p4 changes -l $filespec" r]} set cnt 0 set changes {} set text "" set change_nr -1 while {![eof $file]} { set line [gets $file] if { [regexp -- {^Change ([0-9]+).*$} $line dummy tmp] } { # append the number to the list of found changes lappend changes $tmp if { $change_nr != -1 } { # were already working on change.. # so we have text to store.. set description($change_nr) $text } # remember number... set change_nr $tmp # reinit text set text "[string trim $line]\n" } else { append text " [string trim $line]\n" } } set description($change_nr) $text catch {close $file} set sorted_changes [lsort -unique -integer -decreasing $changes] foreach change $sorted_changes { puts $description($change) } antlr-2.7.7/lib/cpp/scripts/cr_stripper.sh0000644000175000017500000000022110522211615020502 0ustar twernertwerner#!/bin/sh for f in antlr/*.hpp src/*.cpp; do cat "$f" | tr -d "\r" > $$ cmp -s $$ "$f" || (p4 edit "$f" && mv $$ "$f" && echo "$f Fixed") done antlr-2.7.7/lib/cpp/ChangeLog0000644000175000017500000005735310522211615015717 0ustar twernertwernerChange 499 on 2001/05/31 by klaren@klaren.hawking.dev - Ported action.g fixes from C++ to Java action.g. Warnings and errors in actions are now correctly reported in java mode as well. - Ported $lookaheadSet functionality to java mode. - Moved processActionForTreeSpecifiers from CodeGenerator.java to JavaCodeGenerator.java and made it abstract. Added setFileName call to it. - Added a lot of getLine's to calls of processActionForTreeSpecifiers in JavaCodegen. - Moved a number one liner methods to the header file for better inlining. - Added clear methods to the Queue type objects (java/C++). - Added reset methods to the Input/TokenBuffer objects (java/C++). 
- Added reset methods to the SharedInputState objects (java/C++). - Added to the C++ LexerSharedInputState an initialize function that reinitializes the thing with a new stream. - Added docu. - Bugfix: Initialized attribute filename a little bit earlier so error message shows the filename in stead of 'null'. Change 495 on 2001/05/31 by klaren@klaren.hawking.dev Added -h/-help/--help options. Adapted the year range in the copyright in the help message. Change 494 on 2001/05/29 by klaren@klaren.hawking.dev Changed order of equals test a bit to be more efficient. Change 493 on 2001/05/29 by klaren@klaren.hawking.dev Moved some methods a bit around to get better inlining. Change 492 on 2001/05/22 by klaren@klaren.hawking.dev Dug up a mail from Bill Zheng about doxygen. Fixed a few things as a result. Change 491 on 2001/05/22 by klaren@klaren.hawking.dev More tweaks for the ditching of the tokenNames vector. GNU C++ had some trouble with the previous incarnation. Change 490 on 2001/05/21 by klaren@klaren.hawking.dev Changed all remaining "antlr/xxx.hpp" includes to ones in the header files. Change 489 on 2001/05/18 by klaren@klaren.hawking.dev Missed a few tracer class changes in this one. Also some missing virtuals fixed. Change 488 on 2001/05/18 by klaren@klaren.hawking.dev Removed /** from license comments, they brought doxygen of track. Replaced ANTLR_BEGIN/END_NAMESPACE defines with a ANTLR_CXX_SUPPORTS_NAMESPACE macro, again for doxygen. Here and there some reformatting and reordering. Change 487 on 2001/05/18 by klaren@klaren.hawking.dev Optimizations in Tracer classes (dumped string's). Removed setTokenNames from the support library. Switched tokenNames to use a char* array. Generate NUM_TOKENS attribute in parsers. Added getNumTokens methods to parsers. Changes in MismatchedTokenException to reflect the previous. Change 486 on 2001/05/18 by klaren@klaren.hawking.dev Allow whitespace between $setxxx and following '('. Change 485 on 2001/05/14 by klaren@klaren.hawking.dev And now really fix giving errors... stupid java *snicker* Change 484 on 2001/05/14 by klaren@klaren.hawking.dev Allow whitespace between $setxxx and following '('. Change 483 on 2001/05/14 by klaren@klaren.hawking.dev Give errors when target file/directory not writeable. Change 482 on 2001/05/11 by klaren@klaren.hawking.dev Added config file for doxygen. Change 480 on 2001/05/11 by klaren@klaren.hawking.dev Tagged a bug in the code. (heteroast trouble) Change 479 on 2001/05/11 by klaren@klaren.hawking.dev Added Ernest Pasour's $lookaheadSet feature to the C++ codegen. (This needs to be ported to Java mode). Also cleaned up action.g's error reporting this should also be ported to java mode. Change 476 on 2001/05/11 by klaren@klaren.hawking.dev More fixes for XML I/O. It's a bit tidier now. Some too advanced things removed (ios_base::failure). Embedding custom XML elements in the stream should be possible now. Change 475 on 2001/05/08 by parrt@parrt.foggy made default charbuffer bigger per token. Change 471 on 2001/04/26 by klaren@klaren.hawking.dev Bugfix: in case of a certain order of header actions (pre_include_xx etc.) one header action might overwrite another. Probably only affects C++. Change 468 on 2001/03/22 by klaren@klaren.hawking.dev A few extra ANTLR_API's to make antlr.dll work from other dll's. Thanks to Ernest Passour. Change 466 on 2001/03/15 by klaren@klaren.hawking.dev Fix from Emir Uner for KAI C++ cast string literal to 'const char*' for make_pair. 
Change 465 on 2001/03/08 by klaren@klaren.hawking.dev Fix for bugreport from Emir Uner, static member const char * initializations are not standard compliant. Change 464 on 2001/03/05 by klaren@klaren.hawking.dev Changes for XML input and output. Restructuring of ASTFactory and some fixes for heterogeneous AST's. Change 463 on 2001/02/05 by klaren@klaren.hawking.dev Improved exception handling in trace routines of parser. Patch submitted by John Fremlin. Tracer class now catch exceptions from lexer. Fixed forgotten message in BitSet.cpp. Change 462 on 2001/02/05 by klaren@klaren.hawking.dev Improved errormessage. Now says unexpected end of file and stuff like that. Change 461 on 2001/01/31 by klaren@klaren.hawking.dev Changed the position of the init actions for (..)* (..)+ to just inside the loop handling the closure. This way we can check EOF conditions in the init action for each loop invocation. Change 460 on 2001/01/31 by klaren@klaren.hawking.dev Fixed typo in error message. Change 458 on 2001/01/17 by klaren@klaren.hawking.dev Removed dos newlines. Change 457 on 2001/01/15 by klaren@klaren.hawking.dev Removed the superfluous '// line xxx' comments in genHashLines=false case. Change 454 on 2001/01/11 by klaren@klaren.hawking.dev Updated the changelog and fixed buglet in changelog script. Change 453 on 2001/01/11 by klaren@klaren.hawking.dev Tweaked the make_change_log script to include both my dev depot as well as the main branch. Change 452 on 2001/01/11 by klaren@klaren.hawking.dev Changes to let ANTLR only overwrite a file if it really changes. (Mainly for C++ mode). With supporting changes in mkxxxjar batch files. Change 451 on 2001/01/11 by klaren@klaren.hawking.dev Changed the charName in C++ mode and NoViableAltForCharException in java mode so that only printable characters are printed and non printable ones get 'hexdumped'. Change 450 on 2001/01/10 by klaren@klaren.hawking.dev Fixed some typo's in javadoc comments. Change 449 on 2001/01/08 by klaren@klaren.hawking.dev Removed StdAfx.h from the .dsp file. Change 448 on 2001/01/08 by klaren@klaren.hawking.dev Added implementations for getLAChars and getMarkedChars. Change 447 on 2001/01/05 by klaren@klaren.hawking.dev Small fix to the genBitSets method of CppCodeGenerator now the generated sets are also dumped for lexers. Change 446 on 2000/12/18 by klaren@klaren.hawking.dev Changes for MSCV DLL building. Now also includes dsp/dsw files. Contributed by Stephen Naughton. Change 444 on 2000/12/12 by klaren@klaren.hawking.dev Added include. (Needed for new gcc version 2.97) Change 441 on 2000/12/05 by klaren@klaren.hawking.dev Fixed problems with buggy tolower (truncating -1 (EOF) to 0xff) functions together with STLport/HPUX also reverted fix 422 since this one also catches that. Change 440 on 2000/11/30 by klaren@klaren.hawking.dev Fixed typo in code generated for $setText. Change 439 on 2000/11/22 by klaren@klaren.hawking.dev Few minor tweaks. And removed my name from the generated author section a bit too much copy'n'paste. Change 438 on 2000/11/20 by klaren@klaren.hawking.dev Fixed bug with C/C++ preprocessor constructs (#if's etc). Also changed code generated for $setText to be surrounded by '{}'. Change 437 on 2000/11/20 by klaren@klaren.hawking.dev SGI Irix 6.5.10 MIPSPro compiler support contributed by Anna Winkler. 
Change 436 on 2000/11/20 by klaren@klaren.hawking.dev Virtualized a most methods of Parser and LLkParser (as requested by Alexander Lenski) Change 434 on 2000/11/09 by klaren@klaren.hawking.dev First stab at a docbook codegenerator. It now produces something that get's parsed by jade and gives pretty ok HTML output. Change 433 on 2000/11/08 by klaren@klaren.hawking.dev Cleaned up some superfluous methods. Change 432 on 2000/11/08 by klaren@klaren.hawking.dev Cleaned up generated HTML added quoting for special entities (probably still missed some). Removed printing of parameters, returns, syntactic and semantic actions since these obscure the output. It's now HTML 4.01 Transitional compliant it seems. Change 427 on 2000/10/23 by klaren@klaren.hawking.dev Virtualized destructors in xxSharedInputState. This to support overloading. By request of Alexander Lenski. Change 426 on 2000/10/19 by klaren@klaren.hawking.dev Incorporated bugfix suggested by Joe Comuzzi. Fixes ommission of semantic predicates in the big unicode case. Change 425 on 2000/10/19 by klaren@klaren.hawking.dev Michael Schmitt's changes for a better exception hierarchy. Change 424 on 2000/10/19 by klaren@klaren.hawking.dev Cleaned up generated code a bit. Removed excess constructors etc. Change 422 on 2000/10/03 by klaren@klaren.hawking.dev Fix for VC++ 6.0 bug with tolower and setlocale Change 421 on 2000/10/03 by klaren@klaren.hawking.dev Integrate 2.7.1 main line into development version. Change 412 on 2000/10/01 by parrt@parrt.foggy changes to prevent Tool from runtime jar Change 410 on 2000/10/01 by parrt@parrt.foggy hetero tree labels are of the specified type if any instead of AST Change 409 on 2000/10/01 by parrt@parrt.foggy updated output of .g files Change 407 on 2000/10/01 by parrt@parrt.foggy added column tracking example, updated commontoken to print col. Change 405 on 2000/09/27 by parrt@parrt.foggy changed type Change 401 on 2000/09/27 by klaren@klaren.hawking.main ChangeLog updated. + tweak to script. Change 400 on 2000/09/27 by klaren@klaren.hawking.main Made little TCL script to pretty print a ChangeLog with C++ stuff. Change 399 on 2000/09/27 by klaren@klaren.hawking.main Fixed generating too many ASTNULL checks in wrong places. Change 397 on 2000/09/27 by klaren@klaren.hawking.main Some *UGLY* fixes for the last typecasting problems in Cpp codegen. It now works. In 2.7.2 or later I'll fix this in a nice way. Change 394 on 2000/09/26 by klaren@klaren.hawking.main Prefixed Unicode optimization checks with a ASTNULL check. Change 393 on 2000/09/25 by klaren@klaren.hawking.main Bumped up the version no from 2.7.1a4 to 2.7.1. Change 381 on 2000/09/24 by parrt@parrt.foggy updated to use addElement Change 380 on 2000/09/24 by parrt@parrt.foggy integrating ric's stuff into main Change 378 on 2000/09/23 by klaren@klaren.hawking.dev Forgot another ANTLR_USE_NAMESPACE macro. Change 377 on 2000/09/22 by klaren@klaren.hawking.dev More todo's collected from my mailbox.. Change 372 on 2000/09/22 by klaren@klaren.hawking.dev Updated. Change 371 on 2000/09/22 by klaren@klaren.hawking.dev More TODO's... Change 370 on 2000/09/22 by klaren@klaren.hawking.dev Added nested namespace support submitted by David Wagner. Change 369 on 2000/09/22 by klaren@klaren.hawking.dev Bug fix for #ast_in and #( #ast_in ) differences. Split of actions/java/action.g into a java and cpp part. C++-isms removed from java part. Added support in C++ part for some_method(static_cast(#ast)). 
Change 367 on 2000/09/22 by klaren@klaren.hawking.dev Port of Unicode optimizations from java. Fixes for custom AST usage. Change 366 on 2000/09/22 by klaren@klaren.hawking.dev Implemented missing initializes to CAWHT for Sun CC 6.0 and fixed throw out_of_range for STL_PORT in BitSet.cpp. Change 363 on 2000/09/13 by parrt@parrt.foggy set initial column to 1 instead of 0 Change 362 on 2000/09/13 by klaren@klaren.hawking.dev Some bugfixes for getASTCreateString( ... ) hopefully all mismatches between astFactory.create and it's parameters are fixed. (Maybe even a speed improvement) Change 360 on 2000/09/13 by klaren@klaren.hawking.dev Borland C++ builder 4.0 project files for antlr.lib donated by Ross Bencina Change 358 on 2000/09/11 by klaren@klaren.hawking.dev Removed a superfluous typename (caused problem with Irix Mips compiler) Change 349 on 2000/09/08 by klaren@klaren.hawking.dev More configure tweaks. Some libtool enhancements added. Change 348 on 2000/09/07 by klaren@klaren.hawking.main Small improvement in constructor of CommonAST. Change 346 on 2000/09/07 by klaren@klaren.hawking.dev Miniscule fix for Borland C++Builder 4.0/C++ 5.4. (extra parens) Change 344 on 2000/09/06 by klaren@klaren.hawking.main Fixed missing namespace in generated TreeParsers as reported by Ross Bencina. Change 343 on 2000/09/06 by klaren@klaren.hawking.dev Fixed namespace mishap with generated TreeParser constructors as reported by Ross Bencina. Change 342 on 2000/09/06 by klaren@klaren.hawking.dev Some small optimizations. And a maybe fix for Borland compiler warning. Change 341 on 2000/09/06 by klaren@klaren.hawking.main Miniscule fix for Borland C++Builder 4.0/C++ 5.4. (extra parens) Change 340 on 2000/09/05 by mika@y0 Sather code generation/runtime catching up with Java bug fixes/enhancements Change 338 on 2000/09/03 by parrt@parrt.foggy cleaned up formatting Change 336 on 2000/09/03 by parrt@parrt.foggy optimized out the large unicode sets from switches. Change 335 on 2000/09/03 by parrt@parrt.foggy up'd version to 2.7.1 Change 334 on 2000/09/03 by parrt@parrt.foggy added throws option for rules Change 330 on 2000/08/30 by klaren@klaren.hawking.dev Small 'fix' to RecognitionException getFileLineString. Change 325 on 2000/08/29 by klaren@klaren.hawking.dev Shut a few compiler warnings up. Change 323 on 2000/08/24 by klaren@klaren.hawking.dev importVocab statements now cause antlr to look in $PWD first for the imported vocabulary files, and if that fails, to check in the directory specified by the '-o ' command-line argument Change 322 on 2000/08/24 by klaren@klaren.hawking.dev More typecasts for nullAST's. (Should be the last) Change 319 on 2000/08/23 by klaren@klaren.hawking.dev Michael Schmitt found a missing static_cast in the generated C++ code. Change 318 on 2000/08/22 by klaren@klaren.hawking.dev So many things to do... Change 317 on 2000/08/22 by klaren@klaren.hawking.main Updated changelog for a5 (or was it 2.7.1) release.. Change 316 on 2000/08/22 by klaren@klaren.hawking.main All kinds of small Makefile/configure tweaks. All gcc-isms should be gone now. Change 315 on 2000/08/21 by bob@bob.melvin importVocab statements now cause antlr to look in $PWD first for the imported vocabulary files, and if that fails, to check in the directory specified by the '-o ' command-line argument Change 310 on 2000/08/16 by klaren@klaren.hawking.dev Fixed a bug concerning one of the nullAST initialization strings being unset. 
Change 309 on 2000/08/15 by klaren@klaren.hawking.main Integrate bugfixes from klaren.dev to MismatchedChar/TokenException. Change 308 on 2000/08/15 by klaren@klaren.hawking.dev Fixes for some cut'n'paste'o's in MismatchedToken/CharException (error messages are ok again) Change 307 on 2000/08/15 by klaren@klaren.hawking.dev Added todo list. Change 306 on 2000/08/14 by klaren@klaren.hawking.dev Configure fixes and a small cleanup. Change 305 on 2000/08/14 by klaren@klaren.hawking.dev Got rid of the last member template. At the expense of some casts. Change 304 on 2000/08/11 by klaren@klaren.hawking.dev Last changes for ASTLabelType support. It seems to work now. Change 303 on 2000/08/10 by klaren@klaren.kronecker.dev Changes for better support of ASTLabelType in C++ grammars. Change 302 on 2000/08/08 by klaren@klaren.kronecker.dev Sync with main tree. Change 301 on 2000/08/08 by klaren@klaren.kronecker.main Fix for the $setText bug in C++ code generation. More C++ fixes to action.g. Allow '->' in $setText arguments and some other places. Change 298 on 2000/08/08 by klaren@klaren.kronecker.dev Integrate main branch with playground. Change 297 on 2000/08/07 by klaren@klaren.kronecker.main Fixes for namespace/namespaceAntlr/namespaceStd/genHashLines options. Change 296 on 2000/08/07 by klaren@klaren.kronecker.main Virtualized all functions that someone should want to override. Probably necessary for heteroAST stuff. Change 291 on 2000/08/07 by klaren@klaren.kronecker.main Some tweaks to configure.in and Makefile.am's. Fix for CXXFLAGS being set incorrectly when not using gcc. Change 290 on 2000/08/05 by klaren@klaren.kronecker.main Updated prototype of toLower to definition in cpp file. It seems I messed them up a while back. Change 289 on 2000/08/05 by klaren@klaren.kronecker.main Added namespace macro to out_of_range exception. Change 288 on 2000/07/28 by parrt@parrt.foggy re-added toLower return type fix Change 285 on 2000/07/19 by klaren@klaren.kronecker.main Fixed thinko. Change 284 on 2000/07/19 by klaren@klaren.kronecker.main Dumped output of p4 changes -l into it... Change 283 on 2000/07/19 by klaren@klaren.kronecker.main Fix for bug found by Michael Ebner. Bitset size was not increased in add method. Change 280 on 2000/07/19 by klaren@klaren.kronecker.main Made namespaceAntlr, namespaceStd and genHashlines options file-level options. Removed nameSpace member from Tool class all is now handled in CppCodegenerator.java. Change 279 on 2000/07/18 by klaren@klaren.kronecker.main Added -diagnostic and -glib options to the usage message. Change 278 on 2000/07/18 by klaren@klaren.kronecker.main Java changes for indented traceIn/Out stuff. Change 276 on 2000/07/18 by klaren@klaren.kronecker.main C++ Changes for the indented traceXXXX output as invented by Monty Zukowski Change 275 on 2000/07/18 by klaren@klaren.kronecker.main Added missing initializer in generated code for TreeParser Change 272 on 2000/07/17 by klaren@klaren.kronecker.main Another workspace for MSVC6 has support for dll's (for version 2.6.1). Change 271 on 2000/07/17 by klaren@klaren.kronecker.main New autoconf/automake stuff for the C++ support library. 
Change 270 on 2000/07/17 by klaren@klaren.kronecker.main Fixed error within the NO_STATIC_CONSTS #ifdef Change 269 on 2000/07/17 by klaren@klaren.kronecker.main Move C++ files to lib/cpp/src as first step for autoconf setup Change 268 on 2000/07/17 by klaren@klaren.kronecker.main Add contrib dir and Microsoft Visual C++ 6.0 projects supplied by John Millaway Change 262 on 2000/07/16 by parrt@parrt.foggy changed version to 2.7.1a4 Change 261 on 2000/07/16 by parrt@parrt.foggy added constructors for this class Change 260 on 2000/07/14 by klaren@klaren.kronecker.main Fixed crashbugs/typos in constructors of Mismatched[Token|Char]Exception Change 259 on 2000/07/13 by parrt@parrt.foggy cutting ric's branch Change 258 on 2000/07/10 by parrt@parrt.foggy fixes per klaren Change 255 on 2000/07/09 by parrt@parrt.foggy removed magelang from tag line Change 252 on 2000/07/09 by parrt@parrt.foggy reformatted Change 251 on 2000/07/04 by mika@y0 Reflecting change to the Java action lexer in the Sather action lexer Change 249 on 2000/07/04 by parrt@parrt.foggy changed version number Change 248 on 2000/07/04 by parrt@parrt.foggy Ric Klaren's changes to C++ lib Change 247 on 2000/07/04 by parrt@parrt.foggy Ric Klaren's changes for namespaces Change 246 on 2000/06/16 by mika@y0 reflecting changes antlr/CodeGenerator.java #3 -> #4 Change 245 on 2000/06/16 by mika@y0 reflecting changes in antlr/actions/java/action.g #3 -> #4 Change 242 on 2000/06/06 by parrt@parrt.foggy fixed my new method addition to handle empty input case correctly Change 241 on 2000/06/06 by parrt@parrt.foggy allows whitespace now after ( in $setType. Change 240 on 2000/06/03 by parrt@parrt.foggy fixed a bug where duplicate grammars caused an exception Change 239 on 2000/06/03 by parrt@parrt.foggy adjusted so it works; header actions got converted to Token objects from Strings; lots of cast problems and then null ptr exceptions. Change 238 on 2000/06/03 by parrt@parrt.foggy made it ignore zero-length strings for processActionForTreeSpecifiers Change 237 on 2000/06/03 by parrt@parrt.foggy had to run ANTLR on antlr.g to make it compile. Change 236 on 2000/06/03 by parrt@parrt.foggy changed refs to headerActions to imply Token not String. Change 235 on 2000/05/31 by pete@pete.linux More changes to support #line generation in C++ (from Ric Klaren) Change 233 on 2000/05/30 by mika@y0 Bug fixes from Gilbert Roulot Change 232 on 2000/05/29 by parrt@parrt.foggy improved diagnostic DEBUG_ANALYZER output a lot and fixed a nasty FOLLOW cycle computation bug. I was being too aggressive with my locking; i locked block end nodes even when not computing FIRST(block start). Change 231 on 2000/05/29 by parrt@parrt.foggy added code to print out FOLLOW set for rule at end Change 230 on 2000/05/29 by parrt@parrt.foggy added code you can uncomment to print out grammar after code gen Change 229 on 2000/05/29 by parrt@parrt.foggy changed version to 2.7.1a2 Change 228 on 2000/05/29 by parrt@parrt.foggy added toString method to print out rules Change 227 on 2000/05/29 by parrt@parrt.foggy added code to dump lookahead sets for each alt in toString() Change 220 on 2000/05/29 by parrt@parrt.foggy changed char to int for toLower Change 219 on 2000/05/28 by pete@pete.linux Mirroring Java changes Change 218 on 2000/05/28 by pete@pete.linux Cleaned up the #line generator a little. Change 217 on 2000/05/27 by parrt@parrt.foggy bug fix: wasn't providing always 4 digits for escapeChar. 
Change 216 on 2000/05/27 by parrt@parrt.foggy added checking for unterminated rules Change 215 on 2000/05/27 by parrt@parrt.foggy added column tracking support; tabs are counted as 1 Change 214 on 2000/05/27 by parrt@parrt.foggy allow comments after tokens/options etc... Change 213 on 2000/05/27 by parrt@parrt.foggy setInputState was actually getInputState :( Change 212 on 2000/05/27 by parrt@parrt.foggy updated to handle } in tokens{} Change 211 on 2000/05/27 by parrt@parrt.foggy had same bug as JavaCodeGenerator related to ~(A|B) Change 208 on 2000/05/27 by parrt@parrt.foggy updated version Change 205 on 2000/05/24 by pete@pete.linux Add support for Metrowerks Codewarrior Change 203 on 2000/05/22 by pete@pete.linux Fix for multithreading from Jan Mikkelsen Change 202 on 2000/05/21 by pete@pete.linux Merged in some fixes from Ric Klaren for tracing TreeParsers, cleaner namespace code, and #line generation. Change 201 on 2000/05/21 by pete@pete.linux Added destructors with empty throw specs, as suggested by Dan Field. Change 200 on 2000/05/21 by pete@pete.linux Various performance improvements, mostly from Eric Dumas. Change 183 on 2000/02/08 by pete@pete.linux Added support for Sun CC 5.0 (from Michael Schmitt) Change 182 on 2000/02/08 by pete@pete.linux Fix a couple of minor problems with C++ generation (noted by Michael Schmitt) Change 162 on 2000/01/20 by mika@y0 heterogeneous AST change. not sure if correct. Change 153 on 2000/01/19 by parrt@parrt.foggy forgot to propogate b1 changes to mkjar etc.. Change 151 on 2000/01/19 by parrt@parrt.foggy pushing changes back into main/main from fixed tree stuff Change 142 on 2000/01/19 by parrt@parrt.foggy propogating mika's changes forward Change 132 on 2000/01/18 by parrt@parrt.foggy setting type to ktext for everything Change 131 on 2000/01/18 by parrt@parrt.foggy from dev back to main Change 1 on 1999/12/13 by parrt@parrt.foggy adding 2.6.0 from antlr site as initial main line antlr-2.7.7/lib/cpp/README0000644000175000017500000001434510522211615015017 0ustar twernertwernerANTLR C++ Support Libraries Additional Notes 1.1 Using Microsoft Visual C++ Currently this is still (or again) somewhat experimental. MSVC is not the development platform and I don't have access to the compiler currently. YMMV Make sure you compile the library *and* your project with the same settings. (multithreaded/debug/etc.) Visual C++ 6 only is supported for static builds. Some hacking and STLPort is needed to build a DLL (only for experts). Visual C++ 7.0 and 7.1 should support both static and DLL builds (DLL builds might be broken). In general the main problem is getting the right template instantiations into the DLL. For 7.0 you might have to tweak the list in lib/cpp/src/dll.cpp. I'm told 7.1 does not need this. For a static build (works probably best) 1. Create a win32 static library project. 2. Enable RTTI. (Run Time Type Information) 3. Add the source files from /antlr/lib/cpp/src to the project (except dll.cpp) put /antlr/lib/cpp in the search path for include files. 
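A rough command-line equivalent of the three steps above, run from the lib/cpp directory (an illustration only, not part of the original notes - the exact cl.exe/lib.exe flags may need adjusting for your installation):

   cl /nologo /c /EHsc /GR /MT /I. src\*.cpp
   del dll.obj
   lib /nologo /OUT:antlr.lib *.obj

Here /GR enables RTTI (step 2), /I. puts lib/cpp on the include path (step 3), and dll.obj is removed again because dll.cpp is excluded from the static build. /MT selects the multithreaded static runtime - use /MTd, /MD or /MDd instead so that the library matches the settings of your own project, as noted above.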
For the DLL build (MSVC 7.0 tested) * Project settings ("create new project" dialogs) - Win32 project - Application Settings - Application type - DLL - Additional options - Export symbols * Project properties (change defaults to) - Configuration Properties - C/C++ - General - Additional Include Directories - drive:\antlr-2.7.2\lib\cpp - Preprocessor - Preprocessor Definitions - WIN32;_DEBUG;_WINDOWS;_USRDLL;ANTLR_EXPORTS - Code Generation - Runtime Library - Multi-threaded Debug DLL (/MDd) - Enable Function-Level Linking: - Yes - Language - Enable Run-Time Type Info - Yes - Precompiled Headers - Create/Use Precompiled Headers NOTE: Do not use the antlr generated and support library in a multithreaded way. It was not designed for a multithreaded environment. 1.3 Building with GCJ NOTE: outdated the new Makefiles do not support this anymore. It is also possible to build a native binary of ANTLR. This is somewhat experimental and can be enabled by giving the --enable-gcj option to configure. You need a recent GCC to do this and even then the constructed binary crashes on some platforms. 2. Tested Compilers for this release Don't get worried if your favourite compiler is not mentioned here. Any somewhat recent ISO compliant C++ compiler should have little trouble with the runtime library. *NOTE* this section was not updated for the new configure script/Makefiles some of the things listed here to pass different flags to configure may not work anymore. Check INSTALL.txt or handedit generated scripts after configure. 2.1 Solaris 2.1.1 Sun Workshop 6.0 Identifies itself as: CC: Sun WorkShop 6 2000/08/30 C++ 5.1 Patch 109490-01 Compiles out of the box configure using: CXX=CC CC=cc AR=CC ARFLAGS="-xar -o" ./configure Use CC to make the archive to ensure bundling of template instances. Check manpage for details. 2.1.2 GCC Tested 3.0.4, 3.2.1, 3.2.3, 3.3.2, 3.4.0. All tested gcc are using a recent GNU binutils for linker and assembler. You will probably run into trouble if you use the solaris linker/assembler. 2.2 Windows 2.2.1 Visual C++ Visual C++ 6.0 reported to work well with static build. DLL build not supported (reported to work when using STLPort in previous ANTLR versions). I heart that in some cases there could be problems with precompiled headers and the use of normal '/' in the #include directives (with service pack 5). Visual C++ 7.0 reported to work, might need some tweaks for DLL builds due to some shuffling around in the code. Visual C++ 7.1 reported to work, might need some tweaks, see above. My current guess is that DLL builds are all over the line broken. A workaround is to make a DLL from the complete generated parser including the static ANTLR support library. 2.2.2 Cygwin/MinGW Not expecting any big problems maybe some tweaks needed in configure. 3. Old notes for a number of compilers 3.1 SGI Irix 6.5.10 MIPSPro compiler You can't compile ANTLR with the MIPSPro compiler on anything < 6.5.10 because SGI just fixed a big bug dealing with namespaces in that release. Note: To get it to compile do basically the following: CC=cc CXX=CC CXXFLAGS=-LANG:std ./configure --prefix=/usr/local/antlr Note probably dates back to 2.7.0-2.7.1 era. 3.2 Sun CC 5 It may be you'll have to change one or two static_cast()'s to a C-style cast. (think that's a compiler bug) Configure using: CXX=CC CC=cc RANLIB="CC -xar" ./configure The custom ranlib is needed to get the template instances into the archive. Check manpages. Maybe the Sun CC 6 instructions above will work as well. 
3.3 GCC on some platforms (Alpha Tru64) The -pipe option not supported it seems. Configure using: CFLAGS="-W -Wall" ./configure Or remove the -pipe's from the generated scripts/Config.make. 4. IT DOESN'T WORK!? 4.1 Compile problems The ANTLR code uses some relatively new features of C++ which not all compilers support yet (such as namespaces, and new style standard headers). At the moment, you may be able to work around the problem with a few nasty tricks: Try creating some header files like 'iostream' just containing: #include and compile with an option to define away the word 'std', such as CC .... -Dstd= .... Also in the antlr subdirectory there's a file config.hpp. Tweak this one to enable/disable the different bells and whistles used in the rest of the code. Don't forget to submit those changes back to us (along with compiler info) so we can incorporate them in our next release! 4.2 Reporting problems When reporting problems please try to be as specific as possible e.g. mention ANTLR release, and try to provide a clear and minimal example of what goes wrong and what you expected. Bug reports can be done to Terence or the current subsystem maintainers as mentioned in the doc directory. Another option is to use the mailing list linked from http://www.antlr.org. Before reporting a problem you might want to try with a development snapshot, there is a link to these in the File Sharing section of http://www.antlr.org. antlr-2.7.7/lib/cpp/TODO0000644000175000017500000000664710522211615014635 0ustar twernertwerner* ANTLR should issue a warning if you have protected rules and filter == true or filter=IGNORE in a lexer? This can be tackled by tracking rule references in a more general approach. * Have a look at the doc's. * Add allocators to the objects * Look more at exception handling * TreeParser.cpp around line 76 the MismatchedTokenException here does not use ttype to improve it's errormessage. Would require changing a bit in MismatchedTokenException.cpp * On Thu, Sep 21, 2000 at 12:33:48AM -0700, John Lambert wrote: > 1) The literal EOF is not defined and causes the define of EOF_CHAR in > CharScanner.hpp to fail. ANTLR with STL Port. Changing the EOF define to char_traits::eof() breaks things for gcc-2.95.2. Fix this in next release portably. http://www.egroups.com/message/antlr-interest/2520 * Fix heterogeneous AST stuff. It boils down to adding a method to AST types that knows how to duplicate the sucker. -> done clone() added. Knowing one factory is not enough. -> done in C++ have a superfactory. Also look at having to set the astfactory by hand (this is not 100% necessary). Double check generated code. http://groups.yahoo.com/group/antlr-interest/message/2496 * Look at messageLog stuff Ross Bencina proposed. Looks good at first glance. http://www.egroups.com/message/antlr-interest/2555 * Add RW_STL & CC 4.2 patch from Ulrich Teichert: See my mailbox.. and these comments from Ross Bencina: http://www.egroups.com/message/antlr-interest/2494 * in action.g (java and C++) ##.initialize / ##->initialize is not recognized as an assigment to the root node. In the case ## is followed by ./-> initialize transInfo.assignToRoot should be set to true. Report by Matthew Ford (12 march 2001) * Add TokenLabelType option for generated lexers. Hmmm can already set token factory. Then again.. you may run into a cast fest.. * Fix some #line counting oddities (Mike Barnett) > nonterm > { > ## = #([TOK,"TOK"], > ... Other stuff ... 
> ); > f(); > } generates wrong #line info need to fix action.g a bit better. * This one triggers a bug in antlr's codegen. #perform_action = #( create_tau_ast(#p1->getLine(),#p1->getColumn()), #p1 ); #p1 are replaced by p1 in stead of p1_AST. It's really time to rewrite this mess. Workaround: RefModest_AST tau = create_tau_ast(#p1->getLine(),#p1->getColumn()); #perform_action = #( tau, #p1 ); * Unicode and related. - The patch from Jean-Daniel Fekete is an approach. But has some issues. + It is probably necessary to discern an 'internal' string/char type and 'external' ones. The external ones are for the lexer input. The 'internal ones' are for standard antlr error messages etc. Translators from external to internal should be provided. Hmm on second thought.. probably not really an issue. + What should the lexer read? - Unicode units from a 'unicode reader' in a sense this unicode reader is a lexer itself. Just reading iconv/iconv_open manpages.. Maybe we can hide this with iconv in the InputBuffer mechanisms? - Interpret unicode ourselves. Ugh don't want to think of that right now. we probably redo something that has been done. Only problem is that we need something that's portable (C++ case) + What changes are necessary in the rest of the code to support a wide character set? Think most should be handled in/below the lexer level. antlr-2.7.7/lib/cpp/AUTHORS0000644000175000017500000000006110522211615015175 0ustar twernertwernerAuthor: Peter Wells antlr-2.7.7/lib/cpp/contrib/0000755000175000017500000000000010522211615015570 5ustar twernertwernerantlr-2.7.7/lib/cpp/contrib/bcb4/0000755000175000017500000000000010522211615016402 5ustar twernertwernerantlr-2.7.7/lib/cpp/contrib/bcb4/antlr.bpr0000644000175000017500000001201610522211615020227 0ustar twernertwerner# --------------------------------------------------------------------------- !if !$d(BCB) BCB = $(MAKEDIR)\.. !endif # --------------------------------------------------------------------------- # IDE SECTION # --------------------------------------------------------------------------- # The following section of the project makefile is managed by the BCB IDE. # It is recommended to use the IDE to change any of the values in this # section. 
# --------------------------------------------------------------------------- VERSION = BCB.04.04 # --------------------------------------------------------------------------- PROJECT = antlr.lib OBJFILES = antlr.obj ..\..\src\TreeParserSharedInputState.obj ..\..\src\ASTFactory.obj \ ..\..\src\ASTRefCount.obj ..\..\src\BaseAST.obj ..\..\src\BitSet.obj \ ..\..\src\CharBuffer.obj ..\..\src\CharScanner.obj ..\..\src\CommonAST.obj \ ..\..\src\CommonASTWithHiddenTokens.obj ..\..\src\CommonHiddenStreamToken.obj \ ..\..\src\CommonToken.obj ..\..\src\InputBuffer.obj \ ..\..\src\LexerSharedInputState.obj ..\..\src\LLkParser.obj \ ..\..\src\MismatchedCharException.obj ..\..\src\MismatchedTokenException.obj \ ..\..\src\NoViableAltException.obj ..\..\src\NoViableAltForCharException.obj \ ..\..\src\Parser.obj ..\..\src\ParserSharedInputState.obj \ ..\..\src\RecognitionException.obj ..\..\src\String.obj ..\..\src\Token.obj \ ..\..\src\TokenBuffer.obj ..\..\src\TokenStreamBasicFilter.obj \ ..\..\src\TokenStreamHiddenTokenFilter.obj ..\..\src\TokenStreamSelector.obj \ ..\..\src\TreeParser.obj ..\..\src\ANTLRException.obj DEFFILE = LIBFILES = RESDEPEN = $(RESFILES) RESFILES = LIBRARIES = PACKAGES = # --------------------------------------------------------------------------- PATHCPP = .;..\..\src PATHASM = .; PATHPAS = .; PATHRC = .; USERDEFINES = HAS_NOT_CSTDIO_H SYSDEFINES =NO_STRICT # --------------------------------------------------------------------------- CFLAG1 = -I..\..\src;$(BCB)\include;$(BCB)\include\vcl;..\.. -O2 -w -Ve -a8 -k- -vi -c \ -b- -w-par -w-inl -Vx -tWM -D$(SYSDEFINES);$(USERDEFINES) PFLAGS = -U..\..\src;$(DEBUGLIBPATH) -I..\..\src;$(BCB)\include;$(BCB)\include\vcl;..\.. \ -DHAS_NOT_CSTDIO_H -$Y- -$L- -$D- -v -JPHN -M AFLAGS = /i..\..\src /i$(BCB)\include /i$(BCB)\include\vcl /i..\..\..\cpp \ /dHAS_NOT_CSTDIO_H /mx /w2 /zn RFLAGS = -i..\..\src;$(BCB)\include;$(BCB)\include\vcl;..\.. LFLAGS = /P64 # --------------------------------------------------------------------------- ALLOBJ = $(OBJFILES) # --------------------------------------------------------------------------- !ifdef IDEOPTIONS [Version Info] IncludeVerInfo=0 AutoIncBuild=0 MajorVer=1 MinorVer=0 Release=0 Build=0 Debug=0 PreRelease=0 Special=0 Private=0 DLL=0 Locale=1033 CodePage=1252 [Version Info Keys] CompanyName= FileDescription= FileVersion=1.0.0.0 InternalName= LegalCopyright= LegalTrademarks= OriginalFilename= ProductName= ProductVersion=1.0.0.0 Comments= [HistoryLists\hlIncludePath] Count=4 Item0=..\..\src;$(BCB)\include;$(BCB)\include\vcl;..\..\..\cpp Item1=..\..\src;$(BCB)\include;$(BCB)\include\vcl;..\.. Item2=$(BCB)\include;$(BCB)\include\vcl;F:\antlr2.7.1k\lib\cpp Item3=$(BCB)\include;$(BCB)\include\vcl [HistoryLists\hlDebugSourcePath] Count=1 Item0=$(BCB)\source\vcl [HistoryLists\hlConditionals] Count=1 Item0=HAS_NOT_CSTDIO_H [Debugging] DebugSourceDirs=$(BCB)\source\vcl [Parameters] RunParams= HostApplication= RemoteHost= RemotePath= RemoteDebug=0 [Compiler] InMemoryExe=0 ShowInfoMsgs=0 !endif # --------------------------------------------------------------------------- # MAKE SECTION # --------------------------------------------------------------------------- # This section of the project makefile is not used by the BCB IDE. It is for # the benefit of building from the command-line using the MAKE utility. 
# --------------------------------------------------------------------------- .autodepend # --------------------------------------------------------------------------- !if !$d(BCC32) BCC32 = bcc32 !endif !if !$d(CPP32) CPP32 = cpp32 !endif !if !$d(DCC32) DCC32 = dcc32 !endif !if !$d(TASM32) TASM32 = tasm32 !endif !if !$d(LINKER) LINKER = TLib !endif # --------------------------------------------------------------------------- !if $d(PATHCPP) .PATH.CPP = $(PATHCPP) .PATH.C = $(PATHCPP) !endif !if $d(PATHPAS) .PATH.PAS = $(PATHPAS) !endif !if $d(PATHASM) .PATH.ASM = $(PATHASM) !endif # --------------------------------------------------------------------------- $(PROJECT): $(OBJFILES) $(LIBFILES) $(BCB)\BIN\$(LINKER) /u $@ @&&! $(LFLAGS) $? ! # --------------------------------------------------------------------------- .pas.hpp: $(BCB)\BIN\$(DCC32) $(PFLAGS) {$< } .pas.obj: $(BCB)\BIN\$(DCC32) $(PFLAGS) {$< } .cpp.obj: $(BCB)\BIN\$(BCC32) $(CFLAG1) -n$(@D) {$< } .c.obj: $(BCB)\BIN\$(BCC32) $(CFLAG1) -n$(@D) {$< } .c.i: $(BCB)\BIN\$(CPP32) $(CFLAG1) -n. {$< } .cpp.i: $(BCB)\BIN\$(CPP32) $(CFLAG1) -n. {$< } .asm.obj: $(BCB)\BIN\$(TASM32) $(AFLAGS) $<, $@ # --------------------------------------------------------------------------- antlr-2.7.7/lib/cpp/contrib/bcb4/antlr.cpp0000644000175000017500000000274410522211615020235 0ustar twernertwerner//--------------------------------------------------------------------------- #include #pragma hdrstop USEUNIT("..\..\src\TreeParserSharedInputState.cpp"); USEUNIT("..\..\src\ASTFactory.cpp"); USEUNIT("..\..\src\ASTRefCount.cpp"); USEUNIT("..\..\src\BaseAST.cpp"); USEUNIT("..\..\src\BitSet.cpp"); USEUNIT("..\..\src\CharBuffer.cpp"); USEUNIT("..\..\src\CharScanner.cpp"); USEUNIT("..\..\src\CommonAST.cpp"); USEUNIT("..\..\src\CommonASTWithHiddenTokens.cpp"); USEUNIT("..\..\src\CommonHiddenStreamToken.cpp"); USEUNIT("..\..\src\CommonToken.cpp"); USEUNIT("..\..\src\InputBuffer.cpp"); USEUNIT("..\..\src\LexerSharedInputState.cpp"); USEUNIT("..\..\src\LLkParser.cpp"); USEUNIT("..\..\src\MismatchedCharException.cpp"); USEUNIT("..\..\src\MismatchedTokenException.cpp"); USEUNIT("..\..\src\NoViableAltException.cpp"); USEUNIT("..\..\src\NoViableAltForCharException.cpp"); USEUNIT("..\..\src\Parser.cpp"); USEUNIT("..\..\src\ParserSharedInputState.cpp"); USEUNIT("..\..\src\RecognitionException.cpp"); USEUNIT("..\..\src\String.cpp"); USEUNIT("..\..\src\Token.cpp"); USEUNIT("..\..\src\TokenBuffer.cpp"); USEUNIT("..\..\src\TokenStreamBasicFilter.cpp"); USEUNIT("..\..\src\TokenStreamHiddenTokenFilter.cpp"); USEUNIT("..\..\src\TokenStreamSelector.cpp"); USEUNIT("..\..\src\TreeParser.cpp"); USEUNIT("..\..\src\ANTLRException.cpp"); //--------------------------------------------------------------------------- #define Library // To add a file to the library use the Project menu 'Add to Project'. 
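The contrib/bcb4 project above builds the same static antlr.lib as described in the Visual C++ notes in the README. As a minimal sketch of how an application typically uses that library (not taken from the distribution: MyLexer, MyParser and startRule are hypothetical names for whatever classes and start rule your own grammar generates; only the antlr:: types belong to the support library):

   #include <iostream>
   #include <antlr/ANTLRException.hpp>
   #include "MyLexer.hpp"   // hypothetical: generated by ANTLR from your grammar
   #include "MyParser.hpp"  // hypothetical: generated by ANTLR from your grammar

   int main()
   {
      try {
         MyLexer lexer(std::cin);   // generated lexers read from a std::istream
         MyParser parser(lexer);    // generated parsers take their tokens from the lexer
         parser.startRule();        // hypothetical start rule of your grammar
      }
      catch (antlr::ANTLRException& e) {
         std::cerr << "parse failed: " << e.toString() << std::endl;
         return 1;
      }
      return 0;
   }

Compile it with the same RTTI, exception handling and runtime-library settings that were used to build antlr.lib, as stressed at the top of the README.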
antlr-2.7.7/lib/cpp/contrib/bcb4/README0000644000175000017500000000012510522211615017260 0ustar twernertwernerProject files for Borland C++Builder 4.0 to build antlr.lib Donated by Ross Bencina antlr-2.7.7/antlr.jar0000644000175000017500000154274410522212202014430 0ustar twernertwerner[binary entry: antlr.jar - META-INF/MANIFEST.MF and compiled antlr/*.class files, not reproducible as text]
ソコトチk`タ\}RケWC雁ムユnZspワ池胛YHム鰰ユャ%Lュw截BセsムIGーア゙ム~矍=q゚可図5a<ォ5ウ}[zQー1&ムク;ヲo篁&カ◇k胛Sハ7mq鯲)e匡箆(=yフル援yフノc`嚼jケ"ア,N&Huq|F)HナWラ善;フ8+・@zO.マiゥ {キ){矇タqッnNK陸ヒD`タ弟]8 #Kツ贍」M膩mセ0_・箸W壓瀉At87jl\ンネイ 0K桑:UaPロ\Q゚テュ? 晢'7EBモ「由)咥ヒpニ 8テV0iホ[/j樫。.リッ5D秕>ワヒァ N)n ヲォテ{祀@`ユrフG> 諾e舜ヒルT1ウThウy似eョヒ)ェゥ5kj台rア $ コ7a・チ[8XnJヲMqrャリp瀬ェフgス朞t :vhtj*ネ).pn1hホ5フ、kェ&FPKzGc|卵ヤハ@cスス、ゥヲカハ8(トyK遠ホ抂ン2Sワ{_V徨6食ウD._ナユ5アユdrル、枳99aqト卻^D帰ラヒ0ュヌ%]ーエァォ埀 」」!.Pテ!bカ9zgェgサ /ンy"ョFLQァ・猯フ 勉Wヨ6=ンj窄U^_ェ_コ ヤ姻嗽賀ウ叟nY¬蠱*sg"wムチ(gAホコユ Mvェナ=t釵cVシレ9鈞枋v瑰M( W6ヨ4Dフ クD3ォAモ腸ォd}5qキ1uIシテ親跿I|Di@シサ&リh6u矮、ィセa4゚哥穗キ@゙s@G.ユy7h$レ「5ユ5XW化ュ(lフtェコ貭沐gvソホ)ッ{o}ィN9モapwnホ[畤Aスリ3N*ス*-ナKケナ zL%\Uヨ-~xxO1ホZテヤFミP [ハ輟^{cヌ&0鬩n\殤粤(痙,ワvJソs=砿Hpu祥/Ce(ヲRy*癨ッ5x\ナbi坡羆U〕ュ& 9カノフJ6}鋭レシ02ygDl~ィ2ー、 リmn*レネ棗オ。pS#v凾11 (`%羃D謚ュ[サ7z=r_$ァHハムマS円WyホD 3ヒォテネヲ]q]ホケ」?~ゥtェツムサィ「l菟V:e匣倥hィク!@侮K ォり>璋k偵s~/;ョ 4x7u F匠ト4{&~d_4ォアワn`ォハ-ナ棡d鎗"ロd( 肩{O<レ]_襁+ョ 鬨Wテ(ソSー釶ヤ! 4エsSコ:7tツ3uモk齶ツク桶背厶u%.p」^sYo:Lハd:ォ゙Uh而5'^セスx5ヨR牽ufEナフ密ョ笛チtsヌT^;q併コ=_Wテ jwTxサVテイニwワU讓p#ヒコdナnマBF淳O%j攤ヒ1l+。)7kj穹紺kLn ホvbp袖ネ\v夜sC[Lw・サ娃.r懊8u昜'坎ホム綛ホ7fQ[a)込ルツX!\tマ)EDzテ」c菌ウゥンレts[U9モ#。yムBト<飼GBルャ 。槹-mnLK\鳧5シチ0Dマ3ォS律ナB+e>|Aヨ%v+クェnr鰤3p ン+<;ロ$ミラ9=疥シニン-幵サ s3」[!ョ.IY Nヒ癨* τ{sN tp軻8h`ヌ背ヌtOス#;z=オ。J;w琴トe&肚・ヤ,袒ト gテJWE・1奔シ鱶3ヲンe@7タツ,Bノ&/茶7指ノ咏j$x」忱ョaヌ帷 Dy蹄<ル}樞>Ou洫ケマオtvy樣>享m樒クマs }p1c酖oハ.w犠リEbv <}xgナw冓G:,xフ6竜fH7タッ5 マIy[ ヘc[ テc"'ュDm!:囑ヌキN゚メ.Aル先、'NJI價:途棚ヲ到臧2I!YFF鉄ネDr*泉Jv赴H!アd莊槻m、 >ロHW^uォロgj廚49rョ1qン。Mキ6柘Ll|ヒネl#ンcu&xン:r1`エr7z~2マs。]V+鰉Fゥネ゚FzRメLニタK6%ロIッPワサ策)ネk#}ス`馮他く、AC餅O9@宣.寸.植3m#Rイ》ハヨFc2トiZ нx>m#9w!、`6$拿 oW掻d#B6掃r ル十G簡@ヨヘ、五Df騒ノ砌RIn%オ膀r,ケ岻 ュo@ワ p/>CМ,A◇86w倔|倔 @イ&exニxi##ロネィV2コヲ 枋瀰ソ62カ"綮ノヘ$;/c|)ハヒ(ニV2スLク沐タ箇拉mタwュ ンュ菎$7レ%╂2J!:Sス,レyRm*T曼L<乾ヘハcU」ナヌ^ォ遙5WqFRナ*ャ1s ヒS6怛ソ」萍/9Jb|[OzCコ &署()&Oタ 雀膠$YD栫i~4ネ膓r-y洞盛ネy飾J゙ァ メ|I>ヲナコ線娵前1 !_メノWt-ヨート5ョ\シ r!炸v{迄ン%';ハ軈2墹/w腎フ仭フsニセ、0記フヒ輓蓿"棊ヘ。`畄イ("yミ(F哩、 、?Y~ $7逗゚ノ$傳vyv90螟 ヲ栢@59恨撞ィmyュ;フa乖ZエyW「B「囘0n濔酊サ爍イ9ョ=シ#SHR9*受'rヘb蒐笄VツK0m&]jo"換.E Kタ[ ヌ□. ヘ{レフヲノ'!`HVモA]ワ_tqoX;ネ靆レ逗]アゥj&= モ。zPF1,鰌 ヲ#」:^*"=Zノイ{I :ニケワYAv墮%Hニー0謳ホ4洛゙曚チミ ノ奏ケ"モム舐:ニ o3F゙ムT準^齶Ig@$4l仝@Xw\メリFツ快レ(xン(:枦$C'エ葭Id8=ンN(。ヲ゚ャlカヨiY「)$ュH$x[頴+広チ゚ェ、oタjタ*ァメ+Zr廚ホ5m艀,コ<Qァg懆リ♂カjz Lマヒ89VODマH{wbホェq?ナ"c+撩Sヌ*レ`ュVイヨヘサ呪ダァcホィ6rー:+ラXユBz8サ瑜37 eYイ,uHロ?雫g9リッuーGツ"リ,ユFホ.メy酘s篁Q囿IカニキQ峨ヒ1マgィァt^カ6シ0レp]エE姿,ト享ヲィミワヲ%ゥl智!セョヨ7⇒RズUQテ渥/左セ,・タVヒ :t ッe"エ懍G&楢!d ュ ミ$Hg代駘「9d#扛Z蕊"4=ぞB$メ」チィT佃チ|G膰コ*。>レD3飩:式。ExZJO、s)t>=.「gメ%,ネ9璃C/閻az1ンI/。ソモヒュnエル鶤7Xテヨ(z・オ?スハ唔ッカ贄斎bコノ:某cュラZヌモヘヨゥ#h7ヲヒ;讓op嚇ソ6;@タソ ヨ$ソノノレユ^タッL!烙 xUケz;ル8ス`チトnレ@Cャ「 ッqリ*懿k攜リー構牋ソヘm艷サ !カト5シアテy[ノM=nn|レネ-)-贍=+>1(NOワェ6r灑イ「サZノン;ノ薩yOネカz$ャクナ凍2モSレeン思tワQ[QカeXuホネュ エ鍋チ u゙m&>$ルNノ0ヌH゚驩}耕シ-左甅Dサク/ホセ?~掠リヒ-覡ツyzケ」。賃ィ(hムBz詁L?暃1 モOAS|z8Jミ!ネヘ8フhQ+ヌイャB戯[メ*キメャ枢nュー:YュョヨV7z+モコル麩ンfーレャ}ャmミiヒo}l イ>ウ[?XCャ_ュ\/+マネヘョ_梦]ヨ^セM鰌=乏ロ|<ルソ{"オ蹣-8E;悗B5 カF帯ヨhrク5e遷ォ豫ア&駐ュネ餬)ケミ咤.オヲ1゙竡qウォ;寥fホvヲテホス!ノケ{2ムケ{ェ捏瞭:F[DXモIヲ5レキf悼ヨ<イ謫扱タ|ォ喰ォ.ハオ泱ァSォ]テ竡ニ(gィマヲウ{ェDソElホ0#G黛ヨbイソ %V%9トェ"ヌZヒネ 吝03+DヨYヌ!゙貳袿 F諢槫作9キ&脩ラJ^h#/ qハs冷班ヒIM^ヘ6ネ蒄>| ^+ゞ 4{テュ*「U゚lWオ5Z擣^v4Z。ソ0+Fハ弦M視メ#臂」ュ゙3J*=マ驅I^ニaA騷-ノヘモF>1ャ怖Tトウ酎 |慙[ノ'Eシeキk-レイ%早"擦$ナbチァゥ挙f8`Ub5ヨ*談ュ&ウュネ替王ノ:劈`扣ホイN#岻モノ斎ル膀r殷yモコシg]H>ア.!゚Zノッヨ?(Zi]E3ュォiカオ鐸キ6ム}ュkhョu- 罸hゥu]bンJOアnァgZwケンyヲコ吉<悍」ホgョy ネOヨ= `ュ、況エカ沈ヨスI槝A@ナ t懈/RJノ{Wj四xszシ'乙Sルレ/貼険竈巧Gト罫 0gt ソF|VM聴餘Nァ地@芙Igk;c=jg)オv )2゚zヤマウ$l=GV[マ涛ャネ袒K腴eイルzE因テ「ヘp畭ニ レ。q6VD#4 ス絅h?%Dh?c。 5V:レ'7BY烙ユ>BナAァ,哽諌0ミハV{フT^カ関5ゥV笋メヒ`7ウFkンM >+ ノキMウ>%闘マネ ー_9ヨ7;メ`}ー?5odス;ケツソg"w3ルツメノCャ訊#涓 Nvア}(eルTイ^エMsY?コ@緊@`ナ Bマd9"訪ッcyF飽og 6>テ 鬮l8剥_ア琢6ツァ.vキォ掾<ホ謁杓+ソ7{ コ酢,wン陸BYtQフ;ミ3テ譽N麟ラPユコDオ.[v襁~2D>サ塰ロノ.x聽起2ハdキ9カまBLョ僮I\ウ\雨ゥ[&・ョオ+h」1ヌレスロサチ請奉シセH,マ0レ$ウチ&チ&>l2ネ$綺卞ヲ茶Y99LーCネ速,e3I妣壓。86岻eネケlケ-$竜Ed#;<ナ3VEセeKゥfヒiVOウY因g t( モ。」リ コ?[ 3qku}ァナシL*ミイN孃フェ?ソo保ゥB」レァ碕シl゙w+M[ヒタ埓帑ィアオDアモI7v&鯱ホ&リ9$~イタ:mJ9 tq%QイgRホ O7トキ耄l+エムNvユラB仙5lヤ'bレv惚クcuコAv_ミ,4#n搴f。rヒル。kノ=Y oIf6マテ1瓱_゚Lニニu觝ツ@ハB%ロh臭コwュレワ士q5バ鶫p $mュ淅ヘニWノォフ戎 
=zネ団EヒC`イ$m%トォャLレ7〓Lレソゥ絖blBヘLラ@陛ョ$」リUdサ囈ー$タョ#'アノ駘3YョツヘF:サ仮篠nヨJヌウ6zサ櫻cミCルフ{録フ杓ハu9nエィ染スヘー8ワ!#1eOmcケ\泌イカV:ー儺タcミ虻[鮴ヘd"ツ=)gSニpQ レ$SX垂|ミtR蛟晉タ帋@欸~:、m&杳-゙6@ m2i.*照ホリ苹= モs<フ|X」チ$*ノboxh/佃゙!#ル{、仗O`6昔b麾67i`_5ケ筑C6ウノ晧'r/h|ソキgSГw@ャ従ロ[クラFロネ625i[粘Pモ鯏9z ハe*゙聡竃゙革zA+-リAッe{ィ/錵讓径ィ2。Ho|oヲ<ノョ 舳=bチ-ッヒ贔鴪V:イフフメ@]ソ鑼軾メル}ヌセ淺填狩 :ャ瞶ッニk]シ-ヒ、5m倆t径;!zハ、オmエョ π2i=^B/繦AワcN腆ZS躪ラッveメニ(ヒ"a '瑛ラサネ;b}fメハhユ畑ォー+レ靆<}゙V{:-瞽$эKv-2皇岶5;>6 ベ'u焜キ['wAモSR9咆ヤ事沾P4ァ゙Z輌zg乖ワFマLア:棚掵5z6棉l瘡oャ轉ャアb=LL=セ癸z>:>\ミu澁{n詰ケ洪h"墳。嗹フVロ鮗"Cwレイuスリミ)コ洋bxオセ△畑ラ{慮ムヒ$ォgノIf線G対ヨ署栽サ?快ケ駲;ルリF 窘}0マxヲEョク(|イ廊zeスjxル啼ゥオ額2pオsP {7\ミM亰抖オメkレ襍mコ(O%象擘uッw并zg@ャ!舁0 奏セ腎蜍H?碇礙冬| )漓2蓋シ・d*? ハr2隣脱シ,磆、Ho +y#9燥ノ鮠奥ヌW尻Jイッ"ラユ腱~ケΡt;タ寂イe#ウユv拳チ?g銖/リeカヒZwv=ロツaッ_ルO/'゚ナ・.(-,>X0>Rp>A>I(>Shセ@、」эッ]ム賓咨fム旻%zナ>|サ靄氣`5ム響+z )劾b占&タロP9"O芒ア"_Lb(G渦b-j!アソ8K"qケ ョナ甲頂[(カ嘘据腸r{リヨ{:#y<゚83S゙辱-y粭[「ェク d゚Dノtt>鏑ャ_ぱxtヤ-Y壺\ヲィ8.j蜥\cォウ蛭波メイモRレサエ]Lf\ヒキW渭耒'R{7;筌!t(vニ[~フ陶ス!駄ヘz*ivクY{vワ柢キ担Q寫H 5播シ_ヌケ^ノPケ(M畩ォカメカミ;簍i$9アネ覧ヒオメ梁(lp李ウZンホoレ,ラニFrlm8得メ1^ジシkミ!ヌズ周\ ゙H]A4ヌ/vセ゚mフR9.ソR#Vェ66リ蜘<湲3カ2&z; rヒ・$]ヤb9-鶫ム@0-"、D4訴b!V土8rЧC*ナq9V廩V遠チ曖ホァ-b-y@ エ8鴇(ホ"o圧ノ{oq.Z廨#ホ' ィ%.、ib*.「K饒q9ヘヘt蛮@'+顳ア膳窺4$ョ」ァ壱b3]'n、崙Mtウク呀!n」O棺顆級VK-VOムf[ュアb5AワgMZ絵ャE祺k悦フェOXォトk菜i.楸.OYヘ稱ャ警ホzIシh(^ウ~ッ3*゙`Jシノコア樞m6Pシテrナlエ縁リd1;H嶇浚テトglア6/ル9+カN|ヘ6oリ5禺v串爵%セgル冷?{Eフv過@;マQ祕>^ナKナ.^.vY梯モ、ナマ膜_(9ソLJ乃yゥ2坐/} 厥ソ6Ev\v]eネ漠「ッ.re){>b。フル[,ナルメ/.Dウ(塔ト縒_エ,^輩 /゙39L ナorクリ-Gネ49Rv淵9VヒqイL/代虱u 諒ケT 陵Ry凋*マ盲蕀`ケj]-+蔟rシ]ホ眺P頗+汾萼r~C`t」\WヒG_tオ゙mj%xO1Oトq歟ソ腑mOp「゙/゚t・&w4ソュ8O履OZjレロ?_i黜+ヒチn・%Z@<% 裄 マ 贋 $.@・蘰ヂL2ビュタ :リtミソ35t!Qヤ$]Ezネナ、キ\Bヒ*2L.%」2R"kネ4YKfネ:2Oヨ#d\(ネeXr浜$ラヒ0ケEFネンイ瑛+ネvケ廷鉢ネ?號ェ<湿-ラ随芍艝y Q曷挑Q-マ膾4O朏ヌネ$y!%ラムケ"z能O茹エQ^F女ヘRy}P^M泓+rX^C?隣Z勿liyヵ!oイハ[ュ塊.k仮ロZ&アeォオZnアN杜ヨルrォオNnウョ煎[-fu|ネレ&カ梵ロュ鈕#ヨ+Qgク涙ノク| 妄イ|ロO届倫ウ」 ャZセネ茘ャIセチVヒキリ]サJセマョイロ衂ャU~ツ帖ア'蓍iE:褸_jq脯^ホ.isホu 拳ネ、キ・8I?偃ス(ハ'ア悍マイdチCラ叩逶幻F[!ス┥W% 撻=咆緝容セ姨ヲモv)@リ-$m-ァlャ0サッWg $VHレ悲!G/"ナg2QB&ヒ_ノ!7rィ,%r9S&酖ラ+J。,rd湯:ィ{笛#キ+ル「「Gルc旆セウ鄭:Md訌ヘ潜ノエ=0Hォbg.鑒ノg.tフE|!マ7kィSz'ュ=鑼mCラ咆sァ奈<q「4躰ュGス豺ィェ+濤gカ哨jメ3髣Eセ「3餮EセリPVカ9睡梦6ュウEスZルN縢i&rD>k」゚ステ堤ヨム=ャa 。'Si$ヌ&? 
]Mマ?捫i#c`cモリX_妄Zメ'?ヒ}・gァ゚Kk&rm]テYi淳ソ煮綣E><Ez0マd;W「ェネア+J鮫コ;v姉アア餾鴿ュ1Xオ詒・#Vvカ Xm イ\僧キZ拍*齡ン ijチ マヤFホリ`ゥwヒエ婁 ィI>jウク・ロレB蟷ホ親ロHァ<ワv銭q *IュッMjMヘヒエTサhネ(,ゥネコHzlテァ礇トツ峻`ワム#clャ抛GIツ&虍ルH誰)ヲaCロkモ/サ等マヲコハ0ェナ枕秣w*PナJロ5ォ+柯ウ]\7ソュ4ォォs.mmラン-サ?マ齲弊ム愎クラナ」/BVW<`uJJ丘ハャホミキヨ・ィs ?「アカ[ ヲ>ハ呻ョ']O貂テ:、ク]n ハdwNbョコ=V」[Wgui~゙IrテN糀窮LンsVュ #l リBkM&已「ョア肩。/ クL.クy3ヤ3スHエフC+3碍ヲユン(祁メ)サロ6+ fン黶ヒxコ<サ岔癰ハAコ蟋ひ3EユセW禿ヘーv炸(3綴恟u歯'~ネ)(g61ススハレ-ョi ッ"ャ^SE、楞vk鈍ノ鱇;クヨヲ-゙ソ6}wヒ溽「O(UY:26ッヘ|ィKー!憚O蘰)sM B)テ"[ニテヒエワN[鞨 fKセ-a\サFA闥-ウモ。疎キ、Rミ゚゙D=ゝ!爺K鶉エ鶲コ)?ァオ/勠水j(9V蜻ヤ0r・*;>MU#ノ;j?。G>U難ユxォ*「DMB@GォRZ「ハhゥ哥ァェC陦j:=Lヘ、GォC髯0zョZH/QGメMj1mQGモ婉スMユモT}\K欟策yェZI゚UヌムOヤ:N「ソゥ-ヲNア|鶺ォサZk昂ァ[耻YVゥ:ヌ坏ホウェ ュ%"+ィ.アユzu匏Y]iンョョイZユユヨCj」、レd=ッョア^SラZ洸ャッユヨ/jウ:1u#ゥ婢wオ榊ォリu?岷`A ォSア ユcJサC=ヘVマイヤ 1"{Zスツ^VッアOユ幗k循。゙鍄ェ駿。ォマチ黌~クユ|・賊S睿~キ捕ッ/hニ_ム愾」Xァッu鳶莉tw束{拒コッ =DlヤCE勤キ\qッホ場|./矮'=Z・ヌネt=^f"ルGヒ!zヲK8=IホメネコLュァノJ}成モ2「' ケNO覧 シFン'[lyォ#ロ\ャ'゚ユ薑sスP~ォ諸?鬟襃:ク^「飩5BWゥtPjU。劉9コVョヤRン@|欒t獣^Lセ封 ク`ィ>ヘ3ハレX苙y 棋:莱oM<ァ#トb;リN.:~7[)"ニR 2ソ゚NRルtOoq<6浤9}央♯襪U睛ヒカユ;ゥ腱v3Bレソニマ]ムeL?ニMエセvモ鴿m棒n鯨イア4チK「ホ・戒ォア秩~Rムbミ9コ逢 !: ゚*フヌ#R7R学%ルツシキYq~1箟ホqソ「蟹1サd'=?["y「トホMI(gネィo3aホ師ーカ!MUヌ祈Dウy峽o[メケアワ奮籵トアラテミラ9tラ7!;_9フbNbe煥ラpレヘg我ク{サMl ン&ヘLリセ17$Jル萋X。)+ケwネ9)ニUネMY≪エニBヌ続 '悌z-鬲マ ~}勣マ&駸ノ }>9^_HNミ zrウセ値モW乳&シセ⊂ヲo"o嵋'・セ脚}7ャロh゙Fウエソ~ユムア1:E?Eg輅オ~茄髣飩スJソEッムム')}[iY;ォチハヤ?ZスOヨ4婉ァ?ュ=wEヨッェコgヲォモツーK&,9-ーdAYV イ$% xbタ奪ムEIヨwgAP扉TD*)f午ォ龠辨eシ}LtWUソzRUス哢 Ai.ラTコM ミテ哢マi&ォェYャ囑チ喨AヨOヒd7kア[オ麝啖Eホョクネ∫\ヌロチ)ム゙%bEレHテNト泳v6蛯ワ-「q熊 サャ}イp薨]ム L\<7оヨL-Uオ0ヤメ「ヨrアヨレjM2ュメ堙ユZ クFk ラjュ7リソワ-危P クソac・4Gョ>W5m<ヲbチ-・M蜴iホ沱gナeラByッ aヘー?ァ須4 n「哲釿チ譱オ i?=尋8Vオ ィヨリイエホミ^]エョ星uZw「]#オ0Nキi0_サ榑タ*ュ/ャムチzュ?婆Wテォレ@リ・ レ`8ャ cレ08~ュ后Nハ猶ョ#破8i0R然ョCJス)e4Rハ、然nBJ2)eユ&ツ@mz2龕= タク側ィ ・\'e「m hレTルfッロ覲PdPーヨ-#ィC*6コクwルh渺鏤7ル蝿、ムP@カ,B,y鼎8$"ek+ッDユ-エua@>nミオ{タミmエムニラス卻m5Enワ Nコu゙@ロハ萍Np八X攻*・ィ'ロ'nio抱V7ヘKZ<i!Qシ斧。枳p&;ヲiオ'アナロi[tJモ)l4市|3イ &YUレNレRYキレVw圦「%メケHヲr)3ネR #DルオbnEr杙AVJ犂ササ]'Qア齊ッ\9,ズ60F/ォHウマ8V@%m%2'6ヨ档レhヲュ\mメョス7h・0A+)Z9襦|0D傾ミ゚J\アN韈Lgラノ凌驚$_%マdyB%rf"T>U >ア7Gニ稀怨xサg真#凶o'スB觸 エァ潔Iゥy{d艫(レfーエ-姓ス 5エュP[{ 9u陟mョレ.OoL゚cエ}0Y{ヲスwi。H;況wab總bj了「gフツ)ト演褌%;セB霞餤tトm徼q32宋G!手#ムヌ8「c8「絮W;#7ホ癡XP慊Q2q驅楫Gン?ゥゥGj(!cー ヌ:$?H{ヒ(ツ饑ハ旌チソエ スクFケ*ゥ 得v゚オ・ッン?}ォ/|P$焜tタFZ+丼モ蓚r:ム@EQト5eT證テ{CEZ痩トZ)$Wニュ gX:エネ痺?G"v莓H:Nt オAzュSg、kヌ_ケ ]'vツヤーE<ネRQ翡險E 7甃鵄 1:~*、7ト ・ネヤd3ゥ"藥F艸ォエハyネ・癲エ塗`゚闌&ァ耡淳|壙ホ9N君推ワ?帝飢鮹_A[殤#9ウo、途オ*ス4 ワd8z<ソY、キn、キ癇,礇髞ワx"&匍ノ]鱈uケ9燎RCンュ、モSo゚゚メ゚セ#ソヌoテゥOヲ'!ス3y9#yyWr~N゙ス'mウ{痘3痘%/O^゙他@ラ コハアdェv_ 泌マ j/o セ8S<比%果鯏x銖raホy&v5[トシオテ*sモ<ァi-nィハトキ q 牢< 截&ユJk;6痺>~ハッ。゚。g>)|8kタアナZY{>嗽訥リP~#サ過ト&q~>-X)淙vIs~脚/ソGYヘg*/芭Ae'泪縱父|柮狐W受ヌ百|。#/R~迢T?・i5/V%・|ゥ堙掠-r5?」v+ヤ|セRタWゥテウ麌uN敖ラィモZu&^]チ_P溘ヤシDンネラォ[u;/U゚臚鵞S条M1築弛Vソ躰ヤseW|徙kセlコッ゚ヒ;}m._゚ノ ^゚ユ|殪モwヒwロw3゚嵌゚ヘ|w}sa゚腋o)タキュ"ロニサーcRキョ@Dhホ_}チ嵶貂.ァ&kBヘZ7E=闘(G\辻"鶻-テサx荀フミbt・<漿ケ銛(ct庖^゙睡tヌr^-フ侃。辧X9ホIYェフ^木ナ ノ「歟l#・ーネ~ホ)瓧|ケ+B・t(EAラA%>ソC惆」ハ_k:Hx薯ト b。液ミミトヌ?セ オ]慾ォsツ隨J$ャUzユ8秧Sヲ絋)必iゥヌトゥU1{HセヤJ^@\ト&カコ ;|勠チJiャヲ'}Csiy鱈U」g峅麝Jソラ隴縡M`」ァx、/%Va嫺Hカク=(1ェ0、Y?W据 /ヌワl 雪 uメUン+v腴フUA@キ擧ヨ挈ナミhz5.]Uトレbヌ辞#}ヘ」スモ袢B歓滌*エ,U4*lヤ擒 zメタumw廸fゥLXオツ|%g峨F"6J骼レ゙y禺マルモレカ8セ(n)QBYdrカXヲォ矚'1hoゥメトユ悚EFd/逾゚A"i薀)&-抻$埃ケ魎e扇濤61o$マAnE 鐇幄nh+ト]wA=qヒ>eK ン幟]辺zLヲ梶ァ-Gシ%」キ*p蚶酒z[Q乞/れaケ艘G2)G」@アkニYィ#68ニC9ソ巫、;,゚"eWウ$|2,<ユ~\ゥ|]7qホァ 献]`ス逝瑶M ヤ#マ> Rモ[ヌ+5-ハ冨屍陬ーmoA裃AZウ0弌gテラ:タm:ルコ u?,ヨーJ7a屓サ 慓/_j、ェ朞j5ISスiュ_B: ネe・*ス!、7"ラ!2Nマ!匂fdガ<。キ ヒVd材嗟ミsノKz;ゴラ;Rェw・壜載zw嗇、uヒi{=殼モ h×%ィ_E}霓z?Zャサチtソ>梅、」_#乘_ヌ*」XU}"ォ」Obaf民Of[リュT>栽ユogEtカB淬゙ユf溯イ日A・ェー眺?「\「マVB<・セ@鬯?ヲヤW鵙ハ0ieエセXッ/Q&K2}ケャセZyQ_」シヲ?ッム_P念R蜚^ヲoRホ齶U「ソャj65、ソェ6ムゥ跛ッォン*}ァ:XCスV:V゚ォホミゥs7ユu5ヲTキ閾ヤサーzHO@?「ラ宵゚ゥg衝_O|T?3O}A3_カケ、/ェ疚ョ漑uミソ モソ采MヤMユマf轎ソトWdP゚2V官トP}蜀マ眼2セ};h仞」7繰レFeネUュ広柢汰ァ大kT1ェァ5ルUF-:」カ轍ヌヤィンh \lD5骨@}」i lエバ吉&gン<陣yメ;.ョh眷yッ剴テU %フテロエdメz,aト"ネH}\囃ッリyカニ=*1甬リ)*|コ派)タD':・rツ{7g事蒿゚冠KR,s,毘|鴣;ab(}>゚ =2ワi8エ&a9柩涕・wkロ崩'シk`-6Pンh1篇:]。タ才aq7zチ-ニ頻述ヨθゥ。ゥホ Mュロ}ロ`@約 ヤ5榿,鑢< レGカ」C7モw% 仄凰スホロォリホロ援ァ$スムッ・&ス9ュォ2畭$召]7ツxォスノラxイテヲ(ホシ酢歿:Vムサv:iRAヘ4、%Fqニ 
)゙)h\K=O+ァ3Ul蓚†5劫WU'<ィzト\廷悸ハ8/"~"「WDエNv匡&゙N~M坏A[k」ヨ.亜゚==マS0U8n熹飄1ト $]jツ木ト|2蓬-煎゚ヌC&P栖シ>モーqXDワOQニ"惆B淑BカiC]ウ240ォ@ ウ*苞C73 フlクルャSヘレpキY7テ<ウ,4ツ23ォヘ0曝9Pn6-fリf6M {:法^)"O、イv嫡L洞ハ8~7= ゥ嵐桷リYJ從ン4%"尅フ:ムミk0=。LY"肪リ規=fzモ0ヒルb"掘:eWeO ッ*f;ゥ%ZTh鮟@3.Yニフb蒋ヒ鰭EK厖I。 /P.勣括 -タoカヒl冉.ヤ2ロA#ウ=苻ュル.3;A/ウ3L2サツ4ウャ2{タfOリ`^咬ゥy% HミOrヘ\メノHzコ"?e淹ニH7#egk/ョOrシ(~xJヲ 9W「ー.N アイk癢Jツノト2・「yヘIミノシ ~ホ悉+Eマエーヲ(qI蔚ofアе欖qL8i 富ョヨ鰻rTfn砂」ハ\6#RFb゙竒K}誥ト擒孩AミシZ奘BGs&\n゙」アg>キハ徘EtCトム%ミP・yR゙詠/」EィVサ29ソト`憩zDャ-"vUVAサ罰宮rケ];ルzキ] pムム`ルム陋。クケ}姻&咼惓ョロ゙コム$カdsT3壽ィg>受サ:婁Pイ,Eiイ 侏a道%ヌJ8fョ帽s骭4F7Xト腰yM゙"ヤツヒ泳餝Yヲcユ ツ"ナ,レOk宗」キ艇Rェ*,*hソ偽ミ(A~/ま゙Aコ$ レ既フン( 程ツ・譖2゚f諧hcセ qv ヘC0ヤ|F賂a「!ワi~+ヘ渋笹 5?E+筑2M9イ決3+ ハUホェ(ュ畑X。L.^ctCヤG\_ヲイtヤツUシ牲Rト驅 [,s !#ハN,[科゙サ%I4Or臓チ<ワ<賀裃(得D v "贐ィcセヨ詈鑞栫|, ツ酖ヘ_~塔キ;]ホヲ屓-「ネtgモ楙Bー%ゥヌ"呻C礬6Mdヨイp-h@キ4ZTオlィnUツラ~ノセr マFツモーコPrKA$F>dウ戴^iユuSサy マ ィ$ゥ易YQdbMォxBPヲ゚ヘ フコ\逐ヒュ"Bヨ凭XfE蕨n モソ,5ネк|au ォ軆Hマδ凭怛ラナ|モ eユ欟 ォ.Tア.VCィk5Kュ┓(エーr旌YヘォLカZチtォ-v蜀稻Xヨ#葦ル8サs56&シcHリ ]漓JYモ゙RケモオハィgオP鑚6イf龝 2cRPI決ォ酘Xs!是セ゚モ「ィZzァムAメXK荘泡#d片エVC。%vdァン,ミ+uレ jク蜈s1ホy。xQ@ノ+ニb3iュo%4faォkワオ ェd)フ#オ嵐'ーヒ3'紛tPル9鬢eOK。ヒAンlS乏 uヌJ紫齡H1.シ「チMツ吠@マR7ー6ォ蕋/ifキM壑ケア ;1リJJd`西'^_6FP緤ホ"DツT"ソs夊サキカモkサxcヘEpa反エレミ%ヒ]&'Z~cタ* ヘpン*G:ワtrト爆[qツ }ャラ滸\o)ヨクラレマgュウチf0シf{ャ」暄sアNKメロ ア~ sュノ4yツ:C[酳2w、3 m.mO ソハレAF(シ嵯/キ"蟠諂R ヤs|ヲュ6Zァ盃(刃モ$Gクo#9・ーク ン「8ォ筌 貞ハ 彁_,蚰@クz}蒟bmT6*カル ロョ *ミリセZルC枉コリルミロョレpス} Lエヤ往u暮悩B&毓ロ霾汲%_$ホヘpニ%h毛eモ0\Pヤq・学i1?弉Oヤ[ lf」RwD[#ニ:K嶬1ヨE^W干ョY*ェnBж書茸ナ随<"-儔"I、'(ィモ緜\テ0クfチlv,歸=eYホョョミヲロヘタエ[!6[#6s。ッンFリ]aエン &ロンサ'ワm_ウ0ロ sXh_ Oレ}`貸ハ~Pn_ 幗pタ.Г`、孕ヤ。cトエノーワUォ迄綢)qコ;ョラィコ吝サbウW光.g/槻 サ2ニョ*M遯醸セb%Qヘ剔nゥクf[膨染-ツホ浜Q+簣'$HミョェーK7\コカ)#ヘア}2メJ,dヒ'Nアニ)ハルユe0ete0ク禄蓑%o tz蓖 =ーkrRz"zCカサタヒ4O牽曳 ホCeマ「ノf侠ェクレLニハa虱h +L件 w_gvュw0メ。+_=a{@ョqュQH|I~=m堕Aヲ}ヤイヌ@s{リ86J8番Bロサ。」スョイタ$]|ンアサ c#ラ模゚Lfコ燻d9Cタロr$N尺ケ,"フc巨ワネ懊ユSヒ牾`;斯$X(ナセ「Oa8ン.:孃f#虱Bェ怏ツ6}YリクBヌ投r繭).o'}ヤO。rO5oロ゚BTオj襲ヌ>!'hb櫃ォ&Kt`∴r6Eヨエェ00ヒ[ZCPKB<マnモPKma5antlr/CSharpNameSpace.classmR[OAヲ累uYA[@.^zQ[コヲ>@H┤儘@|惴モ:f;サ儂}1(_J「?E摧Dモx袈ヘナ「'メミカJ4鈎ニ6ヲ5゙x技:j.4,AOj+h!u6テ ルコュリ疂*T溂ンC添Gu3クCa,U_v#マ$3孱タク!オァX。L `ソ,ノ5:UhメN#シEュΖω制 OQ傴Qシス)旋r瑛!7Eセ1Cj w /,<"yKオJ!I B ノ.ァォホモサaチソ4ヒ>イNvPK4歩a「PKma5antlr/CharBuffer.class]RロnAVカ[カユVエレ"l+ヤZ家 IMHミヲュ1ホクコ,8ス|ozン、メDス-|Hケテ7髮涸ソ~Pニ= ウク垂詐EqQk由Xニeュ]アpラ,+&VMャYーP0qンDQ ス(ネヨ_サワ陥ュKキ%ユ}∽/ツsナ饒鮖Qカ、@ヲiヤiHフmmマykfシoハ^鑾セハ、竒アァ。呟+W=舘mゥハコы,idlャkm7L芭Tーi&6Mlルクmキq撈UロWF 。ッ*:Aィ、ロ刪 w92*/ケjzァセ・餃U礒=゙LCミャrクィテyo#ホユヌエ躯フLケク刈。v1yイ/ jvヌUo、レmキ凋、゚:WハZテハ'ン^O-ス隱dqフメトシ、幾ロ タGスZj1鷓(マメrhヌx[ホ ┻ア」!z28 ャ"5z邀@エ~3ヲ」ロホqニ1y゙ノ痢アm槨讒ホu異'゚`スリ8チフ#f (1ィメ2フa/I溽ッ.千9wー・ヌ9F bマ,gX PKセーQ8PKma5antlr/CharFormatter.classm枯 1マ亙Qヤ#mZエpラ&Snキ[ラ「陦"活愬9フwホ繦サ12癢p ?'トラ& s/レ-79サイ@ー" b%ウ"]恂j=aC9[d階&レuR廈ヤ[qy 3チ :薬ネTu0e2yニ゚>迚2 :。ス:}忽4:ャ}EZユヨ`ソPKフ]軸%PKma5antlr/CharLiteralElement.class}T[Sモ@BモミR.A/@ーZD BュFTZy瀘iwJl01uタ_・3/斐ル6Eエ y8{ワv勝メxァ"+*著R1甑q5Lクョf・cNレ讌ミ・HIaDーE7走Z&%,+ク・6C゚雁X*テ從rヌキス)憐xスホスl`+ケ5疆w ゥコ[ qモrトV」セ'シ゚ウノメ_酒クOjHoEレョ[cム ゥNm2}チ+Yオ6シイリエdリ>Lヒァ|{テu碾鴒#ラ0; jク hxGイXQXテ*(xェ!5 xヲa+6泰\jシPRラW カ4シニ oアュィa%dンmWEホw 3佩EィP桁Aゥ ソ$}韻ハ波ハリワゥf汗g9Uツaー槿*叺- _譁トヲ撕径T.*0DN3コケd鮒ヤ纂諚 學G モチeラカEルキ\蹐ニェリ5ヒ/ 氣K^@DJ椅[宥聞「ナA゚゙8.Eイ]鵜テw9彬gー2 cwヒ%}4薮0゚1Sホ9f鷽,$;エu恤」ヌ}a垠愿」O窒火5k_ホtメz痰N]マ3@神DI社#型Z絡ナW0c=゚j"シp型IォメDソ鼠"b傑娜ソPaDr ノノ8、ソCウ、ヤd凹0\ツXミNGiF」ヌエ彖瀰%幣リ(=:=.Y4エトロYアI;!|>Mk$9Ak&PKuF@ォフPKma5antlr/CharQueue.classmRMOQ=モv9)"j*5Q%チ I4~)}葡vJヲ-1.t耘p。lリク1アツキクwg=wZヒB2yセ{゚サ釛{゙タ ョFa`"& vロ&ツpJフ%9ク,サ+bメN`績,4K%號=ヒi易ヒoT^ユォ者-シV*ユUCCヤ)クKNテ-ォ:ウwヒNケ1ッ!真槲ヒユ,靈オメャ眉リ.T レ{{ハ)ハナ彌肩巻*ァアリ靫3ィ^ルrナ滾Y*6讙jm_=(サui祺ュ駭3xnヌvラ坎ゥfv}ロタY、 恵タ@?ホ nタ能Rツ>1テa峺モィクル^ケd+カ*サZリUロ 郡$; x>鴿ーミ惣v寝轜ヨwhDュD|_フ~' ケ輩hJ゚;lカR団号!Z QレA筺G ヌ1B覯f.p1、リ寉Id1ナNホVy鴇>ャ:W#B"${ョー莵h"oヘu)βV t鉦@ゾ# ソ,IィZ妖コK逓jvo~チ驕[Tレwヒェ廡Tケ脉・騫\ハ 昔、Dり{殼リeeZホナ"盟I丼HЭ0メネ6堀sエ 襟紫-HdUフ蕀「+T}扮専ョQ スI2廿bァトス!7ウノ棆pャW8ヒ3)帯ツ℃l眇D懃|ヤ%ウスo/PK粒AォPKma5antlr/CharRangeElement.class}U_WUンロ$\"ュM6@,VヤヲV FWTTTヤMr ロlv羇)> >磴{Iオ=ヌG*~u&オ!!3swf7w赭」゚ャ49ャ%0禽、ク牝XO シ)熔Iq]bEiンH瀘ワLシ+Rl&ー頂mケワ騨GPツ 
>LbZIスヌヌq|"ヘO)リg營E i止W/仄U/サ斬ユ橋Yuテb`[ aaユR\゚岸0,テスノー耗tヒ5揩& gヌム娥ン)Xルn颱)wタルイk!・木m7+ツ)盗x]Xツム]2テY飆ォ|'ウケa#ヲm7舅)ラcB~"ニO呷ナp/k+萓ンvェbロ申2['コウGホ笆)堋rWe8ヌK(sシ殺,(後ェぽ溘舌9巳_r|#_s|バPtU8ヤ9N`(クテムノ lツR`s葭b$スル>>惹則>-r|誤 。ユUT Obーcテtc魄qノVコラノ^裔向ァwヤ黽゚;サemマ<テエ+斂g-アm;2ウfPY'_?'GタkvIカメ[02モ蛎ャJ_=ーォz・mホテBタクj屹ィコm掲ヒ,lセpゥ@快ヘx$[砧サ酸ヅラヒ8ムァコ$チワレdgo9;=話&タD.vョw0譫MklXコyス<ル゙、k項Boッリoナ=虱髯y\椈゙jyoター^チ,メオ丼 惟乗囘D゙O/R/y:F腋2ノヒエ!B囹ヲ;IV僚蔭願迅1u・遵=サEナ;HィュG3Qメ癲石#刮v1Cマ_j|ハテ ノu健 ヨIイS丶*2トk基ヤ4スホ3マb厘ロ、処y母bクMタ+Tア,UJ>`1&b2晞bJB、メモセ賑経+、コx0>1ェク kヤk╂Jョ PKxffTマ,PKma5antlr/CharScanner.class更 |\u囮シ遼ヒン、MsthS:擘M。P G! 、MiBkZ疾N^刀ノL忱i/.ッェ+ヒ.ィxュコmキ"ャ梓「ヨu=ミuンナcWQソソ{f2)~ノ鍾}ァgリDエ〒B4Igハ }^タE_e裟「2裟"セ&メ鴉!|[タ裟「Zケ_!xVタ7ヒpュ}崟#ォヤャ惓ユw裘{イ}ョ霰褌ケ}毯 ?シー曾Xリ$D?・殃ュ」_ツ/eぎ~%ォ_k降「衡Fァ゚ノ @」Fメ鰕e@笈ォFワヘ$CシКd ユ eャアョsY(:!ョ濳抬d_-h5:ラ鷭ァR晤。57;/騰蛯ワィ 撥荐Y@ シノ-0骸Cシ津「スシJ燦ク細以su^+""ツg斟Q旱a w霈^ wjシ選ロ1rCカセ]LワマTム櫟fl3i6ウ\Kト、 コ8ロコルfjJ"ンルキclxラィ搦''キホO劼nョIツヒツpHW蚯挨 トUミ;耨3火ュq{ヤイ鶫ヒTリ血XソネGキLョR菓鬢BFハ2*u[靺r ニVメ蝸Fヨ-4畠H^ャ+秉腱ム窄JoアHWd覺h 。tVcェhDXセ`ヌ6淀K゚「ヒヲヘ,7Yォq腓 L幃尨ふヲtンロ浸る倖ォミkIpB)TがF根Ml3f2ガモヨL*m、モゥエクタ柄ヒ冠&損オ"*フ3抖5T霧29モKモヨ盧$bufA."{シ*ウ ヲ卍亘マ」ロコフ<乗コ確e\*ケU$>Rナエ /ヤfァS%'、TrUォCノ +)u碚瀏オ即ヲヘホタ#示fモ1Kネ績儉Z "ミ;鎭デ韃ルfワLキt+スツロd{;ンばウHc5(I)dlムセシノWメォ`スツリ ョL9スホ蒋ヲヲウdー顆:タMッ1 FG セ/ヤxウAoゐ|(ス゚Gd{アl゚Lwj|加]ワmpシヌ|)ンoミ'餞・ ン2x+\Z逝モン_.\゙&ォY=Nw」ッ#4萠「ク9/ロFsン浣オテq笑 ォゥモI<( ク; ナwM嚔セXフハd トエ(Uテ1Iォp<Nヲーゥ絶H*キI>Hモ1m{<」jマ嚼8Ix\ゥUモ}oO゙チ; コ齢モxト燵|5SmA誥U%jシヒ濂Q43Y.-タv`#浄n゙A#9-ゥロョ0ー+彑vェSヨ蘊aogイ$シ/胝y.R・2哺Nクhy 盧憐|-_gミ!:ャ゚タ臓盞ュI、6幤徠@纔チHカト .争ン果ロNHB`w8ヨロ+ムD湲{シs'; |セフヒヒン嘯DDX42ァTケb~a.caD、。耒ロァョ闊ゥYpK&g%キ、yb";」ョ螫意<飭:fmアpnァro*_泊N鯒 Sヌンnセeニハ=ミGメ゙ョフ9=サ」リヒcチ醍掣隶ネウフ wR6b゙]$"E_篁イeオ_47Oェ\ ネ}ナ ^Tv![2カ+AYz訟xェs'ロN~チヲコ"ヌ護Iッ嫂Bエ1]_フqサ旛モーォpY"eNク。^qノスハ=訪モ@dンセ~eィ4イッ_=款トa鬢、ケ/T/オク)RDソE<\W5ョフ-囎サゾP$%TNトDトィ_,ゥ鈞ァロ$~LQ%-ァ8ン員M9エヤHモ0d8c ッセ|麑サY儀ィセ鼡魃セン鼡 ャセキケキ+シ ! タサー;T 捲&詆園”オ>Gチ(マQiエd4zq*?M。9*ヌメ]ナUFkォイT}Bト{-瓊克濘_ホーサォfXロh5uP:i~_J醵(Dッラケzハ%ヨ ユ執洶コ9ZQI%ハmヲjコH汰゙%盾XネC款ス]霄。ZNJq6コtユムSエ07eゥ゙O})itUムVE},ヤYjSzヤロp&/k姫ルメSエ,Kヒ!ナ*|qすBV0縛Qィ~鼡イエ「Qレ7」レィヒwヘ8iェ.{ホノRヒ1・メSヤリ?Gュ橇冨甓%ワ O_Mヒhュ、=8]@{睇`ヒ~悁彬Lソキメンョ壥ョg囈rオ皀TァVMG{yソ,萢AZ★厮]房\ヒ[= j勍U邵~゚マ5<惡TX!ラ{陝ワ エクAKユa嗚陝兇ィsQテ.ェxキユ殉+ ^ョ.ラYeみ傀モ獨、」エ呀^Tヘマ@シ繩イク[*暉ョ籀ヒリ諦ヲQcゥU}ヤォミUョ則セ黎,シぬカsa已Hチ{oQ#ヒウエニッヒ;、Qサヤ循?UワャwウnオS_Uョ[PeヘG 6Y:7ッ。奴Cユ?└>V諏hサN窿ヘレ,E椄uロ;タ+レ碣E'駱零ウエ。脛vナ崎ナ釁棔・MユYコ0K寶QkbIコ荳イbュチT オェg~Gゥ猿モケt乂ミキ鈞¥4]A召ワユス ュ^ミツコqzC霍;キ4_トu輯クウ)Aヤq>;j1「O。o?QiDi藪瞑ュ2C^py欖ミロ}oQ淌:ズ、ムー?箚淞ゥ`_減l-ルメメ mpXキd鰓」エ,/K贖Uツヌ^|&/オ陰、3線U5ZO_ヌ |QO/貍 Xヤyマハ海ニキ眈x =゙$#サ庁ヘムメシ診ア2'ルソ欺9氛駿縅3$!ワモ&罘M6【-マ磔Cェ銖綬Nxッワカ@セ棣タネラタ~LFQ[Kフー秬礦」^耋$tンソ'tソテノq/ン Fミ溯<疂コ,莇$ラ^棗∠チG 泄@ミ勦w(レワウツ孚ヘユ6ヤ4o*閨"シェィ足ゥkpzユ袗證ィ閾|Fr=iワ(rメァ\髣テヌW~V(qユ<%紗 罔vx/シメ喇5キR?Kヒ>HンメRw弑j、ョヤ、Fq3蹙iネ犀%Y奏tgオ轂msエ゙マv=すク頓x#nイ^^E\ァアモMTホ孀rmオH6鑓/。 w砌cナ5?~/キ琅1マナツォ$}0Hク/2i d'覈Y探 6W/ ! 錨U8クG:トカ({ラュ=FMゥ6V耋'徠娼5靡n@ケ宗。Qクjp゚障 +a+。涯:Ok慫,チ麁r則'i而Iネメセ棣|ゥ;Bニゥ檐"(ツラメzセ^ yメ2護sゥレツyステカ+ミx毀ムP69OR逢hヌゥ冩、U愨'1G|lz醗~9澳^fコah,駟UAェ9Iララ゙是w1ニムOル< ノィ゚蛞7 セqァルQtm]コカ借啻 ヘ74オ1yy8テw「x-゙ 1ッJ~%]タッ奘&t♭ナX給qT它j~キ!ユnァ(゚『ホエ」ェQム瑜マァzモ3贋ウ$ソル?7PK[s禿 モPKma5antlr/CharStreamException.class;oラ>=^.f.vnvF6嵶シフ;FQ 洳トイD愼シt熬「フシtkヘ0F錮乃F~淮シTソメワ、ヤ「斉、Wp~iQrェ[&#癧噌ヤ伏俯Z像ZP貯洫2順≦&羨21ッ$ァH*F悒_O\連早 鎭`B!@ ネS3i6- チ@ ,* T'イ8PK椦ニ>ソPKma5!antlr/CharStreamIOException.classmPヒJテP=>メニエU塗ユ塀、7EAィコィt[/Jz#キiキ\ィ狡」トIuQ4 3gfgホ釋コレ8ィaサ*vlレ面チ錢ャDャイ蹊i&s暮BTi福コa颶4!Tル$tFJヒ蛞*ヘュ丿8緲ifBヘ碣^碗n、椀 Rセ{.リwQM8:OM\zD8;ンy%:YK"+ケX┠aM*t3υ ?ムhDvォニkュhrvヨヨZ@サラスチ_Py^m8*=セム? 
.Z゚PK?u#ゥPKma5antlr/CodeGenerator.class氷 |T蝠?軫&3吁薪BDH& QヤィィyS'LFP|ト帑&2刮w&エ*m脯ュU ィ(「ヤ可X`ユ*nュuwロカンュ}カオ/サンョロラJ釵;電2ィ已oセサ゚w゙轎逵/o~・cDエzネミ峰/2ォ ?雪~(テVp#欸ネァェ>ρタウdh朕ウE徭d8ラナォAノ 7ヒーFヨコy斷 郢クE觀7キノワ.rvクyス7クy」Biケ?鱶Dミヘ拵rqネナ尨 エX2j胝(4?雖卷態L`*7 = 'ヌニ鏑蠱mレヨ7n2"ア#綴 ク mtT3確Zャ E =匣0。0-カマ耜ィ゙沍トc家ネ鏐エq3セモオ羽トェl炯}0モm=;G簷Аj1-コ*枢6d08bΨリgヘ「ナーfャ」Z2)タ6pロmタヘmhスpCo[ィス」wCGWGwK8ヤ CleェnX゚ra0ワロルr~Goマ法クmcoxcwGマニPー,レHd7kムq=J刎ヤtкテ=)$r0-イC崢∃タヨ酌@W ハヘT6ェ崎=ロ#ノ碣ー。'絏ウフ2mXO$ァュキ・-uサ[コzvsテ=゙9ヲ'ヨG「zマ濮d沌iT 5$瞶!个t&lヌ8?M$Iョcハォゥン0i鞣pクヲk|エO7ツZ_;スc、f6iチT-モp」ナイD場」姐D僑w ヤ5j瘁R}Gメミ刀ミ`sIM&r!msm.Mjr-ウY移SLt メャャKNS揣劵((ヲ%#zk4゙?b朷ヲOヘュ借タtヤ齲i。藩、Es猪sモ モO遘OンH゚89t鞅ヌフロ裘 *メ寸2:翦43嗄メ呼L2エィ゙ュNgセ8}f/U3シ3Kハ夘3ゥマヒコ椦(カhム鎗gn吋5c 'ヌュコマ6CワlJAd 3賺イ gHキ8-3z"aゥ4メメ6凍1.煙iyョ靠v゙!=)ネョ%uk鈴建秋9I拏33カ$罐ヘS"尉j謫.mT塁{イ[ヌヘ7cN篦.ミs桂c筧メメレ碗ァツKbワ鼻K@X{ソハ・'5炳tx5フig」ツIヌ碧9*lヨシエS-(ーモ#Zヤョ9L。\ルdレ%ア粳」V1フセ> カX23ュネナ#'チ tモィ }4>。キ$贈估z#>レョG5Cウv「L臀C渙q゚ウV暗豊Uヘ、ァeU@キfェu&#,ラfZtァ猩涯ツ%衫lJ刊nコ9sッ髀レネモ7u)M $ルアR、メ=tッJ4。メNコJ・マメ、Jキノ鈿2lァ.V餠レュrUセ7サx玉ナリ豁*ス&^例5レ祠Tセ/Srナ@ミC*}コ*_$ヨ靈*%カ- D」 ド,歿ニ屑T醉\<イホキ、メ~犁ニクイ}ロ舘ャ.V9"LキJwメ]*ン香」イ7ハpァァアムキャ・+ゥァサ盤X ヤ部B崩5ャ。Y璋qウ咐%%」&Z/遶lpB螟7ホクpルRケxサハ;x'ノメ7$辿コo橇!゙拳>(セレ鈔ォタt彩WァーXェ5|- J賜a位ラ廻シV庇ェ40テhタ4airコク耨*/B ~オ*矢5zMX\^蒔t5+'z賓Uセ頴V鴣エネ*゚ツキツSw;-鑪|サハwミ.゚ゥ]|キ具Q^恚S~゙ュメヒ開_B<轌礁{y淕斡'ナハ*]GサTコョU-ムァイ゚喜ホルヘ淕ァT~姻聹M9ハjトャヲ6モィ0?ツヤ7ソ|+NNuot<琢>-襪VヤQT> 磋ィDmZヨ\}g6@ケ陬エレセ!y耡 jム貅,イ&ヤ!ナ ュメ48俾^AneRコラ2サ霜+fトxィoシタ'ハXpレエZンィJ-1JMセ<4Z@テ∠ル芳2ゥK\ H船マ俟ッヒ摩カI{+電ヤiン<ォト~"ワ囿@、Fテェ-Zq$ヤ藪2レ#C$ニvgQ4#ぇ\゚郎xrシx6<ク^; タcタ4a`n″0ヨvレヨレ゚?ネケ紹フミc=NOモ鑰アz撼ト+= ~{>ヤGナル>J薤%ク|/Zィp貽|セリ^r=袞[ケニ?XケヨフィlU-p\?vPヨ Wャs49ハr覿ヒ+ヒノdO:`ァwOeッ」b/γ+ーヒラィ#/エX@oタF_ァeミf9}入6"床モキゥ ゚~vーュナ苓X鼇r%補蹼Qルsv%ア怪錐v-救]67(第Qm曁>.゚KュuI~Rアt\造yマM:゚=轆`フKQE「ウメサp刊u圃゚ ェシOg`}.厥ァソ@ゥBU 4ムメ*\gォ坏 ヲT(ェヒウtィカtヌミ゜)溽Rマ#?/3ア^ヘユ胼ヲクdホ証{~ヤレc8h2モフ'徳冰、}=?mママリウワ,フテノ跟 tノ之|丑ッ~]"-ョ{解イMム堤iゥ9桙1Eヒ:b哄掏能~リサイソ蠣Z▼キニ戻ェM狙v瘍%イィキh・ィチJスル<ス+3リkd靡悔Rtレ槞 ^;eZオレ =T<搗紋L`V8モ鑒U8蒋*憊ヲ攷寮ψ9X。苦*/ムケ ョレユヌiyEj>至`2ソE5ijk3Gホセ梓ホ碇M藾m垓ミ}テイCZコOフ扇シCTW臓「悶tJニ発"崢「mlョ但>xラハアRテ\SY,&ケ搏08ユv寄5扉]t'$琿駟セ℃策6゚Lュ.゚Fフ|Wセ擾Vセ純x|_タサ9v゙ユC,^G 絽ゥHテ(h]ニJフr兵K "E「/"ヒ 66^ス擇遖5 -ナウ)ス ヌzサクg0ラK8Zヲフニ|論ヘ`梟ワ.%7ネノマタ*畝0ウ2ラ!嶌oAc(4"ヒHル;$劵テ3$liアャェフタンリUP!gユSf9●疱|2ェb!8槎ハ6?・"Uマi Nチw-ソャ[z5SリホイュV環V,ナyv碾P瘴蓙?Q KメZト*崙娠Mタf+PQノ#キ笄"%;ッ}、ナル$魁bHS" 蓁咢l扨「メアf!ー膝ノ+Dシ抉d誕ア゚dモ唹e病ョP*:獅)「iムシ椏ハモ"I穰u5シB。ス粡)箸Mォrt.ィェp。葎'署cpォ襲8Eヲhウワ鬯2,ァeTィ!ッ筴J5*エNiヤ1コ焦%レアx゙ォタ > シ岑^9尹不`俳dツコノ.」埠%kStム>ヨ藝Kトv.Nサゝレhメ遮[予n_ :eoナユコ(m0鳶wマKノ r*蹐ヨ +抖ゥtムb%Dヒ熱Tォ\kヘ絃Km^ミシカJ聟メVク蕘者・ウtケ|/ニホ?WハY6・ソ *ォ-゚JNBッヒ'O<褊Mム{ィノミ9:靜w麟E X]ь;_レ`カ/&トミt!=?>尿> zトDLッ;QUKシロR4イ-ュョ8D肴^ヤ゙ヒ3啜Gmェ(ュキィ蟆ヘゥヤ~[」フ望カメ<リEテuCシ タΧT」S-WカムiハmTV&HSvムイ鍍*+Wモ彩$ンォ\O*7ミハMtXケセェワFッ+wメ;ハ=瑩O鐚se~カ_ダG$s&&ヘ(!*vシ@1oZ{¨ノ繚眈mー~ラ||ッPK、w纐#PKma5antlr/CommonAST.classmR[oA=S.[-エXZ[JオエヨスTカ錯チKLエ 糺7duル5ー5麸A錦?ハxfXィRH83s;;;3ソ遠韓用H8,.L カG0%Xih 苒蒹;_@エイ!ィエ゚x=7「セロIAヤ,、ォカラMkQA,~M殲;蔭ピリホC?Uェモb=$ツゥ ウuンフz%ーmカス( n/Cソq4r殷コj3Dzマ貧7q3-uリ7テアhラニネリGcdソホ%オUGサミy&Rク O)泊ォカ9 ,/g凉襠\仙ヲ6 d"ンヘホ\bi^w,gセoゥロIuモヨ蝎ホvニXセW(ャTl扶jpャI]A%C_製影クシ議イ| ゥシ駄カs q・ムヌuJO鏤}ソo慟?d#2{ 4PKZゥ\PKma5%antlr/CommonASTWithHiddenTokens.classQMOロ@}K8H$マ爨エェJ#Uh/果ル$Kqリ1ォ概慰CユsTユ7Pレヒ巛7o゙'覬リハ#喬タj羝タ車vsリ萄o!/モ }_b/ ョ試;ンdユ ッ」桎迚。ュム ヨホシr$ ケj}vo\宜(ルX@ムニ<掛敝柯オQニ嚶U“XWィs!丙モ*jエ9 厖!&M(1F\豁ニィウホ齧「ナF秦&RJ゚f^*ヲモbf堊ニ。efh>藐&゚茸6。姜rス~男LSォDセFJ}銅リtソ誠y#=跟oヴ椋ー鑞歡オNmjeト晄盲シGADm*>渥rEタ!6クt凪/hォナ鵲レPKェ・0ホTPKma5#antlr/CommonHiddenStreamToken.classQJA=」ォ崋ケ圈・fjミj。`%пG團m.ャkマUPzィ靜f"% gfホ徭ス;殪8Cニョ=ケな (ネリ猷dP乎|hル悖リムMgl7Zヨdb冦ョリ\滬ャn6"゚。テm爻aホダォ゙0H-k@)jヌ0lメ轅O初IkW掉Qo蛍sヤDテ5#・ュ逎#エW+紛ニ゙ヤ・馮櫁ヤSヨ6、遺ヤZ3棒b傲゙コ魯AU@LA![1ュフ_スw9碑ュdzJ$羨;Iメ*ラJロt*ムハh ヤ訌マエ。ム テdT2ナ\ケコ晋爰州jケ9|^Cが&]Cla(/nし9^}d演cg。/,y♪エ]2G蝋+ x・Ej=花.^!yサィP、Oッラシ#メkトdワPK'校 }5PKma5antlr/CommonToken.classuRンNa惘ソロe。ーエDQェ`サェR%1iBシ朔「タBェ.)[」艱&PM|ハ8gwi・ヤ并ヘ9{f:s璞ソ,篁子ファ`「$eAgYTN篝5<陣Iセ,kx(ヘ# +Veア'2z堝ZX」リ ェJ鞁=sCSュワィ9GッUw蛇「nC!Qゥ;uoノ。噪-ワ6%^オ學vkキカラ$Wィ^Fカ0x<イスm゚吻Aソサノ潟$O.アBユW8ケ`hャ)、(ウ6レM、キンレキ_ヨナ阮ロlコホョムv蟋 L篌ョXGヨタ,ヲ 
d親ー~+/レvK!.o`マ フ マREMロ(ノ荷%qンタ(ゥ訶uャ諄昂 Cチ,b゙幼Lヤ誌m蹇。4闊WF3柬゚Wコ5D塘D1 ソ~ッCノ=XァリYqj戮Y 托nー傘%s)勾シ!ッB桍ヘtyヨヤ「VゥスO!8xィ寸b望ホW楓ゥ癪)ヘ_イaD&)"誂疱( $Bワ:Ek蘭チォ双}ォ3ャwp7\ヘカ+゙s」モY帙dカ」キル}ー_萓ナノ\ラハ幤?匝{痔ゥu栖:ミ呂0タ0<犇濂ニS,ー゙x孚m帽 ァャ0タ*'EャソPK7竈aチPKma5!antlr/CppBlockFinishingInfo.classm設Nツ@マWゥE?。「WXx演IH^`シ牋隔K未マ薤c竇澆g「1コノ樂撩f2殪レィミPJ#イ%%U ヲセラチレ宅*`ネ恋鈍l1驫9Cvホ要タァ」g7p lフ旋ホ^潅径_pシ猛=マ゙ャ9Cェ 7ク`姐N=Jルミf8碪ホ栞畠エ症ェF鴪セォ|foオコ|遑OワM1ロェヨタイv6cラ@F堰]-Ovュeネtセ,ケIムh{ コト:ъ「ア甑(ニタT[メコム(Zッ` ウ!齋"D竇エ#メ2彫「W盈R} ヤ#R~KIケフtヲHuEjЗZ・゚<逃ト;$゙Q堝;& 9 ム版/PKf+濛PKma5antlr/CppCharFormatter.class控KOQ=fFAq@+>(m・~EィZ@潔MZfWョLワサu#q皋&ヲMワv鰍]リ;3孕ソ=n__セト =悪Hィ8。bPERナ衰N盒g慝!.癧.ィクィ竰d +耐クヒ,、T謙ク「bLナクt、\U0。`Re$如(:pS 憊クエマ訃'Wzクx7=奕hナ.:騏ロ闃fnブキロノわョ%}/ョ 5Vm-p!u濶@s$輝アケ?KFクオOヌQ」/ケ3m1逍;+フ~OタンNCフクセウ&}ェソQoァユク-ヌ]ラォウマpi去{Oタ寒袮レタ]}oエ。カリ.莫ケU9j2サV9ニ「テ<iDxp{ ミヒ?J3行*ツキ^>ニ>A|煖タabKユゥ9Uh!L=/[ュエM」 モレhcエ>レ4ュ6E[「]5嵬fィlヒfsルl)妍ル2"ッミjO゚竡ユ∀キクlニgィケエ*ッq+エ[涓ヒェ@ァッュみケw?キルNW.ラ}ラ味2Jチz蟄Rh'` e;ニル Vy浩gy蕊6偲F娃8#@H#N3 柔ミツサ喜ヒケッ0)セ'Jニ哄k メI0厩シキた。 :ke氤トサヘ?9uUG3^4シ}メシムニqY*`kZィャレU-ヤリ*4kスD慙|ウMワワ'3ーツ5usタKワャ鱒C78yハ.Ktォネウ芭堤・.ワ]ュゥ2`wミ鉗.qマ&ト~PK宍Y0ヲPKma5antlr/CppCodeGenerator.classヤス`Tナ8淇3ンdセセ;゙)gホ9uf蜑゚セ16:ミ3ビ&GD#%M妊(嘘メLノ癆エhヒbキj恍VJVhア2丘wウWQrD8鋳ユxT帽#q ]章be8J而イイ0YKw'f遠トノハ)YャH廱ァQr:=ホJホ、gトルツ鐔-ホ」 ニZ\税Jナrメu%々タ7ク2/・6容架ナゥoc椋B\ゥナUカセ坩ラPr-6jx壱鴿7R魚エク9タ*ナ-Yアトュ nq;Uケム:リbsカh[@ワ%wクトス q?袷』トVYトテ4ィGhPロイナ」1Jラ竕;T<刔ノS 棣セ泯g)y.[ヲJ>・V櫺遏Z}ゥナWYア< ンタヌ j/Jセ・ト ~#U トマ ~+衣@b;H茶エ@レ カ葡・ホ+$ 2#漂T信ンuルd9Yヨ%X ウzメ].ネシ^{⇔ル嶢9辨P」セイ?ネ9P9舒A燭イ 寸 ⌒ 舊夐'オA舷< 9ゥ) 玖。蓮猶R rネムYr芯Aタロq w9莪 'ミxv」啄NE))」dR万,ァリヌl゚痔價タケ'ネr須〓9 d%ネ gリヌユ゙ ォィ゙>肥 rfキハYネイZヒルY+9艤 軫V.,廨リホケZッ蕋ハDDy ンЕlケ8タォ蘂 ヲャy(5 覓@ヨぎ&メラソ rネ睡Q晏 ウャHN|h「サ\コ朽掲ノf/LQU?dK5ヨ敍Hヤ1ァスZ鮗"M隴ケ ┻sd凡驫モp8亠膕 ラ<1Z 8隻<)ャ・萋ヲ裙D'@>IYOs= 竓g)9積モE瓲/−+ _ネラAセM塵−巵セ兎_!゚・AソG材ァサ諛ィ?薑 ? ネOゥg ?ネ/A~5N・ェ-5ネ菫@~ ゚ ソ=ネ@'?莟 #2Nノvュ(槐7+ 熱WJ3ぴ$゙ロエ*ハ附(*T7トMuァ比 rェ'ン!W_。ミィ^Aユ'K桧PシSォ~簸5ヤ@Pァ 5ヤPPテ@ェ癶F ェTィbP%JA5ヤPcA撃ゥニQマサP2ヤョ&レ ヤ&*5 ヤdd5ヤTP{レSォ,7オン彿UjィJPモrイ哂ミヲツオ柏クィ*Pd米ォ+<ハル曜ヘ、d艮夭fj?PsAヘC>RゥB`P I膀%ヲ"fW:ヤ"P帰鸛ェzィ*#ン「B摩ェU*梧SユSRPヒ@5ミap衡ロ*B尽オ還s3ィテ鰛BI TT+ィ(j%:j(ヤユネZュ者j ィ」ヨu %ヌR」紂PkA揶~: ヤノNu*ィモ@/ホu&ィウ@扨ウvQ秉P轣:ヤ.オヤEヨレ鹵Pゾ 2P翼レHOWコ2K]・ョu =_ ェ ヤuョuィ怠ユMnu ィ[AンF鞏P臂ィM6ニオヤ擒u7ィ{@ン >Pィ@m ィ@= Pロ@= 1P純zヤ蕩4ィg@= 9Pマホヤ侠^2z#J^ィラオzコ~湯キエz岼P5「、ィwAスェBスOLィA}鹹P麾ヤgY黌ノラ濫|Eノ゚A。@ェセヲ湯o@ ヤキ ;P゚S芍鶉P?ヤ/~ィ゚Iオョ・腑PロAウ,取-ミ6hZハエヨtt鑞ミ鈩サ:tOミケ@コ7>h5y コt?ミA= ミアA=0ミ;G.]コt 鏗ミ」@=Xミ;繆8、ァ゙ン@DコH<ェ郊頌ネKzWヤホz*ス%サS2tメ@O=ミSAzOェ_z/ミァョ= ミ{Πス閘$v)陌T鑠ミウAマ。授C]轤z>ァチス:□/ ミ>4Iォ>tミオウニйゥス2ミ スt#&ミミQミヘヤ&紂コt tt+W^E}$靱スム},鞳@「ヌZミ'「rユ'。諌'>ゥO}:霑>セY檸猥} ェ>yマ} 閃1クタ腟イm8u/スミΘ@A_ 2Q9%)ケ{%隲@_ ミラO}齏 ィI[h^ミ嶢7∪伽}+ス)Vミw+]7ミヘロAo}ァヨwミ鵑徨キヨdルg閔)ケ薯)%PイU鴾蹟ミDm^セCノg」tWO!P3霽P躍續<。電ケセ+ー1}坎3檻テョ淅闍ネぱマ~_"阯@ソ ミッ~ ゚&霍@ソ ミ~ムェゥサ宗 ミ殍曼殱ヨwZ蘓 fヤTフ儡S1{Zeヘ弧ハケ褫gマ蛹/,7vD$ェ7ヤホi ラヘ ッ7bAgji8R>o>g ュマッ郭訌゙゙。リイ 叢元蘇E4狭エヨニ」-m7キニゥ80ス。rgZモ,3/゙メY: ロキニヲ「56勸rCKツ冴コハニpS8D36ヘOマト馨藐E笶ク=j」MMムH蟐xKィシe)籘モ3'ヤj%kヘ誇ム唖1セ。ニVッGxUmク9゙採_ヨ]皰[從ェ」7孟 ウk[[Zーヒケュヤsf(olCO{5Fk励sシ溢ワpャオ0狭テリ凵1+ヤsiユohCヤ惨 6Uョm ,aDDム ナ糲Xンレエ$ワツYzvれェTメCアシ#テq8カ,レXH)Cヤフーlニリ14ヌ&7リCサ6レリョ・眸ニ4457勺?Lウ警vHS@}GBM畊sィ68rYィ・*6ュa) A ィW。鐓混キトチメQ$エ[ネルネ -e弾サEe邸フッノ「ニxク%7ャトJ%JMVe、ポt`「ーG3ウ寐3えケVリ_レ'Q:3シ*ワ祺フ%UeB「c>墅私gキフ憾ーK E,Fイ懐ワpスソ鄰2憂0脆抉与スwイHfXー0ヤク<$ミ 講オ。娘=. キD;鎮メY」3フ螟M/1蘓 X8飼゙オTLUf ヒIqナウavァヒtヤアe恪K2Lsウッ;ェ間G'e>7,JRー勣XBMFモhハ関磋K仏フネ キ.`ュ ヤモH劭]ヌ=/NAュ0vハtフル]J伎屡嘩km0詁zエpィ\ 83C拱棚[tCRFラ'楡格HI・V箏ヒー'ッユt「dP、ョ&4)iIム慵ョ5`慕キB1チY簇:u=亙ニjワウ\3O{ヤ"3F'髀ヘヌ顴rp@ィ[ 。F#.Wフ"z&ツ纎rt嵐1uEルョ嶷ッs>uン&皙ア壅qォッ5ルシ#囑D'-鵤サyュt擠ュアr3ワ涯鏥ケs@灯4ナ冦潰WヒxEヲユキUW/ヌeョ頴貲\ミI監|ツdホ EBKテョljn茄「@クMq註毫./g#2y呷#j猪 TpツシG+#ィ甥甍「〓.-テ#ニKネツナ S勣T La&ン聖jッ=ホキ4q6セー齒t]Wdtヘ、>U3w室メsマ/フ\1ァ1|Uャイゥ9~э+メヌ ーGイrlキ&ヨ$uRSィ勳o齡 ,呈%ヘo EbF!gDュ+碪ァクメm蚕I:U+xネミウ3 Uァp?A"キセaUメq渤 ro4ナュヘ*後ァィirm」逍gヘ狂カヤン(09險侵ネ爛ゥ+ヌヨキc?j?齎象O8鍍%OS %/リ述沫!0rJ>エ檗ャ&賞A、NS(刺9 ・cf隸゚オ」ゥソム_V「上 ヨオh0Lヨ&6レム゚ルソ:{ェムア%$カtOgュqッXチ綱オ-┘ヌA鳫タQ'ツ傘, カア =t滓qe撹t、^ュ、eユ0、j8靤%ホャ澑イネs0pヌ'コ蔕ッコ9sコ;ミr惜;ニ# ケ腆.z耳ヲC uA ミi キ]. bナ^Ьホub乘ス9c∪ネ4n v;撞! 
@畊mィ9l誓Q~GIヨhNBq_隋-ソ粗ニuリ ヱXcWiノ~I」ュ`エ>リBz琢テ@tGャBg w z=G」カ、ン#1 ウヒーiv0譜飮iC絶 オ5「ヌ'キFb K#aキレヤB,カFa2<зe( モーテ5rムタ ンメl(%P スネイ`EIIー -X ヌqh<kmn叱トΕツKZ:P %板(Fテ cリY9b)&0柴'籏]噸7隈Dト,Qタョ「:! ム1Uウ到`7vキモ0ム2倅ZqWOFV(メ0EテTIシ\&f或リ尭>リP,ト永n済C「GM]レJコ?イtハ罰チ「瀚*H3*4ヌ~ロ~ヌ(8mチ)Aヌ~穎s Sk1」[「ムxム$\鰄ソ」C%u憬。 カFбメ`4d劃ソB11@ヤツ -アxナ2 ッ 汲Cィトルフ}jゥワフヤ{眸jフシ-rCu+B腿ーi9?椛ウiホラ 、u!゙ョvトムヘテ1ア゚%NG3*G矧Qab'臍マケ#X^x\温ナオ。RKクセウ台,uケア 改uエ5F ュ VEツォ8椒テ則e茜。ム^「サZ皇/ハワ#{フ$"ヒア!ナモ38ヲリt局i} 4>.LR莠綜ヒf9PMモeタlコ冫PI]Kゥヲ、if6ノKアハクC宍ハIメ抽Z(セャ!f ェユニニ`xUsク・ミ5BIム.\属|PフW"畭kpUト8-紺 .u-.)アFRヌ\.F萱借MD屡%zキユチ1c盛G,*ム0桔j#'コ:0愁M4D礬{ハX中Tメ茲ラcO&嘩;ー'ケ^Hwタ9ー豫H4イ埃#eリ[Q ゥ覈AZ ヨ。 YDJ8bセX6遷ユ7U&脊Sァ犬ク陲默(Zm!臥ャIチ5A挑瀚Dケ、Qカハネ;\・-テ*ェ洌iルlj{{ケk$s"D0Gキch隋 @Q凾恆4Rbツコp=Z:E7zA攷)%NォゥチP'-リ゙サ茅QコX!ュ3%XXΜkJユEy3汲{ミヤ浦矛、Hz . Q_(ヨ蓬0イ!ツァ 呪 b鞍 rャ1];&)DZア 5]DエY( >. シヌIリ 濤@2jjィキ]-來q・ィNセ"諞ケ0ルセl邂  知娯(MフiY01シa #ョCFifP#`fヨョサOゥqロサキGD7!ゥ 1V)]゙~D`Mワィu オ5,i、JE4ヤサxbA%n聖OャGt鶇ヤ]Iオ_、。義M イサ2寝m.e@jn瀧 Nーツ ニェH毓?6ャ2NTミ亞髀 (a#$y轍篤 Bp6iG 1 蘇,2゚、`ィc[Nキ*「3'ヤミhァI8ァスa搓lG」7)iロ;ャY「q18坐Qヘニ メャ佯フ:C螟ob牌「メッチr。I1Qhv瀾@W殖忽ヲ \p4MテサVエン3j・漸メHoG懦レC゚$mc褌馗殻*.歇<シ頁 WqwTワナZHヌ:ォル竚j4:"@<コニホ89椚ーユa ホ槇ザH扈4楮eg\ヒG?コCタJrXオァpャ゙] 51モンユCGv I'*朴'セサ鰯ャ#8Vwmン%"6eワ`メoDぃj\CヤU5u。x此^|0箏&被レタ;p アハアpェH|:゙4sAb3キ'r iz]?乘k┰`レKォ翅 dGラRr"i[藥4N"~2%ァリエ瞻ユr%4モVE2bク|懼詰 ァ9+茆NJ0申 スモコァra%ェ渋タ_鏈鍵mF:ホtトWz4キkワ保*鹸ウyyチ。ネ團57縉p2srjロヌ国ゥj爐5甕SRxuKvPラフxオヌワpmti、チl|uhV 衫w]オ3:c;VNlF綵メ>6リ、mヲQ@クヤ 薇」cカヲMwg;p廳アメvホ#゙;歟 滷悸aィJニr6・」6オXャ・オOD$H拆ラ筈#頚ヒKVY冂Iニマi掘ォHa1y羇0&X ッ,'ナ阯:ー惹GテEネr驟・エ6{エミ+ャiIワ眺憩゙rエW=Q込f5,燕ア0=h,アホorr|X穡u5,*・ ゚%xム 2ミャヲフ[Fェ聴ヨ 驫pユ4w・エZ車5ソ!WD鼈ee鳧8ー9ー.F$2Tゴ*岳"ト[タ%F|I:p)Ew視Nナフ抹・鉤L.#ス迫ヲーュ。*GnアヨE+キム/ホj$'ク6:b/Q瘉* ヒッー?wト`◎_Ie9ツnSnン輹ョhXロュ3腱h]IラメへL2#2jmoィ}3テ+ユアィ+wメォ゚ソc}ラ{・V!ツ泉鶩★;B崑87タ唆bD:斜撰Pーwー畷ni%6#憲瀁 0MクDコイ&Nレトョ殴 シ9p+ワ貘pg%Nムラ5ヤ" iケ0ワR゚リm証C「PハEp牘 1Z繋`-レユ煎摘ョd2,1^ア逧#ソbィk吏3ロャl&9lリB?*瘍сムヲ8コuノxレ偕4Eニナ#wオyNXミ。ヨニx晄Zョ5ゎコケZBァ(Jハ-)ク$\jER」Kシ2dVイ簍S、肖я緻モィ!Sーフワ。.;XB04」8マ`6=クサノ∈T「E |ンバ粃!ワ iク゚∩5鰡癢@ョ rtP`+&ツカキ#餞k@ハトV_7夜"庫Sワゥシzフケ医ハ>j*ノ衵揩ナcネ江Fgミtx"ネwチェ鼕Zテワハv耀シ4ラエ、.ミ磐7,ヤEー#h葩ッコ:U鳧ツ9ケed0ステ阯!k羂慵6bア2ウ暦コd。ト轍ィト.〃(リBセ┣シ*Cモ廛rム夏ンーめyシ・/ウ[0ィ貢0Hェ水^Jヌ1y'wtE紹ネzコィX0oY5蚶ェ径. 6キ.iDュス&リF[n そネ。甓ιQt#麾s祖喙V璧qシスユノ嶮.Xャ0|I4レq~惷シ0t、R評O[x k」^C゚ミ1j且┳Iミ[x^、s#セDユ/テ+冓メラシ ッ9:obンメ+Lt諂|L+nトI'、クャャェz儷モ J、ミ$シー癧Rス'Lォニ5t環呀ミ俐DタP*オ^ユ`佰ン3Zナ!リヤ#2儘L8オモ$:W゙┷`トシワー渾ニ゙髴毓ワ5アケ0敕ニ 0ラシtrmコ R ヨoム:pA &6フa゙$ュケ3ヒ w.2謇ヨ」アイ t エ6G#Xhpdr(f@ヒテ浦ルaヨゥG巉tE痍]7釣VTpネfG_セ4ヤ匐苺ヤ噐qdM!Pナ類,ュIGサ擁疣D0lウ※*ケ-Xスル防2PCck irヒ[ツtコツO-4BミI 娯ニ波nメP74I」メxD轅トモ\ホロキj辞9壇b?ウマaW゚「塗ePz・聾)・Azy峺 木{イエ4職Ym^櫞鎖{ヌムfキЦ6D k iウ、瀞ソ紀qg・&dー:「コモQ踈92r}J唾メ'rG ?゙ヲ ル拉Q%6aチ:ッ[}PニオレチレXルキ$3/3Rユム Q綢dX$&X.ィ鎗ヨエアyx4ホヌ,袴BKqhニ紵雀シチ;ワ$ホi郊ヘLエシタ{、P゚5翰>、#クミ¥)鑓>。+Jl゚-。Z$t.U胎ュBRn・+*Zu0T関ネlクuQル]テyー_7QV15&ホ鬆"鑞wャ?ヤ圸トr\Wメ>7^Q:鱒L」ル迅砌e頓)ヲヌシト6fレヲ蟶L&fゥルナi& N#ッ238Ыイヨ84暴Eャ|タ・贈レ闃0琵vヘ#Y"レ=瘉クヌ8、敏トd];ウ:ェd:OQc4ノ)ィ"ュ麹セ|「ン))Oタ エCnサ4"ヤdWヒモ 鞏チ敢タネrホ謖E"}クoJ、Nゥト[鎬>i,…ソJw囈∝-K[ノラ1Gpメw驗FネwLム糜メi。aエOヘ。Lトmd0TKrhホOi 筐"6!:x醉>ニ)7ヒ>(_4イp|y| Qイ:トrエ&T2卻5跟ヘrW7idSfL$ソョ3筐7Hz"ン;6X詑!ebksニ]ソsjFU[h霹c@^g7#.ヘョツ $-FG困μD去倆ェロV愕p旛整ェMG籃ノ3メメア瓷ォ?qノフ';Z藉?ノ果MD*ゥE =フ;薄KUゥfMMmsck鯨レョル.K_迅トnwF櫁蘯箇y萠コサD楔^ゐ駟Vq%戌ミ mヘケ封53gマ゙キ|ハimEnm淞視y@US゙メ「e錫猩泉xliI蜴+アbT$Nnーセ%レDレ_ ・zxモc(掉s洋ン<キョ烹ス 3fロEEEミカセ*"}U闊セv桟satS俥%・<(:瀚ツホ/7メノウ2宇F凖ヤbモミ?チノエ'=P~4 ゚> QC渟◆H錮 9エ ソR侯巍"G3菎庶ゥ、a藪K繭*B テn`f癌й≧刑r゙bソモis;8'猗vミ5畳Zアw[mlm敢テx+ー宛#u^v"瓏ヤ゙U兩{モKヌкtx3;b、ヤ .;2ツ]遒. +ェ扱2 縞}Ur j?(ミ3膝)由Kミ甑砦ー軅$xマシVAソロ}ン慮F2瀾Q倩褻ハ$罵ZJ摧+フネI=Gu笏S迩i粂ィ# ラ~姥)g.O怫ハネKナ0「Cョ= Mフfル1M1%_湧ヒwソYミ,bゥd5#収^。マケ4ト譴猩4粮%N-屮ュネニJ,勁L`ア檸廿]シ巒ヒ8Mモ_T;"n$掛4rンI;* コy$ケ5モ(G ア萢ハYョマ筱<ムha>rR辷|M」"1Fッ7-ヘ冑'」到」#新%・-セ,茎伏、nu2e意奧セ浤ネ}ムオシ5搗xヘロ侍Bノ猟{ァdムN ミ8 n%鉱トサ[j!佞BWヲマcUッ垳ワg,「ユ慴ェo咾ネネt8Fー9-?PfE」)ャミ-荀キ領2Aェ7Db箆アFb9jハ・a;シK2+ハィ3廨嫖贔5Bu. 
ラモノ #テ[カ赧uhィ+7砦[竰)アェ枇メヒ椿ニ:繆フ3#ワy^ヲサテ功 H゙青「ョTvc>9鬲ハヘgbェ:レ、dX睛Dンノ鞅ケA# X'bX*?纓Evヘ薦尭0Q&ツッB、ー拾クJ9qIWkネヌ竸'>i堀Rw[レ]9?奩ツD7ヌ"課S^ツ靠ョ゚;ヤXソ0レR轜n キ」ハ =]m;セ=#(+袱W3鬢捜トヒ(j,ィェwソk>マ3逕hk;局j[壹、シ})髑M,Dェ謡疾"ル鏡ホn1Nv-槍囮7?qHムシヒッウ.コGムム夬Wfノヒ∵篝ッuト迎ヒTラ|・ル祀b互メ>アDV斜カ葆9倡{ホニKワ,ホョw'「浬eル Rケ\豆ノMN娉処ケef3モ、竭リ叛$@ク匁mレ>シ5Dホpニ&{拐m1唏Yム嵯カPkIァェツゥ?岸qトXォ"Рt}タA0)i陜。C」ヤ>Lウ庭ト」fI[エク肝%ェnイヤU]レア j_ヤヌネユマ。UGWメミ4コ熄ソ父晝X站! ゥO`UHリハ」7ホツfノ%保mN]eコ~ロ貸ィ2d屏ム恚#碓}ソァォ釘覆3vム&ラD馼糺Y=ァ テMメ@H4ヤ{Hユァ瑚滌哩{k鵬ヲ %神幃メ!,gdン30 'オキィghG゚瓸~イPメアホsr鶴レqン馳?コkW<、ラ燎ヨ^*yケヒアF葉n錠^僚}:ヒ|Lマ{p'モ:ァユ| if uH頁R チMイコVy卮**lヲO2ヨマョOLlaヲO單j&夐、蕊& Mォ鍵マ2{尽]ンJnoオkobセッミ!ノヲマ9sB覚ェ2ラノ、セzzcゥェK好フ'オy}0#ロD_Oi^Fア羌キf渙!イム \?&9ヘ9クシ%<ラ哉d畸絏荵ヲタ|m'・ヒイ=クn燵渡ゥ:8~鬥ゥ{T彦@Clfリ肴VタHmm&」九啼セ心冷肥xtA「鰥n樺ム旱輦:}コ&=黠61ゥ仆|5ザオ'ル1ヤョ鰻f痕G鵺P$隕既Uウォノ->ソ贅ヘユヤ&紘ル荐ュ+剿勲 !゚ヤ。#q?苡|I.:3 Q8甥c@,yhラDセPケ アアキ1ニ1F゚戝;A溽1ラ'ス遘シモ゙ャW9yシ2z昌セ=ソ臂_ョッxラWスk゙uw}モサセ蘊゚カ゚1ラwス銷ヘ。チcィ犒タ淹 ゙[ァフワワaョYカ匯~蘊nッッルkヌOツヲ椚Y艮\|留 カソオ埼ws!ウヌkuニ駈ヘフ「トヲDP"暁fヲ鎭SナーターXヨf慢コuィR7|煮=6ウ憬"キソヌt&X ーャ;トリヨ麗トイl(ヘFイ=ル8Vノヲースル^ャ模bu8[?リ?z8伺e,キxYメ^ロY棹サ)ルO湊アkト悄偵錠4セ^リヲW;ヒ_省秕nvヨ'ewF沾mF(-]{9ル, :ト枌O胃ス禎.リホ rイ62オヨ稾m゙カウ)d1飩lク9Hヨ岶ニ皹bルォzLh/ルニ ワト#nニ娥l゙L癢kー8モ二4[リTX?ヨ7Ljタ浜a:ツ:モY/v"キK~キキ{ー趁ヨH5愨 ngチ穽6、R鈿lX;ロ s゚゙a#潭q范アv9N8ムW1Noムz井縣#6a?ャV4ォtQG7CKS}gzヨン砺2ラヘlサ倣フngだ9襄Uf ヲ'ツクt.飃-俥俺ケ ` 8+ ョハ7チ 64C誠卞t+6|!dY仁C韓Bx`<ー7 Sq傘ヤzカ+ U) ユィワムlLv66wgシ!ヲ゚&6ョ晨ケ1igサカウ サ}?ノ4{ケ閥gMキメv'[ンp4ル4胃泉%2ホ+ヤ4ノ$撓ホミ l&6} 妛、T.タ\通]ンQ皿%踞コ{Wtサx Gフ「py]減QiチWdsKネ゙ノ&ョgZエ。荀トU\>テ大6G、j&'゙テハウIユ%eツ jタzカ[I舸6u音匱Oウトj <9♭枸l| チm3゙エ麸S゙ケM フE];ロォL膩秘柩タFモカイハ(xォワトヲキウ「nヨク夐ワスヒtセ,4nアー@膃RVmエ0yRコ啄ス*ム>从 MG2iR n$Sニャ颶R 7cッ即ョc捉ォ ン8コ仇塾Iセvョシn゙ Nヒチリオォヌ゚テ金nオ瑜ナwZy時ヨJォ゙ン9ル,ヌササ'ァ孩w{r^ニニ| コ4dP。、ツ惧;ルフ惧e?ハVイ・Lマ49昔9:Pm|lレxb-lヨzカシリワ<%耐[ヒ_ c都h ユアu煤:4キ1u6ガ_ヌ% ;ム省ネ?、W' アW瞿QレX*Z+ケ劼、\脹疔vAN0_]ナ遶彡ョ4コ1'_化リl尓レリD_s&ヨチyリマ^」23Qセ!w^道 ゥt|M'@レj!ユ $+!惱=)シ<]ヨ,FtッbK併Bラc戎マ抽@W&GEヘケヘォロ~M棡。レ腑オ_棹カxユカyユFuョ薹クfタ6トォTレy念MW昭-。^y5コ厄g模~EュテQイ|ホルHnア*ョリbョル<タN贓tーウy7カ∫ーyOW寨カ>l+/`Ou>k>倚トロPW>弦磬n>めヌGxGy o簪ムy>ソトw豈qMセ ハ?ヌ|7淨ソ蜩| 於[淡X k゚ヌ:k斟ウ.糒ャkセミz/カ~。゚イサ%vO^g竍ルCx」スoイ+磋<゙bマ[オ|}?メセeo腮ロm8{3?゙゙N脯$ム供,&モナ~ヲ价マ砠営:q-_/樊ナソユ2歙+ x巖浩4;aH0エq、6ouィレルチ3Kbpカ75=タ拔コ怨\拒tォ孟|ユ&YーKムヨКムァゥァメサリR溶aヒーvfヨカキ」袙ムャRッ」則^G祭」bM徇タ`ケ!BIヤmJ6日/A>vcqッ`W」マレ潸隴0~ヒ豺ウ^6 逶リ^| ロ゚ノfサリ◯^カ゚ヌVイ5!d嚊疎c'進枌W釖ユYv 爵ツ_ ヌ%5}"$゚減エl`Yxアヨウ!wーXB6箘tカ谿H診eノ&カj=x=Gカウユmシ付ミ&vヤz妨惱シS};A;ヒmミホ偲ェヌ# U絅慍%トiヨコ致"ン沐靹レ' O!ヌ楴栲慟ァaスDモSmナ溂?軟マJUM籟攀}マ \頤:ォ#pケ軾毬Af媒 /、]}%ー$襌ュタ熨WY)削ツ_G6y僅與ウケv湫リ イヌ霊*5サ鈍フソaキoル暼;*措(5ツ/|?+_フ!;_ホキモ0j<゚窩%ユィnア゚d +u<フQネチ>ェKVコィ3+邂fv%c」Kf睿・}馭v卯ヒP|/キリヤ4!ヒ],クb サメ%Fq)Vh[=Xゥ陛ニY=ルnV.嬪蟲rォ7ロロテ觜リ!ヨVo dヘヨ`カツハV[テフ絢'Cロx」コムU扁韮チト澤1「Pbタ番 8rナソユ藥 l゚t^絛<舸vュO夊ム'Mmリl鉄Lコ樸タェラ燕^$@ォ&;峇ユF(ホズフメ&vy77pS;サルdン"カー[ロルmナケキc4メゥヤrツLf:レ;fフコ[辮8聘kW恟ンリBkwカニ堝執&ウ ヨzkOvォオロjUーgャi%tウ?エfーマュ*kCヤ妾モ煎葵ソヲuャ妾%Zmb寤 巛vコ#o飃;キa┯菓メH}IzgスSヤケK%ネ==#、モン~瓶チ!d]レ舎スラヒhOdワ淀6ソ#Mクワ?)シ嫖ヘc便|Vcマ址ャ・剃4>孅檪ホ 8猖Y>マァA篭戔BコノH霹20メ斐3ケ并メ昨^゙タJ蒲wОッRホクv セ丐鑛Ocスン=9広aY>4 Q 餮\、益==7ヒ8ュ授ラ絶m ゙!ラARJlRP\セ岑+dWミ:キ)Y泓檢~Fkコホ吹AェZ4#rナヨQィ、拶#r,代リTxvu"技'ウモュSリケヨゥィャツnキホb[ャウQY敝カYイァャu8%ア7ュ緊%ンコ枝oンタYキヨュ|е;歔mュサTn>モコ/ウ輌k+?メz神o=ツoキカ小'/ヨS牟楔イュgャ。ヨウV。5レzム0{/O\テー插。so~ゥン?k/ルAw サソオ溏膳ア=ニm招vオwカーヌ[sンュ褻$ォナ枌拯iュウヒュサ kォ=ヘzリ゙ロzニョイ楾躪リsュ゚6キレルvO{アンロ>ネ{oマ5~オ ケ[立 ミ伐輌'C獵ホサer栞キ8OHシ弑゙#ヘYナ綯&e8 gイg哦4啼」n アQv嫖Yネn`v#[n7アfv岸ツホー[ル:{サフ^eロヌ蝙jE⊃蘿D ツン浜窕 ヌ{・t8fモ蔭&」Y栲 嬋淆ヲレ'アdアO1ネヘ !{qラ3那exア柆s9ソキ1hdm?ュヌfesp`g牀慄vウマb褻9lコ}孃渮雫/bヌ`ァk緊ゥe=倆 2A」;U! ";コ晏]ノ開ー`LィM|l(ロXnbソsリトヌ負カ_ッオxロ張爺nVhフy覃f_ヘ 66ホセf゚トZ巍咢ュBvv}サロ゙トエdOリwア霊{リ/}ワイ蹐yッカキヌチS0i゙l?テWリマユsXy~譟mソフ7リッサ7ロ}テ{£ン:メレ」(タzリイ6ネ ウ?澹-P?? 
ン3岩ャ=]ワ%E円.Z屠a゚ 滉sjdソ3)スVゥ&エ・リ!FB@l甦:hセョ頁TヨIヨン嚶キソE-ォタ著#[c サト江eホEO!リヌリラ"タセルィ<ナ1炎.ソ。k-95ヶフ縵、Cセァ 蓼椌9^G―オラ槨cOヌツ8授Vqhシツトq|?弱部東$=ムニPoキ~z8剌sア4mィAPァサHOrシ2_Q ァモケ<>8スN鐡ス;T」`OFeyシ.」メ「イモh誘莢Q「'州3c=1 ルm` 拡E'*アクツCルアb;E形ト8カAツョサイGナD(c/緩CアD筏/ナ^陷V゙b:*ェx。リマウBQヘW9Xア?Iフ蝸楽jア?ソ^,dだア 黨h活ァネS=゚エ;ホ>鳶 ュYハ ツse:メ波悠゚"怕}キyf武Uj+涎5ー" ャHォカ_ケWコ標ッg徽獨8ニ擒3ホ殞ト}N{ヲ・6カW遯括伏5レョイ幎匏メv>]ウカッwセ$ンニ-|A@「EセAHiオlQテ、8 KリPQ鬼\マKル>0カ盜ホ考沿イe「5凝YォhaヌVv湛ノホGイトj懋」Pヘ>ヌ籌楝8冏欣xセ8暝眦トY8ナgム秉>^怫S9X=ミン]9笳ケvl2Q彈A鱒Dゥ !?ア$_カh"ミ(薦゙哘1UYlw蝕&ルチル:ト<ユ$ 塢-モ6Y(wchk-Pミホ流g7%a9チ|ク兜w.g・ヘヌbレ「eエハt屎カKiv!ア娵qソb∧サx旌峯ヘ凭5^Lキ ヨIョA總 R M}g"-M\覲僅ネ 6担栖k%o}U拡a}身ョcqツ婢q !nc・箟lゥク5顎 G5;W<トレトテ9t^~哨$!樊・セウク暦&^炮y・xッ7。クソシ1雅`゚シ碚sク,aチ疝EJz~Q:?4ミクヲウツa<ネ塗經ー誄ンルヨkトィ?`9#ヨO|ツや3T_T|タV怯櫓uナ^犖アq=宦&4 x荻g驩b<5bgRォ4テカj゙諂栽揀.ヒ?(S Nノoh゚~g篤vV液ワWレlョl禅H趁ノ、jフンй83サヤヌ碾kA.?゙=wJ18sハKvgZ`捨aCd..{! リヨ$廝tテ」%、#%ィ#g貧箜mシ逑血_OwャニiVキヨ ャ?]Wャ逧ョユリvコョワタ~ハ ト|ナ&セj=[9ヨ僭O~礙リl=ロァSクRkトャ决褸NヒJ膤nナTィ6.耽vタヘJj俄竺テ?錆U鑪ン゚`ヤニ?妲;z=奸$:コウ黒麓エ")ヌHT ヌxqm4>75KX_戛8qア<96J仕ウ(カJ使7ノ]Yサ愨&'アヌYYホ^養q&ァ,9摎ヒj゙*銜c臀~ョ\タラヒンr@贔dンネ痩=8ルCsuBh'クnfサモy烝4n:乙{;_サ-ァエI爪QNj'ァtツ)ョJOB=ユ晄^Nw/q/gク3ンヒY褌膀rネュマ1ォ゙n鮖リシmャヲX ユラ゙<]ミホ/莎ワuロ洌C毆7ュ鞠墓系鉈ュヒYNゥ{Kg广摯ニ^XAzヨワ炯}アOヌyIq/5コRsY鰺4クスSエサワ8出iヨ+|Wコ橸U齡沛=準\導。_3ョマ型_オミロワヒu褝Lサiククンー#L」貸リIFejxモヌネpウo キd"C1ワ廷ア[カ4璋テケ#N&$wソrFヌ故ノ」ヲQ゙ノ」2イV60z9テ倨}ッ:y李,V& 膩セyヌ |s價aミyィ[LwzOw・ッノメix 「\嘯uwムワモ_{ロ}襦1"e*ンムX&ミa_ュ゚:3゙瑾ェヘ~タロHヨュ3隻4;/。ョロ'ム期Cクy疱ュエ(ロオ勲レ譫コ#I!]アォゥ(?V?イシ」齒サワモAE?畤,詆O巽マヨャcヅ 匂l徑`サノテリdルネハe廼ノテルcqgォe+;^ョ`ァハ菩lケ姦敵ーヒ苟ケ壥*州Cケン+叔ノcムXヌ^婆ル;d<}!Oe゚ノソー゚\ヒ3y/y/堤ア|セ崗O*y1?^^ハ"/耘ヒ獄妨X=蝠Vy5DカY%:k愍゙(oー塙X3莚ヨ~6k。シン:Xnイヨノヘヨr丘」シモH゙mw憐リ}蠖 y歉(キレ聿D誠ァ|リ!ウォ裙 、]'浤/苫レラノ銕蔘[「怖ユ^セf&_Bセ!イ;"Wセ+ネトPセ(1c1A~"ヲネマEH~!面/ナ碪kqエァ8Y~#ホ量oナF攤M~/?袷ノナ77屈]reノleヒ1Jネ雨ノJ・袱 臀睡ィ,Yッイェ<_u覧ゥ/ッW}蘿U T艫ェソ|F 俳ゥAハRAP;ゥ<5\ャ ユ。ェXユォユィJユ 5FュQcユj邏蠹BQ蕀コ シU鴬SウPヘY侈ュ憤sJエO>緤HソRJ靴Z輅ヤC、皐xRHRェハハ茯駛#_マnロ&ハタst|;チwソ征cx(栃}ヨ+zアm5ウ&矇~双?3樵艘割ヒL!h#モ_夫o+樵)ケg;7iロセ 蓖Gwn熄8磧カ。wBレc^嬉゚Hンセチnソ>ユ-OW6-聨?8ァ&0。vcルjwヨ]奮ijロGMfユTカXUーZ5-W噴Uヘ`ヌゥスル駛_v棯f劉ル65mVアヌユ鎧ネケ:ヲjt糒ユ~ェ緘糒jソWニキゥ襞)ユhシ`閨z゚F霑た`タ{スソt゚q/コ憐/エョポ゚レセj{H)」中称4セOモ 滕ツ=ミ・磊從贇傲>ヲМ量ーニhネ際Qソー゚Rト^J;x?屡ヌ;D纉オ~?ンaソo゚ZセC4nミ霙v.2曲◎Da、=ルE+畧ガ-ャ迢オ簪nWモ^フ;tロセJコm =スcッマ:.-/BG]T 、bl患qj屬VアスヤjTkリ~鷙Tヌア」ヤu;M敲ホQ'ウuv:昊、ホ`キィ3ル晝,:=ゥホe/ォリ黥セ叔`ョ.訥ヤ・シL]ニァィヒム$l蕘ゥ+ミ$\ノ酋W惇5u-ソHンナ/WkユスfYm裃ゥュ<振オォヌャ碎 k奔メjROYヤモヨFシuュzチコIスlス、^アュ^オsユkv_=Hスm、゙オgェFセス^}`_ョ>アッQ湃7ェマユキ鵺T_堯「T}#ニゥoナD旙S}/fィ淨\ォ8X&Uソオ]ト5Gi.Nミ椋S 7ュナテ:K|サノ#髷rwンKNユr夭-ム}艚ンO.ヨ蝪z@嗤52アl_庶ヒ]ヒンス#ヘ ~拉 柞2(釆キ儉h04>IhシフZ%キ}モホ塲:QウPiカ4 ォYワメY褪* 嵎イAzヲイqz8屬GースtロG硫hヨェヌー」Xvむ劑ヲヌアs."=枉ョ'ーk&=揃ノlォ゙=、dOr圉ニ゙モ菩 =gpGW<ス/ァg!z洒絏=。銛~ェ^ネマヨuzソLトッヨ[u衣ュ歴t-゚ェ:フ_ヨmス、雄0+ィ#V。nカヲラ+ャ#JX}е>メ:[ッアヨ鬟ュK1>aPzロウ9。曜メコリw{ヲ#k゚ヘSナ!フ鳰|Uz噴&_採1トソケヘャァ~Oッ@$!儡「・jエ{タ!x[セク大圜&Bnc# 4ヨ* 6賤 エR鐸n\ヨ 輜-・Oニy;附ラァアン鬯LMモg珱數跌ウル"}.;F渮NヤーモLス持ッ7ーubカnヤ粒k褌FM'ゥJナ(:瘻nT{ェmワテ:p ケLuMョ_:搭'h゚゙ッセチキ+、 ォヨウラiヌ、$}キハ%w゚竟U蝣 /`ミメ晒チ枠%;ィ=ャg;uリ&ン、゙Yレイャ6yッf%ンoZ,テ槐ナ`oキ,on'X ヤm%邦_靼ネ*アロ-Q誡岔ン+マR^ミi アンメゥ[タ@t寵カアモu6チレルj|@澪リ=!`Oネハマコ (ノマ「セイ イエイラ3オ6タラf!ス(゙d9W!ナb琴ュ, y0快ヤ$[ンロ3セ乏セyV畳p#ル4'Aィ<ォgイ・=!;?サd燈kミ;ッ$?嶢s DoSmネg燈'ニ埠+鑁ィロkるヨホ^ロ`#・=JaM>カン]Pーホハ歓v};-8D&トク C8ス6ゥイ u観仲"ノ%V%モ8.カN東A>ノー霏!.q愴樅;堰ロ'穂;4n]キ@拇|ヲンフ,キイヌcォ4誓t.n得 弊 券ョ誓 L得ァャkレyqタj1E,SK侈ナル[・t#;キdzw^ケ井9j"i凜ラクィ-T勇ワoス+-エラXmbRKGモーwチd!" 
イiイョp!トzチコ5蒭コ4 q!'蝋o"ニ ]pヘ(ヘk8ン觚*ツk懽譏コJ9ニ"ッwyョ訥タ1ケnYクVQ純∇n/「ェ忻\ュ孩,号qpqPユ$冨ナ:ンチYタP5ラナSQ腟辞篥肱W厶ホeN&nーァ>'ウR^y2_O腓 。=Sヘ、^gクnlイ|?_ロト_!\マ?S)セユ!ノシリbkムマ!岶PBヨ。罵By99イ m'_苒\ fヒe`怖l賓mrU8*ラキ袍―ホヒ ケ「rCdネM-7Cir TNn穎納」カrヤMノンムXケ'zXfハ}ミvケ?レ-@蛛陂<ス+゚歳ハテア&゚kノcq#y:1Vヘ眄タy6瘋ホ/墓匣Q^ クラC&ワ8 Vo09oF4^墅iニMモンTpハ9/nFロェ?オツヘタ賈LjU;z ケムazク.45h$ヨp屈t蒹0T>ノoチ=xB>K萼P 氈オYリ"サ苡皺 」ン 檣)彳摎/2ヨ犀ナウ]ム鄙-穡説4'_b$%レW$セf$wF゚0頂ツH;F`$=t^O@iV4b紿マ。ィklヤ[コrDキ ク##a市リ4エb n)慷61D コ(Lラ"F凪bF"モ弟筝ニh2レ」盧スdワ>Fカム:官聘A秉セ桙UtC゚>鞆Pソj{柞ル禺/ クS8~ァ゙キサ~dw5xォロn8(サ#ヌh捫ワユ鱶t+ッpxy?ルテ5s[、ワ3レLワ+ルqテ"Cヘマシ" ネ σ'ヨョエ-ッ>b|G 蹤Jヘヒ<ユ錬Xチョcラルサウィtャスノ}ネンrZxjGヌ株0unノ畆ョスm- 箝Pキ<凌w゚米p&` b(キ>L鬢:・シqG-ヤラセ霏gA具セj~\]モUaM゚。0?p」?pGトォg5" Ln5ヘSメモニクァ,ヌョ淬3セ7メ~菷Hw'倔}ク_-|%:|'ミ(_ワ.チ3ャ"AEEZ*&4Ulィ、タ ・フQョ〒白ーD)マ+eaオR6*裔蚫クィT/;・2ェTATゥ概%UUBィ秩)(Gゥz)ユム ・ョヤF(uミT・.Z。d。-J6レ・4D彦險メ抖圍ウJsケメロJワ@i;(qG・ュ葢~Jッワ')暃cJ挽)=6・゙ュナ;J?|B衣U盖ハ`|Nケ。ワョ チ?+CI)蚣r2隙倍H 蘓ROI*捺ハhメFCnV ]奮、ァ 飭W ゚ェ─ユ△UN1ィ+ィCナF麪ア:Blォ゙#讖云ヤQb/uエ8]#ホV鬱ヌ丸ヤq筬黜ー:Q<ュN?U'祇ゥSナ+4'uzヤG「:#`ゥウuヤ9ワ@u^2觝ンIラケVwbヨse欖サ"コロェ_ユッYロKー、概1 棔Z蝿p讚ヨヤ)我レロ:7LェwDL;ヒoレ?JH|エ穂?クs徃KQ氛イ鶸PQ}jゥミH] ヘヤ・0H]3ユ0W}朷气・ xC}Nェ/チ;*H}.ェォ盍コセWラ「Tu*ュ「伎z醗n@mヤ昏FキゥEhコンォnEsヤWミJu;Z・AユスィHンィミA0:ヲセ>Pゥzgェヌp斯7SO燬ユキpg$ヲセ具SOユ偬qッV?ニシYoW/篆5>ヲ~ナエ星キ麁ヲ|テエ+L 防$C花V&ヘユ_ネ`W2F広T'ウ5 Ojlユ09ャヲFノ咫ゥ&メ哭ヒk ュ、ゥエ圜ムlMァヘ4ΩユLレG メAZ 」腹エTエv ]ェ弗/ji迅=ゥ払gオr7杰ヨ*ミh/レ ぐUェh7 ヘオt。nヨ2ホZu。ァVS隗ユラj エ:ツbュョーGヒi┘Zカ茄HLヒセヨ ゚iM─オヲ「オォjュナZアョヨ6ホヲMe/&「r囈pJ。uYヒu゙ゥスs」・]M等0ョ咥RGPエ[恬イス慮{'A彰マpε'袒ウ#ァルセ#ロクcvt咒ハ{矛ル4B謗ムピJ若、'ヨ?ユ-!N誇Tュ+hZwーオロ「ヨ nムタセ?舒LDSRU D$6。絨ンセо6或ム0 鴇q'{.ュF詰H、ヌェ:6%レPァZ燦m・7zB{"ラメ;XKg變/\サクZイ マト鋏H神ラサfェr"ェワ、 =圏$トョム1CPiレHク^Uエ1ミR エqミY=オ ミ_嵎 n5メ酢フm#カユ\ケP臭市 垂!vv闍カw}\$~シ4>?」BvwY ^F9、ヤウ(,pテ:ァヲ桝"X良3kコチ2'7q斉慫ッI@エノk宗 m ヤムヲチMレLFd就7m6ワ・ヘ≪レ|ッ-レBxNヒユレbリ、-レウェカNi+#マ&Q X釿sソ外ッ2|燹sュテセXuc蕊"gメKャ」ォXG_f]ヘ:コ勃ttメ6テ8-フ+M#Bレツe" I」メチラ.彜x~&/ル薪nツ+RJg%レ須Η磋x=tソ艝゙ァfシ+ハ/&,^」)メシ蛎Rエッソフ@テォ&gラnニ+xEvo攴eー4テノヘ薦Qカ9Lチ饗型ャg"□7jォ5」ハツ&*6ヌ-"タb|,フ・ `;&lチテク(Bョ漢%レレヨpトソi11アラ彜ヌ季7ォ d・>bィoE魚0ーキサリgァウ+Gョ+Kv(アJFェニ{クモ}ト0 3ユkッ纔r<ンe矩n隻(テQ;」ア簔Xロcキ_、[$リソタ{v$フ?&4畫4!ゴ;ソゥXット=癘フ替゙Cレムリン7}襟>+梭゚Oニ覚'髦/耽 ヲ<ンA M秣1sモ゚{'始゚閤$b ェソ#0>オ颱vエLイユD懷ノャ~"タ跂P5y6q :゚繽竹ェ'{緜彦$~g9ャク||0dヲBnN&FA$+ルマン>Lレ鉱ノ没_@コ播iラe槻~蛔オ城゚ハ?i襃億、瀰クャ*OPDPg` 塘サkサ>ケゥGッB]m/ヤモA6サラD;-エCミN;フトkーT;ヒエ」ー^;ニトqリュ捩レ[コv゙メ゙^エ痰v ョhタレY$h「レGィョ1jャ}zh躅<コKサム>E+エマP。vi泪レ%tD]ム.cェkレ7ク授-nヲ}k?珮レ駿尾 棧ム?ョ君J]タuocラ{tヨ5|LラI)=H6i。ァ生z メ_ソ翻ッW「ラ髟iス*ュ」ァモz レGッE魴驀z:Fソ丑ヤモゥzC:GoD渣s鑁ス1ン・7。ッM1ス}WoN?ヨ[ミ吸KzYoEメ[ Do'(z{。゙A(・ <。ゥ~ウp゙QクSソE。wFキ .ツ4スォ0K&フラo=ez/%スキPィ゚.驀=z_5}>PV$ヲ%}ィh鑿琴ab>\ャラ顆既箋(e}エク^+nモヌ閲磚}モb墫譫瀏L5ラYU3ョqgЩ&u 遺ム GテB チ;Ka笠3ォ垓ウ.カォ$":ア5>「ホナョHJリn猗軍セク(Iヤl6$ナuテ種ノ猛、MX跟クョc-゚ア゚DJa<タ407q下扣ョwョヒ]sル|畧T亂遲メw9$9詈LS餅幌イB?Ц叮ァニ。.(モュn列:ッq靭m1:Zト2對LPjHZ。オ,藕ケn+)Eヘt゙1マュ凵ラメv's#e|C舗変゚h・トJ%邊WOホヘハ9エ&Aゥ_h瀧扞ョ、 檄兀キトレョ陋怖|ェ(ゥワ勒イHァj?ユJ8腥S$=董、貝ニラウa6>j鑚。>コ頤。ォメB?} ム氈{e0F_sーP夜Pソ オ∪>ム7#「oAェセ篇_A(、A5}ィ7ヤL?鎧鯢P;8コU?】'Q/mtァ.嗇ソ跌」gS9 *ヤマ「ュh:ュ_@髻。O/ミ゚K鑒2ニ7Xメッ`[輪+゚絖ワ_ Oラナ/ 3D|メPG/:セbリ 6(F*im\C0ハ藤ニd撚コニ'ミミ8キ遑ァq淌@ f|_テL椀|k`二31~&F` H2E杷P9SE筆e&ハ2-把QモFキ乘ィ孳-麥ヲ。チfi4フ,亳贋CE^棗ヨ1ャ{i縹(0ハZn41コ~IヲSサcネsツヘ縄Q茹サ#no階>矯ヨ逾kヌzLホ艤 広'キt次Hヘp,ゥ蜉ャノ^~ 粭' テ衒d:N<チ龝/ヒサァ#s錏ミ=ソ呷$.0+B)ウ2ワ`V ウ*dミネャyfu鑞ヨ/f]ffチ}f6L6タ 3譎Ma・ル6-甃ウ \1ロテッf.ェn゙齲スQcvヤハ 噺ヲmシAwa_ w・#X羯ネュT 妲5ミニL +@イツ檜e4Aネ7;トトeスpトMィJ8映人 廡ホヒッモヌ&JB唸ウ?H(aネ1A誄話死ル=ーユシ#6レFスヨym0計"「セ@ヤOo$ヘ{癖 縛= 」磴2,.$cャ捧騾9uィl糸8ィo紫ロヘ 0ミ|Faエセm袍懌柔O/.8ノ_'ー,ス、。ウ:htオro禾濺 r剛希"r「朽ク舂.xセ舂qヒセ_オH!M奧5ウfUVル$4ホ輅D$Xツnトサ1YEZ杰TSoGTg」$エモq繃#モ弩聢cIラHk_~$エ#。}$ヤ5メヨラネh#「[ゥ「}c8ニ甘 鰲pnエケリ警ェイ1 2ヘ) N誥xタ|1ソ勾ヘ'フルー゙ロフー゚|.娑。剌j刹Q}s)電ヒム tァケ0檬cヘミDウ=jセf岫ミ:eエユ\材7ラ「s:ケケ}knトラ啾\マワ郭フク了O0w瓱フンxケマ5痞s?^iツォフ#飲:モ:Dリ jト譽ネ`(ノ+nゥ贓ト蔗、#ホAフャ2^la哥B(メク*ウttレ21:戍4ハiヤゥテ閑シ凡ア゙@_誥:タwェリoヤ莽 &Cキ端yr5実ヘrRフ向sニユ 3゚Υ踴Pナ<uヘウミリレウa;ヌdヨ6d淞|3Xf^5%リn~ ッイzフッ皖 シg^ヘosヒ・y^Tナ ニワD附ォノソ$啻ルyU ナ冽Qナd襾 「.7椌#鴈t G圓ニ。、kワ4阮s}ウヒ}ルマh酥ンケエテ&巡PミOBリbメ・薦p 苟/*qハQ序ネス-ニリニ7z\ス e"イ?ツ{fqvメヒ麹K8&B嚼&n&= sl糜}!メx僭EA|M_モNr/zz'」羣 Nラbヨg公'ffW梗r[3nA/C/?@EG6_~z豈ミミh!藁aX`QネキXiIーレメ`ΗテVヒン - 
゙ーR-ォj・"j] Z破疋螫RィイUeZQ=ォjjンZY媛Gォ 麥UEャt4ンハ@sャLカj抃Mエラェ歳YuムWヨM8ナj依Z膏cォ"ni5ヌャVクォユカレ眞V{<ワ黴GYケxV3柩uトウャ[fォ+゙nuテ{ャxソユ#ホRfレムモアy)2sン獰ムヘ'=コオYロ綏。ュ舸 画N ソZYF@V?YwB{k-エ肉髱 n22-u革Bッ袰肩コ碚9>>スウッス'MT]ュ゙-壽+0wコ|イナゥYユサ9 D~キ uシカg高5ーュミレキXチ}wャ5 &Y統セワノ命nfu4"tホアモ<=キd"扈E>ydゝzネス}「樅W厥リクI゚マ/髻ー!リ゚タ│$調- v列蔀ケモサm'ョΔュ瀲 Eロシ+歹蝉dXa^・<篳 ュ04sネ$ヌ・ァ6>}ユォ鏖Qオ2 ?=eラ"ナCエfタヲゥ(ィヨlクニ7Xs。ェ5XOB+i6p aサオ珊K眈|jタ゚ュ鼠ス謙YPkス3惣1ツ7サィ僣燭vオ娃ノ%リッ0*ロuュLシvヲ]シ1Z穢PvャE蒡<哘ス0qン彭。・Wー%wェタc?e叫4Tオaホ4ネ「…bd: ン タハLOeg゙O┌B霏謌」ツ<*t槁咽iOマrWト亊P欠咀8Y珱.$O9ソEd。?2凩#ィvミ ヘLヘミノホだM0ネョ#lo7Ivフエs痃サフア崚Sv Xhキ|サ5lカロ@貸カレ-サシm゚フ鑁P!\'シ-ンツJーSタ3&ツ オAd8 g$ラ@゚塚訶hwルUnミネホ>88知L| ,ォ!4 9サ加3TJ印7エヌeァクコ+mtヨラャ煩{ナ"イx#Z秧<サ茎_ョ僕1韮昧ノ2&Tンメ\6刄ロネウ=リョPD枋ヘウ'+:ンワ担ァ}Fフ徹V2敖フ膵ノc%/テレ\m}蠢ケ%ャヘ逹ヨ+サ%ァュァ;*8レマ雪砒ヘ`オ 戈:コ%鄂,杣;ェコひM祚^tgニKQ9&V癪リ;シY>ルPj「oao3nj「厮ヒ'ウォrンQモ<ユ茆y? 叱葢ト祉ャ恩厠凍?A%畊y2>ンnセ咀cレヘァ,・ァ訊孱 稗rM罠0-タ]C撮95オ=l迎*l籬2僅セFツ疉@.R hヒソムヨDh;゚67 コb/oノユ\純u[VヨルャウQx゙G鐵>2}_-曰X?yi_ッワン゚N_yW1ー襪:/mΜ=,19f簡0ルモヘyマ・:6コナkAdナ*"1#禍qワ^針S`ィj゚オ!ミヤセレロテ=リチp~m盾qhxヤ qLD欠・Xe?k解0|lO TクlO鴪(廷Hウg「= 5アG-y(マ桴コレOロh$コラ^&リ巾4;ヘカ」ユ3hソス擴離gムキsリーW熬J\゙~Wイ_タ5\ヌ~ラキ_ツ9*ワマ^'リSv!~ニ^勝ウ7郊攷 カ7絳vセboチ?ル[ イ_!{1$ロ゙A壗;IO{冑!SWノ2{/ルj#'膃 リ骰!Zヘ>Loイ_」M#エ:hソA;ロ」=」vMレ゚>Fァロヌ-ー゚「'迩}=zホ~_Pロ>-閥マu巡ニ' ム;」.詩a \ヌフfリ`#剿B2N截1i仂Y/*ル2N∃`A&サイユタ]ワ迹/PKRV|PKma5$antlr/DefaultFileLineFormatter.classuQ[KA=」ォョ溶^[uスト圍ミK`メC H/Eイ(*ヒレ゚/ヤ毅Aィラ メ恙3sセホフヌ銛+ vC#@"2Rすe(ふ22dPuテ2ッVコb」gリh? サワセヤサヲPワi珠。\8カa 4ュ}ァ゚USキUW'的)Eg|rerVワXメウカフテ5ヘ9ノT^w旻Kヲ憩=m(頽tRコケa曜ヘI3餃*h 6j&0、サ、i,醴ロオョoァ(iオ 'Wェk,X朏。:ehsq#$3ヲュ稍ラO:ゥJョ棔ル ~z7ツvtヒ!XYjア烽`ミ憤Q2ヨ\%%-ヘ0・アCス#Pレv gュャ']チPUWzィ3S゙u鰡∃=kル1アYcGマ・ 2o・;ォャ%ンヤw kTFクき zセn1ェ@#ラpK 決タ\THテTミ_w@Ub9旱ル1, イ 嚏a?←B捨!'p 8&DH LHrWU<Tfレ4*7E漾レ? |/樞丗s|%5ェF濕|'0iWシ|O獅ホ2Vカ8[シ慳 +・fウOgA66・ロカキk=Gスッリ6'HA4XwPォヲ旻ノxノ+hマミ7H゙ヤュZ@ーtQ繁ヤPfウ筰-_,SF2ン*;+cユhル軻_pfヲ&ュノY0ニチハ執w;キm}RTヒミ諒・9xチU[ヤbテTiMルネ+ハUtwf9酌ヤ係KBユ"~hホ・澡4ナ]7ュAョヒケ."コ忰駻n ケF;n1泙h B ソ@テヨ!B凾ラ\R?|h」ョYZSメ(oツ捉R@モマ%鞠ラc^'|テオサYmァDkリVリン*レタ{V8竪s8嚊:7t畧f-[ミ}H「サ墾:ム僭Wクニeン碣ッ靆#@8N<ヒム3,BgY0 :5ハ謎ヒァ蹙&+疔掻 隧ト]_ヲd「ョIo・I&Jn履v―q!、I}」Pルj洞タ道J榾B x鵜 &Ys)!棊Kリ-zメ・h酖ホXgx jュム!゚」妖造=ロ1ト」n/サW~テルFlツ|懆КU>オリスチ゚]ホノ<{セHPラ渦H=a!Oq舳ィ驕1T」Iュヲiメ)陽」wキ癲*エ蹣gワUPKiZキ) PKma5#antlr/DefaultToolErrorHandler.class昂ySG ,フチpクイミ5X斉D」倏%YDヤ8サロタネ フホ"壙佚侏hb4&ムi.鰻R豈T3$"_ ノル+Pzコ攸~uマス_lタ 6「UD宸<エハxサ9iアGA!:$tq/']懍導$Tコ奐H8、` ヒ錵_滋滂袰q\チr弃"Nハミ猷>A C%NI疏芥*艱・色姙荒2ヲタA殻l歩JHK用Zチ゚淆3"ホ岳Iャ[ョイmSレ藪ャQタ担グ2ャ;サm'ゥサ.sл9ヒト[クヘー wメネ」Jェ{Z」.テbモノs1天、D:9レテワ楳ネ@キVoヨP}壌ヨPcッゥンム的ンi<レ1牘tル>フgエXMヲロ伯Ciテ=#`Wd椪?%.觜コk元fモ誌蕚ルムチオ油Zロ&隈5キ/「ルSラgクテノQモ` .vPws[;トルnツQルハエ駻ヤレヌv鏃ツdホ至-xYタャ墅ァ「b3杆6 X6ナ踝 gル8苡l゚ョ禧シニノ愍!笨7q^P+-o竄知フJヤルブョ=ツャ*゙テEォ優|ィ#|ャ|ハノe殯・Y'サモfa溘h/T|迂WT\ナW,[7l3Y炭ョ官qMト7*ョ繙オ&0篌カl+チ(ミ$藕*14cVリ!甬X@A于ニィmQ^=#XユM揖埋チp現驪模Oナ6d゚n笋奇8゚ォキ@タメyイzヤG ヘ西rテ1Gキ篥3vEワPc倥ユリ)w髴g詰.o~トォケ傭(擅レネツェ^ー範ェ ワnヒcvK%;93話蔗*ソ 筝iR m・ *愈fテ・.ミ8ε" ぐ5ニャ嬪3j9Bヒル、フス際稔Vェm4e傲タ」慄ャ!wリ孥(朶tセ^Iモ-}タ>モfWIyp*ゥbnッラciSwィ。ャ撒/L/ヘlョHzクカナテ]L>g瑯像ヒ2UzミHuア 4・]サ拑シVgo~#・$・クYm礫ハモh?ソY,R謀兎ナBソ;頗粲卷zフR蓚ハ!吏」ャテニ剴b^8\zソ$ WU蹟"2ハエ蜑4纓。E_-カ儂R゙(ウI,ホ+梦fヒqラyコw ツtラo、ヲ,スAぞ痺Q'゚l・ンレ掛Bムiムネ檳]「5wQpシs5 zbナD(E ハ画也ォ孥oワGaYbbリ,、PチM\メト煤"「5dPヤ L「U+ Iァ競SdPメ {K年「dハHィO3Xr E唸VヒE5賊ネ}キー>gTヤM!3 ネ徙ラT槃41ケ&詒ーシ$ワwl6、阨U┠aヨモ-"リ泳t"穫ヤ(ュトP$痒ミシLX^!├@vfテ zレ _ェ ヒ'1Tヒ3xbヌkCh rmH茱「ノSX E壺ゥZム*BUCアヲ_%\!iナ4WムJW8凝ヲェIG ョホr淮治ツタIワ肅S1;}外Cタノ_ー烈5宴ユ隋MU(Lc-扁&ラdー _OソHユウミuモ{コTXkー沃"Pa=`;qpヌ靱:N_gq醪G#魯?ncwh弱w愨8I嗹。ノマヲォァD」 xn閣レiD)V幾 5Mホコ/ホコッ<>_ XヌSマヌbテCvXOソァク榮タdzG造コqノ4, G@3蝪?PKァラnテY PKma5 antlr/DefineGrammarSymbols.classンZy|Tユ?躁22y!ノЗヘGdノあBリI$リ*N苧dd2f籔V\ェカhRAォクニ]6・ョqュK[遊ワェユコVアオ規釛賚ツ$テ鯢3゙{ホケ轜マケ迸{3゚;U「タ (思ぢQYp斬2緬ムヤsヌ&儉bRツ、祢途&ャtツiXW砌lィナ)<ヨ E8モ8墅d舜ツX慳8'奥タ譛櫓ェ8gq゚ILf;p&畚ヨ8。G)8k厠gコ$チЁン)ャ.凶dBワ7N1ゥ椁@0シ(黠[ w*-zラ_ 鈍%知們ゥレス!/ 6オン 「` メM錆mヨ[撞JセヲHヒc0vヒ朋r櫨>菎_妬mz;¬=]zk、カテ\tハ畩スK ーFFwAQM,;Rャシ膝Wセリ縊q^ヒ・゙ナqiz:pbマトT^J並 奪9 稘ハG94ukbj7陷8「=ヤ糖鰐ムH溟?ハu28G饐4[帋、├トイQNモt2ss|イシ^F5崋Vモiロ゙rフN。Ik#JクL.g゙n,ヌモェ@ォァ%糒{乾鷸>殤匿]ンセノシ&=,ス戮 ス>=*島&フ# *a・lZエヘ,エサR]N:タBy 
,L碼43頂、V罧b从ナ星ヨq#ヤ7\涌ヌQ )8疏ォホpゥ,sケイヲ椣摩/Tナウェ0/Pァィ&TクvィーΘ1VQ韭 ヘクRォkx朗%R# 渉c*<ホ &}ミッツ^リァツnリ」ツNリEヌ楽ヲ2nヲi*>oェ>ョ8@< (リマ盟ッ笂矧O3*>均ゥ|゙2T喇2 ャE|Iナ゚ツc搖ソォ2>ッ+*ユb「2/緻G%;g^、ス暃/$V/hラ8$4Tu樶 7リタ7)セ・籟'゚T|署ツ?3yE(]側ムg,X5YラエウェェェホゥヲY>`Ma>ト&%徽Tウ=ロyホ巵Aレ綟$迚ツ*~ぁェセdyヲ9ョ傍ッ9B鷹゙zzツ8ナチ*ツ/7次地モォB」ヨaュモウAラH毀ケソオ晟'、冉h?ク;ヨ6ネチウ|ユトU:Sv)ネロェhエtx就 。?ル>ニーー.6E2ヘK咾tソ。}・o館Bク!LアL3sヲヨ エミカ靦x;=aュ'ム6メl膩x淕ヘb~ォ窈TUkF1ォydツメ<>ハm=啣択ニM譏D}醉受 マタウ*<ソQ 。b5QナUリネオ"Cdェツ.2 事ナ啣h]善Xkムオ酔c-ヤシmtイ奠サ筋}コケノ(ウエ{7ゥツチAR喘(オR叶Hwキャ)ヤfJツ瑚*n ・vメUtノ秬k'童訛lコ皮嚠エ鶤6.」ヘ姑;t?ァ4>緬[m7Un#」ニ菻 濤MN^[^ホ*N原|K(OH^xA*n瞰僻マ哥潦(Jfェx1%,kqTィヨ6ムルヘ:鏤4*xエレcエOユコ*叔H八ツノ:ェホo2モQ鰌7>SS5m;・6>茎e;干゙囿4` 4O6クゥ{IZMcs mjユ謙)UNM」Z4~_M釈~^ソJモ油t -、Z攫ヨh膏 鞦XK3uv ゙@$dMシQァ僂レR^?y6SァソUラZ;ケpi縄6v|:iル3セKgoィ脚 moレ-z"菖ァ)*%・エセΞ゚OZB走0!ヌユキeク7ワ#1uョ6」カメチ`マ8:ニ}ムrk ウESセ餤o( ,S溘議kア・ 3ツクxヤュ印'、8/船|ハDラ!03俵ァXキ!<a4o!#^Cエ゚Iッカ、cU閃ル 纂Ciオヒモc戸繧9囂ォ荒=8nォオV'メT妥徒劈ж囗底1ノ、シハ)゙C鰤_3J7,E8 OナO'ヲ浬息掛哢クク陌樛゙nYキ j0・[テL」セウ?エ。ンf5ィbljuテдz#J梹謬n4ヘ懈X'i, UiIユTャjZヨrt2Mvケ楽:襠ュ"ォナフV_]垪ア-Qュナフlセ。S[F 切-lB M<゙MTV'v/熹1ッ$1L剰j梃A~ u%ン。冖キAフ NxrヒpワKwク 2!~0wツ.ロタ=w/#Z瓣筑粟フ%セR疋>B,(<&7汾ソ3Iv$オカC&彌カーl?5サチヨアマフリァ>kl.e7dウフ鋪j 0ニ$キャシハ* ソャイ\lツモDgミZヲミlGA.ュb$L Pモ N 姫 Uー糸 +hd5Gメt#5ュスlミハvAチ#0|ヤ庖Cau&キwチQフス0Bミ2v「5フン牋カ寃{a$ツ ロ^韋QQ ^- f襯ネ"z<8ノィ<ィ&WヘCiワO.フXN"゚閃ウ6榛ルサーmQ「ェ平0Fタ686チャ0v惚 <コレ騅fp&ル2Dタ0 ョ*イn5,5ト<シ`レーQge0aワク+テlC2モリ.S!N Nァ9<4"シd黏ヒ0#]NヤFキヨn9嘯ュ韵メウュ#{x9ン、~「ッタォqlゥリヨ}唆横オRツ鯀ラ癶!蟠)8フ^0き|展エワ几`シlトv/ワrッマv+゙溘Cp握~+}メ;ムキモウ楢;鰮セ&ァl゚}7-Rテミ[ニゥヤkーユ$彡|ヘyBe&>弄_勸ケPウ柑ラ57ョs゙テP[マス)ャ1rヨ 呂ヤ\エレP ?wDmタIヲ 80ク゚dナ[az|椅QxZGRkュー;驂售ヨ;b6フ騨ゥO關K)゚ H。UロPaNカTリツ2cU$.ヲBョ`?, 7アtV<゚鍍ElX4ユ塁。92bmT 事オ宰cチ8rec:湧ワT-.*駅ムJモErーツオ*瑤ユ?シbm7'ェl(ニィツ\8`>コ h%#炙,N 孅4lニ1p>祉kーnテー浄モ[X`)蠅2ォ濕怺ケxコq:Nト羈OッqV紕恪軈-曩ュ_瀉ワK6ャヌク タ&タ抱*ョニq ~ACx塋トオ"=b カ崖lヌaサ使:唄アK怫ン 閏1"テ 磚ワ$テ,)^ヌウトA<ラV嬶Exュ/イMチKlウRロ"ワb[L睹~壗 ッ Aトu5ムソ、gロJテlラ(=ロ D?NマvムOメウンJモワNウエ5+゙Eッ鰮>9|ヌfOナカ!2ロ.「儀c掴槙.}t ~沸榿}D楚ロCDiア$&ヨvV盗薐{!楊@ラノョ肉タ哥レ錯J僊VTコ凰脂K@\tテ)ョ[鴬JSタャv\ァニg({・然,蠏 |8G30氤2|籾ミ/ツ卻璃チソヘフ"G:宜aケフmァ5T< v羊?ア[レ% *セ y 拌ッムネラTノ喃囹F$iハエンヘク;IムAR)z\6鋼>z゙#]zカヌb+&&fウ・ン縉r/~咥1"鰈~Fl按d[K金躯4Y鰥+フ噸m鉈砥C匆ヘ(蒟ナNコエト丈クニソケ/rリラ麦ソ←Hレメレ$ィ?^嬪ゥjヘ茹封W)、+トレ、 d d2ュk}ン`賂2俸沓*メケヤ?=Y2rナ0超[ロ,偵mケ梹'O^v~チ .イ[h'蚤9ュB6OッH`ェ0俟Jヲ )餠@@n1&7L#aカKトムd#=:ナトヲ`V嚶/&嶮J佛= チ。2Egァ淇坪T:k カ3 eu0 'ミZ燈Ol1墾ム,・l圏+扠*m24配栲;mY5ッxエ3鳧オWシ"G杵U~馬ァ逐ル萵^ホ-ョホハリeリ^q欽兵gトtgE,匂チス祗)セ.Q\冷>)^'%コ7W"yt#楽イ;カノr」夜vュッ諺驪ゥ1Hロ`美ノo#ウY N!Yユq添8Qn0eCワ'l牌フオLン,カA。\罔5ZP0w糶%i9┐ウzアトタ婆8ロ\C<ワホVネ*ピz癶ユVx16rn/<*G~(G.」AAq !゚6ケ6n罐テy曳挂3ノ營「答*b膀(」ン0T1 ナIP&貘,1瓜々拊ャ吟Bアョ災+銘Nア敬wナ*Tャチ|ェ(ヌ意p。 ムBUf+-レ2。羞「w|Bt稾bセ#|ツ)(ム-ニ寓b館b」X"6qヲクT+ョ?河稱qセxU\,セ淋>ホ許 xc:アABq~\8_ a+蠶デb(hGKカ峡ヒ ? 
G~達>KルFf:ヘ Eニ^オTsI*5FZク4ス9スーIェi*5ァJ5?屠誂ル*+妝ラec)立ヨフN!徊」シёメS涅監0Ml:qエ駕ミ-nトヘpケク wタnq'<-8ナスpHト8\ツノb7ホ。 Eス8@nヒO[絵囲服qb+タ畴,「 熬當ZEホrqVいセツ ~ク「l 1e慚v閾+5」B傍Wッ+ン咄O ラルォcJ\ソエ4ハKケナtMモヨ8ヲユa`賁ンー此ヒIZ7W3p撥ヒコ_ア-\レ\マdムj9n筝'氣京xニ吉。J&P -Pヤ\|IV Ku{*P1ィテニ!@ス)ッ纖nヤオハ朔ィ゙メgヤ ュ諚A{リスeアソc?ニ-゚Dc.лC#Ss %5ワ)セ。枇- タlルイeーG・67 リ;R嵳タ!スs0`リ-ダャM- ニロイ。ハヲツT[.フア蠱|[>ヤロ\ー゙VWロFツ攜Qーモ6リニタm,ォーリcリio荼夏V麦ュ蛮dム# PKw\ツoッ9PKma5#antlr/DiagnosticCodeGenerator.classスZ xTユ?郛$s<カ a_-$aPツ"D$烽謎%厠ト !Zキ関jユZWー.ユ*Vロ* Qエ-ZオオュVォuoユjkォュ9ス7&3。/゚.迸wホス=ス@ケqLホテュ8テha、r蜜<:d`リ課葭蠹メラ]&"輯 x埔.g燎x鳴oハウツ朶テワマ到o鈬xツH繞"yサX痍%B黠vゥシ]妄/羅ョPx・4_・pg>ツォ~_~_#?ョ壁u>シセf畄?7ネ紮?蚤7)シ9w- $o壁u o鳶モX蓼,S碪vyワ!?ネ;゙・pッ0ウッ{n(ワッ^况淞)シ_2漓咀青>「Q?W >ヲqK>眦' 8|J皹ッラァF&V碌メ3,&|\>エ瑪 _(タCE陳 |U疚「憑+|]セ。MT鳴キセ」])|??タ?ノV。H睇 "ッ(エ漣ォツソ)L矚Cヨ<ソタ*猟/}閉ッr"TD褐|ハa:Eケ這キネ綏yp/^-縮蠱-,((*Td*h「A+「ネッィHムミ*ヲa恰ウd4BムHE」i4朔ムリヌ゙F緝|キMネァ4Iム!2+絹(坎hォ襖+*)璋2スTQ劼rE3ヘR4[ムEハーケメ=Oム|E):\ムEJ゚"y,VエDム竃嬉閉ラ26-緑巻LZゥh覆ユ巻) G)ェQTォhM朖u関ハ:Gミ:ェWヤ ?ヨg ス 6*:Fム&E[@ヌムl:A[舍「「 [ル菰、ィY則ィEQォ「6E!E')レェ(,カ|JQサ「阿ィ「E'+窗>竓BォSム6E]憾+鷏t瓦S}」・;M+ :Sty柾セゥ鑞E;q撻鐔-E)カ「] *森ヲ 9ヤ"コXムw]「閔.便ヒ|tケ「+]ゥ隱レIサ]ュ顯ャ殻 鏤bヌク^ムンPH7メEキ7)コYムnン筌!聨船ー)jZウ嗅ャmVォ4GC替kコホー邯ヘy毅善b 1m懷ムf aPM(bュ駘oエb チニ0キュVaトエ啻$赦ャdムHUリjキ"怨2stイ+慍b疏"エヘZ6mユス」ワ^ンTiNv.o ニjB<=ホJZラ#ュ鵡wクロヒYアUア`{{0ヲ{ ェカ7Yツ( ;メVア鹵オムサ、XkΤx:堺4ムル:ォナサxキッ>ナfr>ニムンjE2X圃扼}Xイ3fYル詠ヌニ`xk:ヌI「C礦ヲ`ャ9鴨ュXエッリニl=マメ[ペマlラ1菅萌ロロ」v+VFテ疉Wス氷拷笨ョ。^eヲヨヘォュ`ウC(踟喇tkーl嗇癨Q怒ヨXロZ>)苹背オニhリv Q゚隊x蒋停jモ4コ;$Zァォ6 カZカニュV「イセa9$aルセpィ;シ疫a駱珂 オwgn濱Q杠sRp[pf敖q ワY殺+ムウホラ6Yク岻ォ) ロ「ウuモ2ァUgX$-ツbチHシ:メヘセd~{ーCシー5ケ8 爺燉娵ッレ+isロtムホX瞳2$ウ"l好 tZォャkカ\婢 M恟凅8.`ル惹>VO{0ヂmメュt姜?ヲ滲隗&ンNwh終wメ]>レkメ>1駭コ!ヘ0&ァ{Mコ淳fメ4!zリ、GxYz梅」_tzX坎。ヒhエ栄タ看「アxケI肖&悌`ハ,到爭ワ1、ユ莞ャョKB唔OメS>肘ソT褪コ5到イソ。゚奐;z慯ウ>%ム法0ヘ:ア5 ~Vエエ履=oメ "ル偶ツワ挫XШッ4エ笙ワ宇ハ-h蹲"ニ0ア2VヤMm&V6'#+ムヒ&ス"フ谺ゥk`F_雰_FOソe=u各-- ェ46Z眷&スN0 z♭゙Eシ痾莱 ハVjo咆カミ・r訳y+p ヒ做.スiメ{ツヒツユ'L垪ル7G部t~ クネー攫aケュ3ミ チ+ 4Z-険.hカ堋AY欠ー%コルaM象胝 >「殉>イgI孛Ygネヲ>tトMAニ帶タTF筰 モホF{S徊メァイ Meァ暄懾リ-懾zチvK橿xツイ6+nnDvWー8キ"[タ%ナロイコR>6駮bキ;3涌[ン瞑魁リホウq卮メE4;:s#フg.yYI?26゙gJ稜gホc=,ゥ決 ;瓠」Iユ邸Wエ3ワフrkヌ峩X.eу型QヨtB &両Cナォ4詳Zイン%靴Xヤg$3!ォカk躁.應チヨ`(Oh粽レツチtcテE「 誘レX・□ヘ麈スIw(メ講Dm6エ8レメzE?駐gx=ャマ.ィ4lエラNYェ#サマ、ソ&>梁。フ}-ァjヘトM嫩3リ侍ケr、鍖ニ」セナI_iPh昇ユオ_Sッノeア\^%3タ4PXoClン坎イコueオuェモハヒヒァマーQ貯xム 4孝ャイhャャ]0「4朋チ\xiロリヘネ.,レ%kXZソ%[ケNトzタルラrニl;92q&ml gJlx覗ロsgDァヒ旒3 モネa7r<冷&ホ#ヲ/$アオ #,N5 %]ケ+ヨレ)5v\7懾4!W>Us)6;6Sカム.dイ蟇オbmチ塰1。xシテmlTtm蝎フ=9JYッ, エレ↑於妲_3Lモ 竚ッ諦、Bケ%mク2オッ mA。フ&:c9\0'カ光瑠姓ホc嫂*ル披、配ン;儼ハコf+KャU/c-癸ワ暑&_ユク)+咆ミ尅zR7サソwC販サ8ヲa徘命嫩ハカN竜Eヲ1TWF6ヤnョZWラ?ヨ#フKヌレSxサp&瑳左>c亙 7Fーウe=ネ#Mc1ツ4FcX0゙レ社ネuツuqシアヲ1N;wu$膩2肓_ヲマy略ラ#YJv?)qツセ(キコホJ悵h&#E、Tォ)ぎv"aモ倭L4巧ニ!\%ユ& dKmマワC%NVsィ洩ヤイj・\瑚ェSテ8~4NDDsx酎ョiリ:乳pン゙N:メケ*ャcアョP+N蒡i5阪~z地"(!^ス'eィ志hモ,V,@+'ーYケflRMフルp(nオウx5チ、斡アNヒ擧憑゙OНqsュ0)リf・0BU8Uvユ\ミG[Cンシg案ヌ&将ヒ@cィ5ヒ枉lタVキ ーiッ冓エト跌ミテ塰O3譖ニaニ&VNルX M」ラ#:b。mャSモX(袂"ゥ「。]゚ FゃnFbモ.Jbis 葭」Hヲ2:0-Tn風 リhtkg;C艶洌キサVミン Xリレ叱3 mウb畫ォ裝Wl)ミラ賦Kァ呪スサmz危ヤ牒スねツオホF5G豌メtySチoテ5サョィ搗TAX僞{`Wミ[駟ケコl!晨ァュyニ9RーB1、トo!) 
儂エ3ツZミ WTk鈿oz」\ J。9ミhカン9ハ 4wツ+ケ5c=~X+ハオスニFセm+儡゙gニャヲg+鰛チム羊Uj%+ェハ&(ゥr衛ミ:サヨヤ羔tム珮Nチ楴bオワA1砂ワシOC愎嘉ァlCM^榑V。XNOxルィH」坎j=d悃V[ワVーО"ヨd肘$双Y)ゥS「ノム1g\o\jォ餉モィ、2ホ#頬;鴣69eq]レ6z1disロ\kワ践wD#ヘレメjf;ニ蓁ッノ苜ニ=)ユTュヲ`ァ: |r、cサ0o挽Iユク)Bァ9レノ]vrg盤ヘ9H(ョテrナ-モ_6$%引シTタv* lモ7ネ援ーOアチュクユヤ'l>ンhュ臉I\k:浤ヨ46ヨ![/詒E澣:斌閊モh0ヨ嵌c#キH飄モリ:カ s+<、モ:ノ{タ蠖邂1ル,ラk4$Sニ8CVhН;翰aqスLニトクャ、5ンノ:su0゙ヲコァ鯱<ムイ7*sコgj夫,スッ菲ナY;8D9?ハm_U抽1mf伺ノWN」\ミyッfObテ餓n詛矇{劒ュ辺キPn椚D欸マn)N'アI>6ュンセェラゥユ剳>izイrアンロ不J3yア ウゥ_<ヲ心xG0"[ト,]ツ0盪セa粱クィ肬?fuDc$セ"肺ノ$ロウ]mHnレオ; 陥[Efョセ>ヒ66+k!ロE峪Eu|W倚rリユ穩<ャ鬟C蚕指ヨ}Jタ讀2ルケ瑣h#b狭ロB蓐;屠>ツ「貿3@F党{$゙寤コ@矢゚ゥォヌ-I ヒ・コムj 9j1巍ハホ+g!kわUCRニrGY%Cレ棒狂2ミウ~FY:スiイ/2ァ/O(:sューカ1イah@S'迢H「ホルt3n`s\苞yz~鷦-ム~ォーbZ,Iヤ齠遊aネ ゚rワ誼通ミ`S-ホQ};:Bヘナy鵆タ ト膾'w鼡匐xu0M愕(区協p4n艝94ロ'!ャD0ニbロ_'オメ氈ウワ刪/+ゥ+ヌ#搭(_碚rd\ロsエib#リ%ィkィ|ヲ鄒場ヒ穂]yニG+ノテッ>゚ォ IWヌ崋8*5vエ・ `Hニーカdテ褒P>$dvチ箙1ヒェw麌Aチiモトホー'積+WナfアL珠k<+マヲgrヘ粽g*Cウ}・"\HZ」ソン誨 マV+jj6J讖ホ>ニ"ムウンワホ:IチF,゚6p陛籀'%ノc%゙宸Mr?6ヒイ4q ラ ケ堪鼈ツイメタ>}'マ6覿トォMBY睦惓6クユJEMコ皿~Wdヌt}゚「籬 UQ梗ゥ[ョ絖yAス裹(_ツV、U8qFMヒFsOhN篶G。エ。 匂F ハンニ18螽oq:ャ'ヒニ。ノM$゛vP$O蠱RリMCモf=絅hサ搴6D宸狛畫結ィ IYマ ゙Kw隙0◆BoI釣_録Nノ^ユ53CツNXツ/〆[ハンz``iI 締メ0丐エ/}R゙C{xw6ケaLョカヤ!7ョヤ!7|Mル~ーォン調ケサン\トネリ」h Oウ`#ノ隊軾 ツ  ・0fツ佛ソfテ< `>,テテfXMpエテRリニ:V3・呆MX@オGナャo#g1;!o7九ロ絆クa|Rハk5ユAヤテ`hーA纏齔鹽EBイト?。&僕レ 途Nワ笛f.{/L゙ SX。S 詔%i2;ァロc孳ljケ)l1 守薇b8&AヲB#尿3フ A鞠。メaa、ヲ"透Uユテ9テコ楼カシトマヨヌ槻リ)ー1ヒDrマfアVウリュf鉚タ。メニz`ョ マゥ&ぴJネc%刈Fテ)ワ~*y垳0アワab崚ト壻ルKホ w9ar。痢Ecヨ9ャッsル蹉-|[ーHx0rル、<滑ナ 暁\K舒豫錫キ;aゥサNe曳Xヨヒアb ア{柴eャミャM,W鱸ハt邁cUャNカ#枇ォh=モ裲ネ4ョG%ヨネマZw降ヨe[pュ^hろ<3ウA?ラワィ淺韈ヲtコ囁ヘノ`_アkハ縋セ_W代 32ァカO熈リメ'レス゚ヒWPヨ驕F旅ヲ蛍BsXノ・'クKキャ)ロ ュカ黨K讀ンスッziロキハs/,} :3Rョ{8セ疹"恚a-|ヨテ・ 4Wp`臍ワ'チユミ゚gソZq\7ロ昨4ワ マチnxn7澀'クセロミc>ワ祉,=8ト pNスXP鞦p7フ~< ナクOムp+<梁#x5(3」レ1Z|ャ兀eG*ヘ`~姨鯔4、2H;ォ ン譎ルaイ=2-[vアГアEテ漁チーンウBw淆Hr+タ_9&>纔;ヌト_ツ&閻-徨,Dリ晧虍c\飴p)*リノ夥炸ッイソャ痂8セF?訌10古qMニムX残ク8ヌタ*怙kワフ='lチiニテRニ2<扁ァjロ潁Eロ1,岩ヤqJ慳jィMvJ9ナ孕N;牾鴕モト使$キ憺ヘLgル」ホ等J@'リo糅p椌Zムソテ踪フ抜ュvアYイ >79x章萠メスュスp゙cー%9瞻 ''ヨ3aク`要Lレ・ホdッャ゚Lセ09ル耗ムc橢{ニ7ナ]メ_6sB クエ搦テ,Fカ:.B7ーm:p l",ョPョgエサnニ。浚~ユkFソ邁桝j "ヨウ74滷ワ5x nナヘレハユj5:eナ%N=ロH庸)g.キ|ナx"臑b#cフd茉vノudソeフe6鰺=eフ^亮2Ks蠱ヒ テl2限*Z8ニ繦遂酖 シヨ礒ー椨皿xc蛹W0ニ_ノcNニ]佼ラ0ニ_ヒニ縊d!cM佼73ニf諺16ニ3ツ゚ホH~」ニサリ1ニ0ニg諺1>ニ`1ニG`ヌ 78NX^籬-岱キdDキ嬋{拳qv_ツT|張)スF]f]」ψEキ&「ロイユE?ホャ愚wシ30歹クy&碯フタ フタK0_讓{」Uネ聡ハfレンM'ヌOj9et 篋ソ!尺ンス疇@Rwレ*リ」ス.・ヌ¥_タL|戯y肉鴆タx|覧泓^涙.st-I.ッ薮レミr揖: >+Lタマ@ミワ。G;ル桙ケ柘@Oホ猴ル 9oィNWエ ナO髢<膺IWFワキ~&河ソt ,*?炉ツX2ヨYrニX朷32uZ遡ヌノリ拜鑓bァレ-&'ケナa釦lo^N? ヘ醵t&e状唯就。g咥漣c( iセ~オテタン_コ栓$<ーyg<ン捺>マテ<~ `/ g゚蓮S荏6o9覺,・8ャ-ネアTヘシ 9Cj,゙慈Gナp1項ヒi$、Qp劫[h<w▲c' cセ妥ノ;。トGァャ}kロラC。」ユナ9Kニ嫉Q渣ス1vNENノ亞篌マ危セ傭エ 厶v(「研(犬添1廊抉F]0慮テ"麈GbG<蛭篤エ;ニZ固^D俯A┨┤.Oサaトンz蓁5;詔シNp|d,ァヘ 霤R UE渉ムsナ_p`o} vミ/"zョ「ァ. 
[binary content omitted: undecodable zip-archive (jar) data containing compiled ANTLR class files; recoverable entry names include antlr/DocBookCodeGenerator.class, antlr/FileCopyException.class, antlr/FileLineFormatter.class, antlr/Grammar.class, antlr/GrammarAnalyzer.class, antlr/GrammarAtom.class, antlr/InputBuffer.class, antlr/JavaBlockFinishingInfo.class, antlr/JavaCharFormatter.class, antlr/JavaCodeGenerator.class, antlr/JavaCodeGeneratorPrintWriterManager.class, antlr/LLkAnalyzer.class, antlr/LLkGrammarAnalyzer.class, antlr/LLkParser.class, antlr/LexerGrammar.class, antlr/LexerSharedInputState.class, antlr/Lookahead.class, antlr/MakeGrammar.class, antlr/MismatchedCharException.class, antlr/MismatchedTokenException.class, antlr/NameSpace.class, antlr/NoViableAltException.class, antlr/NoViableAltForCharException.class, antlr/OneOrMoreBlock.class, antlr/ParseTreeRule.class, antlr/ParseTreeToken.class, antlr/Parser.class, antlr/ParserGrammar.class, antlr/ParserSharedInputState.class, antlr/PreservingFileWriter.class, antlr/PrintWriterWithSMAP.class, antlr/PythonBlockFinishingInfo.class, antlr/PythonCharFormatter.class, antlr/PythonCodeGenerator.class, antlr/RecognitionException.class, antlr/RuleBlock.class, antlr/RuleEndElement.class, antlr/RuleRefElement.class, antlr/RuleSymbol.class, antlr/SimpleTokenManager.class, antlr/StringLiteralElement.class, antlr/SynPredBlock.class, antlr/Token.class, antlr/TokenManager.class, antlr/TokenRangeElement.class]
k}モム-%8次$寂K永ーd&3E%繪〒ソ78ニ窈V`Gコ“+ミ兌Eテタ噪&裃[ヘユUqR3殞_ミ1」Rワエンカテサ魑゚]蟐&uシ?Z淡ヲ褸レ*サ゚Z`iF霧Z653?ィ隠Cハs`Hvテユカ6ツQォ%@HK:ニ6遂b.va>6ヲ >モ曹穎s7箟iロ^_p糞7Pw~.サ\・ラセソセァ xIオアシケ@8x*4ツ馗 ム洶錨_AーNGL ヨ$ 胆鐸碆サlヘホ;V|1qヒzト+ ヘu酢 #Rフアセ衡璃゚Eヌュ6ル,ウ /8Iメ$ウHAb~Y牽q廡キ1丶g:Eヨァ随ハ+ F;@ノュF>F4牙cg鐐M拒ラQZルk鮎F_フ8Mメツld哦<2&箍籀トラ5ネ8ケーSメ壥ニ鞐zt。BJスzカ「S,s=睥[d謡0 ヌ-ル英滓dク7i饒A&スF<モTy -テラPKd%j゙PKma5antlr/TokenRefElement.class}TmSSW~!ケノヘ$$ハ禁`-$AmオJャ-R・iッhMl+Zチ怱傴B/ァ鰊v筧3~L斐M0ムフdマウッgォ7仟T纔f狸 s3Iフ 虐覡トq6市|枷・I埀L\タE&_嶌ヒ&Pd+フ}cw頁o9フw&q沙・$セヌu7 , トョ4ワFpU`2g)7ーe_9儒ア5オ[\-鐇ェZ`ネjクzeヒYモ~Eュルトレユセ 堺BKロ饅)゚営ヨオェフイキ?ヨ73ワムオカvエフ=QO米q Qツ?チ2pSbキ$n緘w$ハィHワ・:ハカr蠡oクk[オ奚9ツ、タ鐓キェkTo5p樌豸ウ贓 Y雲崙=ャJワヌェソ罍トxh熨ツ噤ヌU5juカ]GC 貴リ 榮4@IOaS su>ヤ眷タsイ滕{リ壱袖X0麥#ス2シM蟐コ&1}0n%ック「**/J栖>$餞ン^ブЗ9lヲタtョロ70aロ絶%Bー箜慾oェbjcCサUル樮zfセB犒楾n;イHRス,Dヌ「磬フ跨衣{.ハl!ノz:Z川ミツタl j睚sd c、゙E*dFZHリ`)」タI「SH=B杖0"lァ即iィNMU:メcイSs}ト。掎G -ロ宿$9!ァBハ^モtsヨ オスF' /1チ?caミ)「ァ鴈テPK/kマ^`PKma5antlr/TokenStream.class;oラ>=vNv.Fホシヤ樺線ヤ7 カナy}[iリロ ウ%蹣ウツニsノ_t%nR扼$'おY―R糟L~`w5S崖:A匚コ2虎クスkレナニ0M{=UラヘZ幹4フZコヒWォmngヘj!yョ做ゥJヲP皈Nュwレヌ ンBウc捧3Chチ酌6ハtls+)LcCチ$ヲZュ&ワテ怩0"+リトilIxィ 確1ミチ ルYレmメsム:8Khp%セツ曚x?茶2ルI 賈@ネssヘ 咫シ#。rラq~ハKフ;!KNOィzヒ)7毆Iク>Cbネi゚ア(オ#xP゚OテP*舵0Av*MaX、/J メ+」ユ/ィN 9|ンuイ1Jセ ヤメNo誠Fォ ヒPK寳PKma5 antlr/TokenStreamException.classu析Jテ@マ$i」遇Oュォ&ヲメ<"郁*taijエ摧征ッ*狡。ト{c u゚Lホ=釵a>ゾ? ムュニ.」ノhルhロ煩#・ヲ{r'`]ナJタ "ュFォナD%。慘I鮑チ」|\齷?N滴マホ8瀘ミ(瓩ソpワシ[ェ後U2U7;'・ゥ將躯ラゥZヲQャュチニ」ハィ1=ゥモy穉J エ蒲」0ク篇G応@チM課ラァ]ミ^27:ミユト劑ワ^]ロ秋蹕ルd0& iレレ >タァフbKwレヒMャ「ソGIiミル PK坐gPKma5(antlr/TokenStreamHiddenTokenFilter.class攻[oU弱K,q湯圖%。gニm/註贍RSサ@ acッaゥ/啄ヲB TRERy、ニ !梭ト珸゚nlw拊di譛3゚フホ;ヌヒo&q##クニ ヲB滋 3イyYトl楪ナ訶3。XアA7zI 容活+イMノ鷆W臭ラBx-ラコ゚キrfニィ゙T86JオB%- fカf必ユ、Uワ,$ャレ該娵/匚j决ノェ+fnオ|モ,5ンヒナbケtナハ衫メJュbE@キhチィヨモ胴*G チ!gcn,辨XS/穆&・ュ馳mォクaVV麹OBYfケU茗+&タネ・[Ysモホ=74ラ~Yセァミ杠[イェY」痴j庚tャZ惧T銛5ウ「0PD逍4b.y6-qNワY p傭S,」H纎>>カ_FJKIGニb /=/ネT鈞a。姚エ体セ5イR゙ェdルm厦hヒ[刈曽戛タミミpマh8aフR'4<坤務「%徇<&[ Cヨャ8セ5ャ祟 kxKテロ:ヨ5シ#bDp:"゙ナi マ[メFヌカBロ@ Fユハ廐レフ GロホシUX・ヘ-禎浴'3Vキ7ニァxy寒ワg゙膕ョ捕スUwヌ:r葉Vス2r9侯8ナWネマgチ'ヒ/Mカ悪詔Z#偲・蚣外緒zP擾B駛>Wt]゚◆haf=睨ァ逞 メ[笨、Vヤ}コチRン;7CD=C憫ク攝簧't&s[ッ#「メミuヤ。ユトw潛oヒノ},{0q!セヘ>I?タPワオEmッ辮cメ;L!|{阮オOセ燹{ヘlァミKゥ3ヒIq喚モ价フ続)%ラマ造軋夘4感Vpロ椀ゥN! キユ#ヤメ「Tc紊Ap嘘Gシ*0縷/*e磧鐶/t咄艨 シミ5ハI$シミ肌リ:ホマKa:Tヌ奈菌ー]ホク.cョKPZ祟,マeヨネO┸ルX培s\ E uQ゚ア郛誓Kレリ3蒋サOd忤ゥ\ョ%$ョスZg古ヲ耋\g・ワf+n~フ o鬧シ駅x>gソ ュセ$・"ッXユ=メ駝|oセ}С邵磬ソPK紂ミPKma5"antlr/TokenStreamIOException.classmPヒNツP=SZ@J}lワオ籍ミク1囗. 
/xモ\-坦諺蕋M\~拝.Hh&厠恙99sセ>ソ ;h「蝣み:ハリウアo」Mー!xテG,"e「廐ォ吏\dハ3B\i評p#ハ玲AZC・袿j>暮XLF恆Y・3yュ諄l椄eゥ Aホj」翊テ°コ.ェー 'BgI_叱ニテvゝヒV."f9 {(:侍A髦N4憫 ォー尭チォ龕攘サミ゚Aーz7膿ラ{;9._h熕Q養 浩PKンァ惶 PKma5+antlr/TokenStreamRecognitionException.classQMKテ@}ロエIcォmjオ ZP4?@"zl驀[mw%Mナ衍<Q稷*^R賠a賚シy3;歙zリuー炎Ux:ャYリイPg(トbャBスヒdラ:撚)y8レ9f0O"O[カ 3u#ハA$ナユ|:&ユO礬丁ラjキ.C薗鑑Wx,."]~8PwBRN*ア柤`aロナJ.l4\X(コXG蚊h9?=匱lr5モ Er)f3メーオVv滾Mo梼タ靦l蒟ネウ瘰]K藷」:ンリr晟+檎批jル 1ォエ萌(-テ=Hサナホ Oソ,3ナdヒゥ簇7PKc`lc0 PKma5%antlr/TokenStreamRetryException.class;oラ>=^.f.vnvF6嵶シフ;Ff ヘ0F錮乃F~淮シTソメワ、ヤ「斉、Wp~iQrェ[&#帖撓\R爆濃RT饂像ZP貯洫藍X綿テタツタハネ 沽W担、輯-# ク$"ミ タА$制30ーjmg`ワ貿箪@ @@ 實PKnt眇イPKma5&antlr/TokenStreamRewriteEngine$1.classQIK1}鯲8カ纂]+ カn」W/E。PュxOkミ4#iwトc/"xマ$oィ−ヒ#裹ヒ聟ン=y県ミ┛$tサ鎬ツBニナ帰C M襦ヨ&F欺僉;瘻P%」ッn鬼-更SR影d饅adx+RIウハ0序ロeー癰`h/ーQッ妹゙砒キV昌ヲj*[<'ワクレ7ヒ「b"ケCェヨuEャヒH`霍{#ョ {hFメナキユノマnXhndィ:~B^A)。ッユDヘテ(ニhz/2トセ?。;鵲~>7。fーイムヤVep、レg ャ@6g(t`ハ)D+AサュT拘!ゥ饉ー髯$ョyk」'器顏<ソEァハケXウ野ケkリ崢廛罩サc7ャニ予oL|ヲB^唾ヲN擲ュ4)ァゥイミ>2トPK:ラョ6シPKma5-antlr/TokenStreamRewriteEngine$DeleteOp.class請Nテ0ニ?ァi宕@。 C% C R、 、カbwヒ)2クN8蟷x q `Kwソt゚ノ迴マキw&h 艶Fエ.撚Rルマモ{}X>拭g、 ンヨルゥ彿V棚Yロ9ン(_Nヒ'2gI.ニl淵kSーe(W2E]i惴ルoンス+メ蒻n)cCvィeUQ%ミgニ忙ホラボ繞F5剿ヲネリゥLqW<{ #hrlqユcz|カ ^!^ヨヌcョ冀mlq2リタ&ォ ホロ袷PKQTPKma53antlr/TokenStreamRewriteEngine$InsertBeforeOp.classQノNA}ュタ(滋ク .チe>「 船孝」F 2=逃G(/&F~eャ1齢、zユ抔殪vー咼饌ト1牝82!垈0eaV ア*ラ LoTェ7^:mゥZホアムョjァアイラ 》ユUtワユI淙z脅tFル壕エ4乢櫛魁リ 5ク。hトサ%ナ5$手AサU擬lXイ1小宜イア懸[ゥL[;ソ鰊+ェCレ畑駟ェゥ~ツョ(Eコワ慢uN5浙4ョァニ「TフPラd「ヨア」?{ツ朏゚'ユリ期Vフf・@ワU 6u鴆゚#ヌCN \。ォエA゙UネnスB<1 qLr!pホ「 fF`貪嶽f/<{モ]r」ムS倔彗ヌ$ヲ9 }PK/|ヲ@qPKma5.antlr/TokenStreamRewriteEngine$ReplaceOp.classR;OA<¬テgPZ`,4喀b4「ュ 警z.幼Q~剥縁トツメツe愿ニh33;3゚キ3゚ヌヒ+"ヨ「c*篝剋bウ"珠hK5ゥ' ,Åカュlス#席Zヨ皈シ%GェVゥェ=[オハケ≒^サI島カ「」u攵址;1ィG紺(vママノ+鋩刺オンt`tュ}E顎H^W靹ウ5ォS}、L5詮ニタ忠eャXネJ・ッ4 ソ^!ラ :vyヒoアi)E゙ン。察饑鰄%OjサュbΛケ0ュ4エ@|ーパ%7整Kェ)P罰ヲャ@跣UXクヌ(タqQカc|*`+儖n>C膣ワMカ\Zフpチン6gヌ9ワbヲD#o07|)@ノ_ ナノ1,ー峻ツ諍ク0$F?PKG相?ニPKma55antlr/TokenStreamRewriteEngine$RewriteOperation.class}SYoQ.ロタ8J+.ミ筆p゚hk-B%チ「・1ヘ.8-\ネ0(?ハM4ムト熄2;簫}ホセワs耆79廼フ#@$ャHイョィク殻R メ*Kネ装コ怱シ 0x ム稍VaX|l1┃{;=゚ユE'_キLCt セUCヨ:C$Y匕ァ^Qtア゚ チェ!ィラ琥ョ゙闥F當゙YトE鉄。屮v屁庖[_:ナdjVjス?2崗lネエKサ}.ネニ゙o/マノH ワユニfE慯p<ミK i(`Uテヨ5<ニ=kvc杼逃ウ!劼┤b2E!ッ ォkラGb"ユワヤ-」/Bモ*ュ"7犠}8腓ロGュアヌ峩wR邪゚癘翡ーpd:賜<ヲ「チSネガ紲イア,セヨヲナ%+rセ.ュS焼タpヤN|i逡 驛-f モ兎ヒ0#゚?ヨ ヨ蜩ァ8敘<ン ケ~<uチY0#6yクモ`鳧\鴈ワ櫞ナp0L@臆Lウ9<#ヒD' 毎コzモ溂'フGxNs\、>U跳カメ6<6シユL(aテキ哩y゙dl(6ユッシ>Й筰ーF ウレ8Zア1Gシ婬o#X/猊KアCスユゥァe錏驩J!ワタ-PK鷙|OEPKma5$antlr/TokenStreamRewriteEngine.class抔 xTナg_wws ー% nX券 ネj0> 帑&,Мワン( *U|U+「"JDエTEナ* ナ糺ルjユェ}hk[オカ・Vmzホヘ跏hフ撕9醂冱fテL9n怺拵T`雷セ`r'董)リ#1s&w)クロ 7睾コ痩]Nワ浴ア7富@チ/ワネナキハXDッムノ\珮y、極#LUpミ国xワ'$3<・熕 cY匡ケ佶3L枡タsL樒U^牘匸、瀑7寄 w^ex攸~ノ膰L゙`゙莟キ仂ュ7ヲaw~ヘテンw匸ヌc3ュ婬ソ3身?箟゚)ス褸怒x苡n|薫傲Iチ|ェ/ ェ瀰ホケZウラエ ィチHD勤iナbZLタオHkmユヒゥャ`$ヲE羌オニ蜜ネY、 3N究粃毎@veーjYヘ4ヒォォ譁. 
冱Z"アx(_jnモャエソ2キ|^ル竓啼 -8yQY蟯ェイハr⌒兜。sC%ヘ。HSIu<4ヘヤテャ ヨ,ォ朮ホB剖ZヘIタレ粃謦角,N gkエ・)ZMウルiウ複VJニukXF0メュa楠a"ムアxT ュ\A7GK$[オ$ 畊}(レP姦・ヤキ47k宜IxuksノpシZcm3テ叢⊂"y当リ豢4hャk8「Uオュョモ「5。コfj2トムヨト(瘧&・ウム悌5ZォTFチg苣(iWェ'}mEOコカ慂lア0瓣k循Lリ・榔CCMKuK4ョ5>=亘「ケホPi"; !$袁3Y![ヨラ「c{i3」[鰍SPェyEf.6gUテiL@&ホ@ーH加、†囘ヲO?メヒ4y瑾羽「sEF@No冨hトQャ瓊。ヲ: 。韈鮪C敦svシtメ9螺+dBS否テ毎コ:Mハk9ム0_ィ9ア6e5:& ,渚7孅ニ[zフNニ4、フナgニ[詼umM=k #c*フ2_?寥&iノQリソ餾yァチpィ吁レbメゥ$ョ<GオJkとFZ?il詫スWVンタ&&FSォ[レ「レシ0gム・舎ャ」&ャbWア ヒU儘▲ィゥ]F\ァャ-祈Ckルd幌至ウ#イ=5??[A譜絏k オ5ヌ|ョ ]E3ネ%Cフ「Qナ,クFナ?/Goュ穐 。 ンロロ*セニ7*bhSq.}3カ  鮮+ュェ隕エ┗│」=0ナ騾n。2IX菩6kア榾ェーウュメ託ナ溢}Ea3ホWq.fr w7ー+ホSアW0ケI.)/ウ 幃、ベv[c」Fセ Wォツ〔U。ァ危闍ネf&7ィツ%ワ*カP庸3Tワ渥=ロ-E#qュ王jヌ6Edェリ.イTワ*イFン.J5)Y 鷏R艇>≠゙G"XVンs襁トワ亮テ匸Lミチu屬lリレFLKO %g雙w梓<ウ2ァ(從ラカr磁[!-ネ8ハj艮H沖 ュ3菻」ュィトレw!ゥヤ悚ァニ tu戴ト督ヘI:3uT「kォオPエ~タ"ケJオコ5Dァィ%*3ーァ(hj5ニ&Oua9塹yヲ廳0ン2:BュュZё 晝R笘ーg]゚ニ?ソ/P軛ォl柊ャ3ャ?Hn 7ニクレャ8侭q~ッ1イェ椈Upム#p1孟モf)=N,8曉オ8#ルマヲ冓gチニe∪m\rdメ[*<イ・イ"[ェ1ヤN!lV So<ャ$pΞ殳B< ゚U+下蠧f8)91dir:モラ 軌ヵ河gンサッ 尺N(瑶態.クk :続*}gメ\57帷rhrャX囹キ・曝 「7潜登.Eョタ`\Cマ-Gイ[17aカ送レ E撫gE/ヤm;穆タ ヌ馥?"?菜;0リ゚!カネ「ョ?遵エィ・)ロrイム等:盛ュサヒソ7ゥラXr p;2アΣ久レヌ;タn披.フヌンHッ エミソ鉅+ヤオrrウ;0ヤ頏烝沿鼈Sネ6j'ーロHヘa2ト[U|yr、リZpテュHク1%/W「:ネg暉ス滾a嚔!ョヒGQ揉B嗾 ゚ィネ」p燿エ% ユ渝j「x関、レhb+ケアタx僕}N"ヨ$「.噐レG#ェコ0イ鳴lヤt[qタkロ肖・v梢ロ剏/リ` ;ctラニ霑爵pmエw~w」]曄睿Xn-ux・絵)>6mV:レ騏縉Jヤ` タェュTJUトヌ6L,(45ムJ據`那ルキ5$コ)sWb8ム#茴ッ#,メ拇、pb徘眸L早ゥ_.イ0_xー@臑択"$#"アN1頼Qリ$F:1[hセ]孤mb")8$ヲメ殻ア.yシmャコx(28N1MBホ7∽d2 3%dスГ-ヒミッリw"2ナIw℃屏聢>[9aFフ\クD9構リッE#d>ゥ柏,ウJA*%茯チ-$起萄屈エテ戸サP\[Pワ暇}綽ヤ K,ヲ窓:Nミ頭孛d%K堅モノオウ! cチ嫣ホ"フルウホd?#f9aBc砥|La4音Xヌネvjs(ォヨ交レ:1・ヌ+-ツy猿<朴故Kケ>ムxm&6テ#Zidc2ソn[zv倹v\B.5 ネyY#!吏ャ筴T5ユ9 %蔗~"キ"M瑞+註遨:&訐B錐re_HX們ユa?I櫓キェy8#*\M#zID"+gセ曩sS衽チEl"J"3忙!テ02 1_OtョM帑W}V_Jm 8゙クツV華ャ#4/ホャ.L」崙ォKm[;[チ.功リ+トミb汞jzft`贅6ハoRlマJ-舵ツD; ナvK4アデッ;囃'舗%ラァmサ.投h腑#dキリ5ニ毳リ卓75.Xエqヌコ/Lキym鷹オIl」m(チユ}5ンN%イ+d;橇(#v。ヒ」、^修トe^嵒a(e'JメxニmtTギ設ン/y化 モメPッモKウ笛圃7交Cャbbッン躄メv瀧IGゥモ絖ハヨ卞コシ.Z_v;2シ.ケレメ康ツスツ゚iロA>}゙O j/ユHリ廊'j韓娼「 補1ヤ管ム$Qm~トa\-棕休Pシ隈トォxLシトxWシ焼ロBシ#=z゚ソ/<1F|(&QソD|Lォn9ZXヘ6nァム#uzO7ト~糟アスbH2$ウタs /PBq祠埒1y哦コ<ホ5#dタ菜ェ3マ"f`/)a科ハタ3ー[P*恬ねKォ 7ネ゚b郁ヲ騒苧當6D伎B9?/N6^}e゙イX1リツuf ]黛ス+Bイ+{{kbQ親q"ラ"゙ Bキ'eM#I,kXS)・ハ、Iハb>K6ワ膾ム-゚=+Zニ['ウ4\ティAHテ6イ躬J$0レネq僊 z1ェ%゙qスワャns!TyテミNaナ4菎慟脇サアシ]オrX姓フi7ョカ0X沌P・3hクロfaJ迦Iフ(リユ0= wQ4エ@雀レ狃コス]ネル{YNeJ皃 C-hァメoFtt'ーュ嚔Vリ4c∵]zケlX9←タム セ \ロラ\邑Es_ラcFκqNマヨュゥl縺c47ゥタフカセ葩^治イナ ンrvクN>-眛Dm[Gh?@ナN馨P^X チオ %ワ&1U'X;H禪(S島m6懷4S$ ソIセモe但'敖u.O9テ5鬲メtヱ% 碓xD朞雅ヌミカ紗Mツ=ス鞳Y_9.?ツタサゥヲnA参hs<碣憎1ヒ'IエGメy帰/ソリ!?l>N症$Eヘ" セ嫂前捗E,6Hエ執8/@ハ]詫pア橇j梺_壗Yワョ!ワャ! H渾ナ6xッヨ<ケクヤ リ、sカ_w凩QVx鐓モ鵄ワemヤ剌ヨw%&nnソTーッAE. vテリ 」ネシタb`5mタ]>xp粋T?j;挧モsmァsハスhカ{ォ袮ィgカc{ Y譱クtヒ\猪aスn;Vcヤk[n狭サトト;鵡ヤ*c之ヘゴ,クヤ鰰?Yホ4-%zGl 優害豢f葯YWカ0避ヘqッンV:b:"ミt┳ヲ# !ノッVト )w^晶SEレアJロ裡r:)「」~ ラбコ戞「H伜0溺 咄qエ@ツUi」 qYハ$頗u(PdC;蕃y呟甜┼ール0ユ,O災|6/bメ+[KチヨEツ鯉%(廟メCツ祭.、 ソ蝿ルス/sK漁齒HLフ6%PK\エPKma5antlr/TokenWithIndex.classuRmoメ`=oju矇dsタヲャ瓔ヒニチ%&$ト/#cミォミ,f'?h「辛Gマm 鵜モワ懃ワ懃カソ~ }yl \]タ2ョiクn m牝魁Uヌ 峺寺k:"セ・羝ト;:基梟(、=ソ缺*ィヲBヲ碼^クッ清*マRAヌUXlyセlヤ?rm遞Gfナjガ:忝マサテp顋ン]ゥミ゚サa3隆モ1lハ30 V虱0 醤ァオヷケ /|uゥ迦ト%ャ L\ト フwy2:9qシヤ+H牾Bbァlb%Z)ロ サリTミキg「!ュメqミ綯O荷欧? 
娉ネニAミ~エノ!:メwlC:CナVy]行lサァ48瀝Rb恬。c枇mK_ホ惑ホ9ラク*1ハ^コ: / $K!イ他0賓!ノミェ{て|A椶ー詩iA匸dェoHホW馭採lN3?_只4堵"r拗,橋テツカ~ rU.3ヲ/枕。$テ`ネ搴sSア謔芫qセ$y愉ケマs'影ニカPK\ス]oPKma5antlr/Tool.class杭 x\U>%肉2yY:mレヲiレ髷Lカ6v》。mコ&m陞箕等妖レノL丼 挟 UムV!。彫(クPAY筆タEQQQQ゙%妬!ワ;゙sホ=ワssホq"ェS>讀'xj>bキ4モ、.ヘ 紅J?K袢<+擘q阜x4ョ明F啝i熙ゥ庸ョ4、iPイヌYイ遶シタIx。|,呈4'湮坑ーX%Nェ爻/敷卮gHソワ イ*ッtメLn断4ォU^」Z'UZ4nVL'ユr怖ッラクE綏a゙F際T>K藪NZネ屎エキィシU蚓Nj費モyサ努pサ努.uカハ酳ヌル"畚'v~槎サ擽歓メtH纉9ソ4ハ扼:Sё9ゥサU8i/Ty藷レxッハA'miBメуュW胚・秋ユ8&}\繃N゙マT>隍si.疲"i.VB(/QR/Sr艮ミJ扮メj'虧T、ニラ:)トラネゥ]ァァ忿=゚ 貔Q唹kルユ!'?'ヘ5セI/hナ|h|X#I/k|ウニキh|ォニ_ム6坐ェM;4セS>9ヌサ4セ[ワ」ラ5セWエサO蚩H?U蚩jワッN/ラィニ゚メ瀬TyH蘢NコE儒fホィI縊 ナ5Gv0,&yP紕ヲ耡<恟坐ォd;>¬:ゥ欅ネヌcメ<.?疲 。柁O鈎Oic"?疲i此ムY!yN uユo殺。ョナLコ延#シ!ミ0MlbチH朴8ク:m濃キ7j1チィィヒュ xサB疉,煖29e [ナェXキ・オ)?穣ニcォ+闃x{zシ5□)扠%鋒}&テ7ラ) bFト幻E湖タ5"M゙ィム ユユソナ&盛ヌリワAワ[ !%QX!sリnr`sフ衆Qモn ъィ睡崢。h,ナツ z2キ"メオヘ cェス/ セ ュWb寫ヨフ[bヒ腕*ォカ16ミョXトl姻t-゙ル> [嬾述俵ルNbv囹ニ6、6;エ」キ[l・68&}トカ ̄ュアo。jノーz9ニカ榜罎FLエkツo,&3ケメヨjヤワbアナD_キ瞻恭賻>1'フ1B1\Pケk?トヨ"R{Z婀僞ツィarョ>3zM0・?シ1r0ヨ埼ッ冓|6}ЦO'?]3禿ーb・4儀"ァ#iロワ,エ旙`k 掌ycンLウウノフ;rー閲サ,Ym5lラヒポニDワ 睨ンFー]74) !業2カ U%テサ ゚アq ネ/IセXイイj4G伍p$<.&O*ノ糖s打゙ wE肩`Tァ7 レ(跖> ョ2zアIoフ@タ蔟z#Hヲ)肘udi[1?。0(」ニf擴カ「燠淒MU'緡ヲi!ヒル虐ンカ Yメ nZth7|n\W.瘧f9。Xカタス草鬚椪。uWモ」#pF[ レp贖雑|・cセ$:贈モ/餮:榴i桾9z_ァモ$SYツtヘ!(光 o瞬ニ*ソョ H畧Sヨli゙イzモ竃tz憎ヤ%2ス「モォsi^導ラe相SbНoミ/奪ォ君o<ヒLnイ8カXァ鍼ュoヒo"ツ3-オ2゙ル);5`。俚#オー批レァヨ靹w{亘ア;,1:ソEo鷭Fテホ鰰$6|_?ミt#司B E它oi?51ヨンムクッロ5マナ-!鏡-f・ネメヲン→;窒ンX(。ニ*Fレ鳧'S7千C^ム"Mヨ~芽┣YルゥA ヌヷ、ナ\ツT沱リ溟fキ?lDCsbn"Kトマ0紗弘lマ:3テ櫂孚[匁/Y,萍犁ェ゚*ソォ?:救ワ稚宰3ヤ=キホ9~[・諒ヘト!゙<・ウハミチbYアaKヒヲ縲(5&Qウ5B鳴ツソ腑Kビ!oiトi)@?HNぽレdB!]aヲ「XAソ@ニr繙;v孳.Zァウ:+ j環1#{Iр・鯖Urt%Wqタ8ヨ#fu%OQuESuナ旺T ト乞T6スム閹plM8ァナナ&oHシ, rdト・゚ィJマ+頒K敦悧4iモモH+]゙`胡緯」榎ri$紲ェ璋怪9リ彭X<ウ$彭Yd乍 wtG4qkィロト+ソロフ<ヒリ未モ姜fモ ャ NW$ローー*Eネカ$U)ヨ91階Wニ*辮C%j「V囀*・TeショLP&鵯2Ihdェ}ミ瓶ノ3絏 恨ノワ殆亜サw.」逖ネPTユvランノロ遇セ@蹉{<-璞w#6ン゙`01e[レラ1-ャョ` 炎ホ、ャ`リgチGクモ"ーッ&*LgぽotトサワAo<ト講n3Dカn闘 d,"タツク8_タ琴fサアヌィテヲ1」Β`僣lケ瘉\+fサ{ョ1ラキラ2オ蚋Vオ^w式LAカQヲJカ帛惰:ワUa閘HFゥt珪ャT wzXcW柿杣)メ? T cモP)モ双bm-チu~yエー陥x,エ?JHヲコ2S%オロサ'/Cュ_R, sカネ#+U*Uコ祺ェAkpOK L扛・4PjテLzネ ネオ」。O &Xiンャ:O錐9Lシチ>タ。+オJ攬ヤ玖|j ヌャ%~縟:シ~w縲Uノレn-GiVノ Eョx>.gネモUeョョフS・Jハ 7v≫#6姑lTn踏-Yョ鴛Y'荐R愃ヘ較貶鴇=Xキ垓マシCbャヒヤNゥWp磋W.ヨV%Obアー5f刳ォZ^ j]禅ク。T薬F 厥"+VケM゙r2/セ」オルシム痔ト"j螳&\燠モ(矜リo-m邸暗 G1 捍&モG$マホT ンR<禮w゚メハェ,フDメ2ェ41e.ー[Fユッ&_モ‡iエ、扮vイ7qx・Io)ー涓胼P,-啓t[リjニ ォPD.顰ラヒラ[+$棉ヘ-。5n。N[ヌヘ{:ツチhR」T。^Q叭セM<ツ *ィ-エマA7゚_cュ鵜O*=轍56ヒー。|CヨccZク'G?Yマ茄w"*ウ檜イェカ:;8lYィ歪ヘ!ソq@L滸1 N購Dm者ハKiチ腓X柢~ワ|ニ!マ>7秧ヒ0HP["&エ襄%PEo・ネ旭闊ヒ牡エェ繙・wSV サスムヨpト0ル|マF f~`ホ・ユA」ヌ\&ォョタE.2汢%ヨNb<_7'fA^ネス~ホgQル_$] b+ア板鬣#スHメ4:DOムハ隆蔡)1レ\yB2ソ浤g9ウマ仏$ウチ/舵"E{%幟ezナ_オヌnッルッ嵜"/C+捉~IソBk|=Z. {rH&G喀iW゙ゥ杪メレ蹴9ィタ耡ィPヲ<<@ナメ禰3ニ;@. 
子CT:@胝sB;膂イI蚩ミョ`V`}7l3リh6婚*唏54譴抂 エムiエНe#xモz|オム2レE+ゥ禦 キヲス イ墻歃*マ\,、ハス、g「=ス腐エ8Sエネェタ廊メMh紛$鍍レキウハヤu'レ?e瓢)\エホ*オ(Sjレソミ;64叶V」ヤ:ヘチNa+ヘ&?_ケ,ォィ>Uェッゥ、ノGh<。N1ナ5u折ユ h]モミ メA嘔E}LJxrンKゥйィソ躊/fNbi,・lMョヲ呰ユC4kィチ照\ntpc^ ヘ>LNts社蚤ェlO(Q%JXォ醍aPTgPヤ研(ヒ5em敷ホajヲZ+(eャ]斟>ャ5)ゥy#ァ<ヅ)X!C縁$諦 訂 -タヤツレ!Z4Dァ5:騁 4カ怜ム!Zメ7&t4ェe25ソi顳ュTSセBy}TR枚ムRm趨}ニ]TヤGオwQ^Yチテ鉱吶9骭>r4門ノg゙ ュ0?ヨヌム列ル|N<孺哀シ叙ナ|_Iヒス映。=郁ソ!ホ"*」ヒi]リセ噛駛モ8(F7ム5Yコ拵・胖5:AwID、゙Oス孛&WP?ラモQ^Hp p r;羌飄ミd呂G鮟ミ訛>@゚迢餡セ鯨ュ罩ムリ5ルヤ滬/ロゥォ痃9 <必'」ヲ鉢儡マ!x曩Tシム鞜」ルを啾Zユ¥ョnナYュYfmcョy*諮#^夙椙ッテtf;ご`椅ヒルオエ9ィオ1o6始lTゥ 「ハヤ!:ォQヲM82 S崘i8ハVHワv偐=ョΧkan %g'愴S奈%。xカ蕋eヌ陷G鞋Fg儡fマCァメ2'艙ヒ怎ヤムGd<チYV粋d」笑>ユァ囿6HLコ、Q,oフORIフ#wレ|リ橸Mホ釛鬼コIiロ@0蟹?コiッ畴タ策 ルァ|yミセ#塩bエ馮*烱メE烋!莚 ッメ&vテャ Bクヨ・pィコ.ェタ%s閔廳承kサ鑛V餾ヨ-~: Xe;ケ訊.笙=Kク管@キ2n誨シ7鎭゙ヒ*航盥] xァ TIマハQ:?a扎 ゚)壯ホ<<擱 Sリmaka>チ9ツ参ャヲ「ホ?・ミュ・ルRヲチ孟&フIニ(w;F[ォ剖ユユケサ饒ネシタ 班#晉モ^@cy17ロk>0蟷.霽/コP~衞ッ釧ォ鬼ぉ衰淪H3ネノ+ x%阪UTホォaオ整dMエjF「襄`マIヨXw尹ya,ラヌa?a鱗・v吽_nWリ夫鋪_mラリ'Gリz。8I6*盖h"o"7oヲJ゙Bsy+敞ロヌロi6゙Iサy蠹mタU脂j/恢k!゙uンキ鏨9K<ヌ署冥、,晧琢吁犲~刃8BeFg只4~カィッP$ス克ムgh0め|カオ亟r;キム-iセy8Dセン[゙6灘f_H殱}ム商=2J8ソt?6Z唖゚C浹ヘヨ齠\藁7イ所ルt"h。癖ォ8Lkンハ1:九エy<>H_@]|ANルヒァ絳蒼.」!セ悚G孜9ルチyVBq鋸クU慂婦必"(`$Kミミ}y0ン ミセ%ワB"{レg)Y」ワ:D_ノ 罷qェ7P)゚H廿モリニ。'ニfサゥ)Qf キH9Auлムm(%ZhJk\ソ*ルテL66カV嫗7虻|DLンD zキミT2シfj膽。A>Pホメ`晨q58コ塢ネ盈ン゙GrR犧セf,bV」|Us、$ムo&ニ%オ」tGu施,Orヤリ:Tスwf籀炳 88 ヨ∃@/ィz\}ーG覲ウ" }嬶セ ミ逃ス+DGノy&sI9ナ\,シ-ホンュ0疸ク0閏・ッテォ蟋^M磴雁煩LKソ梅鋏 ミ絛珀k@7=YノZjキ]耘ヲレ=衂Cヘヨa麸/qムpZGアp-o メ}シzeラfョ&わリメ[@ル?PソM 'lll2?エWkネフ 螂誇_拂ヲ挫フhxG、ン゚テZ。\ヌfェl゙h筒ヒラォbC&弼セヒアト餽]#那6'ォヘ。ノケサォ鍾Fコヌ戡」.@DヘイUXJノsXキmk4<椢フM,ヌdホ =\R{+.ワI謾 ォ[SエBム僵壺アJMTTェP4嗇8iョR@ 摶*エF)。fe オ).レ。白OOンハ鷁&R\僖(蚯・2勸U*關筥[秘t2~e而班ロヌ3ヒ)=ヌメ#ォxN5X&$clコヘ堤ゥ 5得 *椈、_hネ、襲セX2P甸2ハN)羹ァ罰2繭2薜S「9I裙゙)ョ。ルハ:h防"6DiN新剤鈎ィsO)?ァ!ケBBGゥC|オ疆?山G雁 \讖破&F+m髪廢ナハfZ、l・ハ6レセWiコ薨.。<モ添^C4_wmッv挾8F ムT怐弼N゚G蜉疫rイ,喘"%<タCヤ)戝eフU/PKma5antlr/ToolErrorHandler.classu就Nテ0EソK!P6,3タヲ9@6)エ*オHー$V簇アォ苔ツユXpUaCUウヘシkセv8術a pア%6#Fmョェオrw7S2Ns(-ョヨ齡ルЛゥ%龍!_!Ok2U<ヒ隆p'@。z}#゚障'Qリ胖PKgcuラ>PKma5antlr/TreeBlockContext.classePヒNツ@=C ・オ橿<茖hヤ`1$jLミD%.ワ櫓オ商ト_ \姨H\ヨ~非カムー鞋ケ迸{踝|}|ィcKリ5qd#ォ ァ ッBニz0)((*(1$],NmpW\<ンx杼` ア#ヒオト1サンソc深^3$レ麺ッN練ウk1ヲ-ク堋Cケレ6]a凝F ィンzソヌマャP、縱~b{スヌ也 2T0ヲヒ:4,(cシN;3ワ7ヌ崢stl「「cI|羆:「>qeエ#N ソカスヘヘuメ<ヘウRoh|ym惶犂vリ嘲7唐eトヤ-s'・旅砦*j<@=瑾Keィンb峇。W!xXト#sムィネA孺D7[エ」照巳畳>'ルOサ1レ{i ト'゙}ーマ5ノ<$ロ髻&ヒ コ "ッタS7^$+:林ウvY縟J-^掎n ~/>オス$餉ケ、Dム、WtォQ**ZURロシUエ@x)QAG.C.vあI流3#ョホ &0D7iスtラQコュB?C e1Bィ{0PK韃樊PKma5antlr/TreeParser.class・V]wUンモッL&SJモ([セ$MQPСP(Eェ・`S(アL妬 M&a2 ( * ハ禧+雰Aンwf囈4]>゙3ウマケ釛樵~ タJワVニ蚤ネK&紡「 ェKL,コXFト2テAト=8、eLA5 0トSハ4ヒ0d`ノネ*8ぅ 斬ワ 訛Hニ嫦K,2゙,郁'I ゚}ァd改@ニ^j:#罧茎|、 $エチy 朞゙]==黝4テJ和ラミ?榴ラK睡Lン7u]ツ\M%zヤ刊血、ト衡ヨ街哘リb&忻jfF7#5S瞬1ツテJ埼Fッ網3絃ッ逅vD '4c4アフク1J浴ウヲチkkォオR貂Aィkウ M-ェoムモヨA Rキ━失キ6J(エ鳳ム刳Q\MOワミ{ウノaンラエT碕」1ミ:cぷ(/。>殉DЙXdbンュ・2ォ ヒ\kノ T&5+ハt3)]ヌ「L[%ャ ZツOヲ睿q+「[鼠MY懸ヨ警妃,ィゥGyf1 >SOァLォヒ4Sヲ賈H}z45ハR3ット詭LOJリォヘ4h窖贄6ヲラVヘrpス択斐セ=楹J詔=髪ケ蒲アMネヤガ*>テ鈔蟯WT|/%ヤ澗ZbL7ロ[爼殻闢セR5 w 1イ{J+U僑ウyx!~JフワLsカWtナツヌo彫.貮ケ゚+a芋エ絨I62楹$S甓0e'L<゙ノl%Sラ直?鵯郎斡xKp*|Nミ]V<チルゥヤ效?X極H98w`卯ォJKァu#Vz讎{8MEソTヨセx8ニ冤,/Z「AスbTゞXm{隨リZラホ袖wN:Hb8ケセネキm(ミシ)eQ~A.怖ッ篏苑ラ9ィ"゚rヤ zャ@3UIリヘ.Bし2x゙ョUカqラNl)ュ(ョ瞽ュ.エ卅e"wB沸Uナ炯ョ戍シJ1t#ユwイ hュ袰侶=D}ロ|キQン T nタユγfオ-(ソ恕゚ 脇;u「∋,・ネW"軅7z\m.y舖ル!=ルラヲネ_;;テ馥ゥwN皹_袷9U|E)ソ銀ヲ須n」Kセメョ7ムrィ79ナ, C」0vリh叡_s渋ケ{f>2エ0wナ4ツ}ヤv黌旒新|?!2佗lママ。M9,(チ1F$h<膨當9メ8LK?K穡,vナWロ8ナcбZxチサ蝴ぱ皓b 祕覓4晋コフ csア)ヨ4-{Sサチui,h「W-ヤNナソ(ナァ$鯰゚(ユ1ナO&6馮」嶷。さCS}゙ナル@' bヌ騁Aミイ゚マLルス灯ル/3ヌs綿悳慾8w8gナ6F踞&SA"=#Cラ汕:'磔ー5フ。\CE*PΝーLK庇(隈ン/="カ゚:ト:?ャトt`#O迄fオ啗Zウ ヒテ<霰モ.k#ィkXカエッQク筵ツKb猖ノ ez x%「b]ネ-WYk! 
N^フ嗽粱儺g壗$セ僮オヲゥkト:gP モヤ 9オ擶_評vタ`?「殪ルbキy'脾OН[圍ン「カテgイサチハ?PK」シ,ァクxPKma5antlr/TreeWalkerGrammar.class}T[Sg~vウ,渇 レh究$ =T ヲD&=mツ4ールM7サヨコツ起gzテL 3係ス鷯ロ釡D@;凩ソ7マ{>|゚ タ4o`&k穂= k@ヌク.チャ$s:n爬[誦,`Qテ墜okク」aY'サ"サ剃、GV・シ{*ヨt犬a]爪WQTQR\モnUキ。@yャoョnラス fmゥ匈Lサ)znンョeヲYnヲ8Vクt≠諜%慚ラmア7ハツ-册笈VカpM粛(!5;マ*「鰈サ・稍5ヲp,ウユRp61y<рチヲTDォオ獰ー=j'棡廰、%シBBチ+gGリルIッ゚ュ亥コフxク アiZ;ツ]qヘFテtァ・列ヲ8.xM <4愈ヲロ.ロ8ナセVш'>チァ>日溘 PF付T[ieソnm-K*カ 、リPマ【\Rミ[5ュ3孰ュwれ ソ蠻ヒ".u緕 T2Pテ齟lcG、ウシYV髮&Uヲ疎*イ虧fCV}エK救オ*\YU譛ウ泅V}ォzワ[ヲzョロホ」m緲;ア Hンノ [タ4コ+祓cィ;ケN+ッa?vフ困・んD?2カahe恍ハgォvハロ「穹"ム}゚T゙褪ウ合ッq浬;ョga辺K臈C協@螯瞭389ケlvミz架{ワ申ミoノアmzハJ'OZ緜H.移g6崢゙R>チ顏裲 塲h榠Aク,D.<ケ^~ワ"o騒"9伸・ァ$C/ムs`0Az !メQィaDッ*zモ マH知フt澗ネc'ネ碓&滝ニ)垢ム゙滾ヘHヲ貿bナo飩豌コ1栩#覲3ムセ6ヤ辷%テ/。1zdシ }cRヤ%ョGルョ B喃zS鰯 ティラネヘ`盟,e 、款\ンヌ ヒ Iど諌}。.IRB^A7eワwAE*J2Vナ@ニW2ハ2セ&゚(ミュ穴━5Dヌ羞g槃wKヲマタカニIV4]nK<ヘム屹ンf^1mモソヒ0郎xネ+8MホpQ3m^鴈nsキョo[$閏d2フfエW=g騅;W]"Z^ーe・エo=゚tlOツw Iテ蠎マヒカ鉈カチォ-ケキPUキw(臚 j Cワu隆9ヒ個マ!屯涅ホS前妨 +L^ゥ9ラ爿ヲHUルMヒ[f*メク*。ョbU|秀*゙テ隙M`Vツ槙0ヒp鯑ノqメ*~トO*ョc フ0u[リセケヒ障 ムリwZ-N&兵カ杰モmモク天。c[※テH飮蜜樟ロ。TFM」XmTェF餡ケN5l髢ヌUpエ$エU<∨0モnヤJ甲yスTィ7 ZセVkhユ|ア\y@>tロキワ\P鹽エC 飃+質黎f_~}窃 ヲ.4蹇,狭u+oワ ノS艘縄ct\塁庠XfA兇hヌロワ/8カマO法微r?モロS 3エ!9χrワ汗KUアモ7C &鏨iv伏ニブ稿ハb<ス=nS6ラ゚イ#ウBトイ$Qラ。)ナGuXs=マnホ罹0辰2ノ鰔|保カ胄ゐs勃 W體7Mハ(bG陝"v駒a0アwt^」ラコンCルCー?テ!D\,"鐵コ霎vп.オ辷柴?/fサ:ネa@ ホ7AH 腺゙)b碇・_感c、擒0&1G礦ノk 。ラtLトrォ胡◎Lアホ#畿イスar・}7Hア#絃┝河C\H]"I'サ=Dc D&S"葱!-「ノ此ヲL]Z<トX巴(謇"C蜴K゙t@.ハ トR罠花゙鰾2q辿!ムT汕棲捺フPDゥb"bQb刳ァ郭П鐇PK笵lハノ-PKma5antlr/Version.class]尻Oツ@マ、・チキHt&班B7$ョ.0ン喨兪I6衞ト?タeシナトナ慘w2゙s鈬ゴ E80pH02IォvF3ア抉ィigャ邸Mッ ユa、R-牌E露G+ヘシ" 觜h9 *。%フcヌЩ廛3鴉ァソB&P∪%-猶FノOG£ルワ橡」Bvャq%セシ rーンNメF uヌケツ、nP_-瑯c ヤ3濘nェンオサ=゙エ 愧ys蠏p ーJャ陶ル\ ウ7ラリfョョ゚(祥コヘホル馳ホ+閻/オシ4峡サヒイス/PK莅ホ'ッPKma5antlr/WildcardElement.classuT[OQスlサ]n號。ワ啄ナbャョセ@4<陦ヤnwノコ倩ッツJヤトGQニ9サ[Сメdホフ7撕33ロ?Pタ[Iフゥ佗8ア GY9茹yG嚆街吾ョ{*ク/ナハQ E$ロD?S%/(k2径<`・髞ャア箔愚ィ諮Lc"]蔗耽?Al,レIc$'鬲>i(ヘ賽エラ3沿9}Qマnソ乃 YメoLDbZョ穎禍卓!ム、蟒腴リI[ウfIホム?リ<。3dwaPKゥ}4PKma5antlr/ZeroOrMoreBlock.classuRnモ0=nキヲMモ抹^カ士ーKモイrソuBbヲJ"mチ?oレミ4ゥ「Oメヤ 靭 9I走m廻ル>>ヌヌ:タカUー"ヒjyャepWTャcCホョfq ラlェHC流コi4膵WミTp!オm9┌ィ巖mッオ瘰{[ケgnW0Lヒッニテ硼寳オ8;ZサメtO8ツ>蒼zp弥コ釣ゥGd゙シロ&コ槫$」n~茵xヒ覩ッ「トPワアw(^XメソAxk・ロ=lI≧ X是、Q>-75ワツmw4ワナ= @チC Uョ*#ォX卦嫖"w 。*オl墜キツTpUzW捉YU!~褐1:AカィQ ッP]ヲ1゜PKハY')PKma5!antlr/ASdebug/ASDebugStream.class舞KOツPマm* 蝪tkAqゥ#!ゥイ(q゚筵)ツmR虞ッ/petat2N-峅ア漁{Oマ:冬~スセh。哥 Z蛎))(+ィ0、讃' イ゙クfH忝7廣5\チッfS廐ヒ梵痴xp.ラ0黽%i゚xキ\マュiァa遣{ォ=アモ&ノN'JG」;トネc隶Jンコ キgN、.トL巫フ 7M喩。/BオツイPーニP峻z1clf妥C-n勾ヒュi沈rラッEp$咀唾ツ勦ョモm:」柩>=ム〃ヲ奧モdノ サーR葎李。6゚!{/詬ユ摺ェメ7 ユPD俄9l,0G束W"L>ト\'!ュ澣唄#N(;F%MBPK"、ヲaqPKma5"antlr/ASdebug/IASDebugStream.classU錆 ツ@DォナあK人d%ィd1^`ヤホ'Gl.<'D餉モ秀u}セッ73Sツ\アルi填|爰!ャ\/ホ蕕ヤ*ヲハエ -Iモ姨ァ%チwcゥMQツ:ラF戍xW m}0蠖:>+于6b[6溷ユッヨン鍵CXエオ団ホ'3"zィァ7 1bd チゥ9ニ?PK渤YイーPKma5#antlr/ASdebug/TokenOffsetInfo.class]衆Kテ@ナ゚6l囹、F=ィチC佞s*hB@。・g吐キニ トトO薤h<P$U^゙フ=跿タ&4ク]ィ4イcタ3ーヒミ及&$M毋ナタB=2ォ駲&、ィホエa取 麁叢FH~[?ニシ廢qN+耻畢~)ハpDsZヤ蛯_区7+OL(モbシ捲# ]YミaXリっpノ*/ルiツ: y愴苟フIシ芫Mヨヌt詮gv64zケ ー0 1I{ヤ抒ッPオ70`克クJ#:U@ OコM6嶽。M溲pメヒ_層薇M^ワzサH6レPKV3イヨ~PKma5antlr/build/ANTLR$1.classmQ]/A=」オォkiオヤWPmI礼灰DイAT(テアZ@リ1g醂 >Z#"NP競惧fx髪g|x gg2ト n・0L0L2彡PケメW?マ蝉メ&Ba%メ<チ`0禄マ窩aZトレ遏fx"゚^bx貰9^"テ,テK ^fx甼ラ^gx甌o悦[躯ゥエ啜 メネTt:JFオノPリ2Tmrq*j熊Sキ。ッiト*ゥ7椣ヲW5ユレ)タ'」!=Aカ#ェヲ掲R1ナ業稚"N*悶キ)ミQヘ'鵬 K#QヘJ!埼zr"`c6ヌ」I518:6rl/VユロO赳ツzニ+Uニg9 ?ー諭ルAロ>楼リ)?゙ナ{Ju8?vソ{lo~呉逗e蝪ノLLpW気Nヌモ、WpPト~フ*e-'オ e"8)-w6閥。ヌモヤ 旅ナ-Uラフ図ケテpqK8朦幽W訪ヘsQ」ィ猊 PE蝙,@!ェcィ3旅術'cJ3彦三Oネ~ a、OFMS゚F、!栞ョN6ラ A;b鑛ナーf蔗ャ P&舮ョ亟ヲ[イ、jRヲ?蝟殯ヒFYu$Q哄R閏コ1Cァ・ `d テ圜C暫r漲セ0Tシ#哢+ぇヒ1ワN知'2ez基929\ルフト頸'嘩躙?殼F J)ォレシ+ィスク0.ヨI オcyァ測ュ"`IEル魴WvJセヒ:`ラ的ナQcW怛ヤT:)ホ$嚢 ,rクチp姉(漕Rェ/ )鯆%矣マリgLKIム4Yヘ$`[rクT杁戞^^2孝Pwホテ&;f鴿4拍!{6U:輳5゚カyサムM{脊頸(チRァ嘴犠セlウd{s'ノX@Oイワφh默jフ蒔モ{iレ9Vム掠淦nRム、]-ルコ;キメpユK/(ム}トB 茂9Rヘ-クセ6\堤シヨ{Q'*捜`9ムfヤcオ蝉譛q;sI箙#ッ}ヤFt;械/\カォソ;サイ`ラ狎ルuオ9靹,$?,eアクLPソ摺a!チ着メBとぢ5ミz Mwッ紂Aテナ5ノ)/+nーキMュ9ウワq2レユ摘l昨ソsナDbムU`XMuYGエ晝キ桎ケ殖.」逃SセA7~C_託#"K9シTネ!ヌクDu+u銜J-褞Zチユ挾ls「ロRPッ絹ヘ示ュh5「X~カル訝L ー叡レLサ^4a+カcレv雍f(ツ欽」nユヘチ0q^イ軟シ壌ニャ糊~「 臾X同唆ェmuヘラ 
R[サレコRオ;h襲庄マ"0レsャ鉋V%Vボ鑼、ーマンホ「ォマモケレfマBコKケoメ5皆「{(!Jル>鬻モ{:@:>* 放,・*Jツク@ヘqヌq'N礙Dpァ;NS{!」Gmリ喉wH=)o」U ムKnх".iC8笆6#iS8笊6#「エ%aRo8メ熾カ・mD匐拮ュヤセ恨9 ツCキ淅ィ」1r佩jp3エHウンニPKWァ/ァ PKma5antlr/build/StreamScarfer.classmR[oQ.lキ薹Zェ「「rGュw睡MH(劫倨ロァu+ンナョム泙&窺|バセ[ャs・R6ルルウヘフ7゚フロwYeツ5aヨル晴)テ惚nノ壬[从 I、d、&+!'cw$ワ頻消。3ヒ5ァケ釣アュェer(マ犂>u8CpキU G丿 1テh Lユュカ冖リZサ吻聡ーケえkV贈欹rスヒUッ峨ケム"~_Yモyナ>jpウヲ6レ舍垳N6.ゆェa娥R@pトPmェ>7ウGAQA,!ャ`+ ォ3レ緞魚"~蟻oタカスO・xkむWI豎ユ2lKツ#杖DツS @Ccリ}アノ;貿 !Jユラ12巓[BZ 胎S磴hヤ゚ミybh Kトャァ}、ソキユカク サ垢゙エ73ス」ゥミqqム 7I艮C>jXmセォ冕#Fw5@キリI/ュ筍ュ ソエイz#Xテ:鞜%bーdェG2ン3咎チuB~ォテL7ル浬>イ~ェ 「絆_STレ;サ檣 ヌs藉HヤVI皴Wホ&フy滲>?c対ョLJラクミ=ルァ懋テwB @ e毒<ル0トe 頭R#M[ヤI:*aΔラーブシツ&e]*ソPKソ柾RPKma5antlr/build/Tool.class昂[ラ~スフ0LAヨ b#ョvhl「u5F$レメツJ$+1ヘー 0コフ拙Y汎&ゥ垳I圖ヘスカ肯ロ、腰d。。喊5Mキ゚キ驅f累 ャIyフ懶|ラサ戍狒^ミェhツ2t#*ェ審眦ィ姦0ェQ1*&`Vs9。爨4&X2l慕Xヲ< 梹 \Yモ*ZqJニi か3byPp<、煬`イ欠宏<*_ョ恤qNナy<ヲ爿*セッ挙ヌ-鉾d|Sナキャ{T憙<ッ`Hl^シ$綟サDHサmア\栴゚U=ッネクィ. セ/?aPチォ ^Sア?単eg$йN勒zGZキニ;]ヌエニHX憂儉ウnケiァy$kヲGl;-。セ現K聖コニr' ラLI5-モン'チラメzTちヒ5$ヤ桝ネNホ>&ナ?ゥエ ッ4/)テウ扞 ロ[VrT ff2ョA揃DCタ季0コ」vヨヘソ)ョ6淦エ=.!ム&朦Sョi[yォyィ$lォdオ\jソ掎Rニ!S澑-`mL~後ツヒカ{ネホZ」E」哽, ロcコロヒ3(4フ竰 冠Wテg9jgレ-}メ栴 ?ナ゙タマ$lヘfq#Y {エ{駭~伐(肅゙ト ウ_ホム9"a膝hBJテ3 吐&C ツヤTリv コ4$1(羇9\族コェエr3゙ツロャ泌リネ詩 4フ笈~k,;慄ヲ&ルエ參ニーー:フ2\ュn|~)ごク5ワ九~『yX9ル湟F-縢~壌"チ74シ/゙ RHRコeルnリ8m、ツワzヨLサCT怕? ィ桑K瑪pコュi=m紙ヌ}rRwツc窮 ェ」ヘ&U秬ヘ;圈Umカソ牘& ,エ\。ェFク勃y愡;At^Im繙ロkdD]2-ュ墓S9eヲッ。X歡snク3n惑,jyソカg >メン<リ{ハ *#吟$vi6棟|ミ5モ"ッ)椽Fキ敏u+ex?5%ヲe73dコ7`ヌ8kX^5*!熔*W\{ゥミェモカ>鷆?゚=、ン Wミ8シbz保p験4}ネk峯7;」罔5m歸>wキャ ~x%ゥ"D%レ速蝴KナRワャPアネ脚寅Rイ)#ア|R7」+]:9W%ヤメ)o/Lテ団「ルャゥlゥタカT<ィp,ヴ#,碧"ハ%zJ7yq8゙Eワ抂u溥マVスKキl泣*ンァ汲モFφ3絲xERZヲ毆mヨM蟒AOp狙゚Rア〇獣3ソqカsャ 頂引ヲ茜ヒン ~5|-ナ5チン.X僭0"ス9RPヘ9%ぁ\wwpソ苫>|ア$ノマ<チア-rヘレィ。エケミ'z"セス卿Mホ」.ム榊ーェ、v =v#8}゙:ワIユーOMリ叔t駐w[゙荊イ'鑾ネアP#。BsRB棋・x コユタ,」。オ9ャサ 吃bロ? d"イDソ・Do 豌qMqケ筰フモカP7燹k櫛ッ「)鮟M7P /`s-l哘数カトニ匕&}嚊鸛Qカe辷a6"フ/ナ-ニ}|曚指笞\Lゥツ、ヨ2・uLh=Sケノl ヌF^「a訶 ナ `z┬$迯88ヒン9顰IQオ渋脛LZT>」A`pkoнn孫!Z跫羶ウ薤ウキ 小敘」;If7宝l_ト]<゙殄+獰Iワbリ ォX巍w@脣$ゥO1ラOウ-榱艀愉}皿帛Ef%x%害5SイJエtEE+娟隸0サIケァ嫡ZI禧JシFJイ(ア+N6Pb)サヒ」^。c査DハアJ:コ釦HッSヌテ旄矼sM鰯$C%Uオ楝婬u-3ヌ問'ユ'<頤." 
.,ツ陸&<y瀞マか* zス0Y%6利サ_PK7ュwヤPKma5antlr/collections/AST.class}苔N1ァ橿(x>瓊z#タ1嚼5』畩♀ユメユnラィ乗澆ァンa濠ナ~ァgヲモ訟78 {9リg千斟控KA挂ォL・*%レV:ェ5墸ガYs郤bPH\uYS_竄+ケJ M#Djp"5莪・)\U,48~「缺0ワ*薨サツ曷Yコm9#;伍3dイ・、bWhbチ,サ蜿シ「クVヨ烝 マxゥ矜冶コヤィGquェクァフクシqッ%フオoテ、粍G;ソ{ノ8壯斷>T言_ゥl;キ= ホセLPs⊂ 杜e惓LッミcモァR。ネ聢]ャ4pィリWヨ-ne0387ネ G #謇」トq ニ=尭LxNァ`レsf=辷ト"秤竰2*ャyョテ&裃"n;竝v?サ蘖Kレ ォjPKma5&antlr/collections/ASTEnumeration.classmホソ0ヌ゚!「ラ筒#,tマ[ュ坑j0>寃澆ロヘチ[.誑ノス?マ」)ar褊ホXケ7gルY^-/и撲ケ覇ツ(%EWンイUy\メメVネMュ愡カヨMZMム;',゙8ツヤKヲクセーテゥq*" BBタ!"WアHセPK頁テdヒPKma5"antlr/collections/Enumerator.class]鎖 ツ0ト゚ッhtpレ・yg7キΖトメエl>%ヲ ゙p7ワサラミ`ホP2フ}痙ァPK4イヌヘ%PKma5&antlr/debug/DebuggingCharScanner.class晃y`斗v届萓ネヘ&獅ネU!IンL^6CXリョ転ェスゥhナ嬲AゥオX」B-TリTQアヤォZュカjォカjオV{゚恋セ厠Nv尸ス゚{゚゙゙キOタテD4Sェrモケエ゚E゚pSン砲k陋,ngキXwナ,セヘ祚テ,bq7宮ワ@tム!}5イ貊Lt8社Xw競,竺圧a}カサ淒n:Nコ!&:ネcロ 条\(?cヌン昜アイIOケ韜lzニMヘャ宀Dマロ.1嵜タ/イ$ソе/ロヒ,~ハ?耜WXシハコラX~チ+z?゚`&祇イ狭Xシヘソf.隅~{渝~ヒoロシヤ゚ケ顆.ル(QqDムTュmサ革ヌ"族物チァ┐Aュ・WンkY3ニb船yツム<゙U%6J肇ナKァメッHTrセoォイ]i *。セ矢ィ%ィk4オラ0r.ム・府ロC噌tylfU[ヤクN"Gォ`/Bjgャ頭(宸ミ4"|齔ェuoQタ*タンQ%ェ lアメロロ。 (}ェ/0UCェ埋モラf7q晝 Oi教 \p^%`4[凾*7QJヤソ%搨喘cチ┨ゥ。S澳lセnオヲ?&猊%Xシゥ,カ魔ォ婿v┐?=Y套フヨTーくGSIuゥ)」胄Cア~$HヲソンmC~5 @亨ンミヤカPTユコb廚Y樢v6着~(ユユu9f。mロ描 マ^、-p択5ツrフ*V* ロユヲFjぉ゙f<踟ォj/[蔕ゥツ)>/冨レモ擡|Xu'b_ク'ワTユ(Iヌサソ3|ヒpエタヨ ロ~e*メ;ホ,4猥迪cV゙ェI」アユ/6カ%ミ カ,タィB冏ャ流マ\拌TキxaCォ wヤチ N4オ?シ]sz 侖Yェュァッ2Ym9^)c魅Sッ渚s, 客朕D7アdpョヲrmモエー牟ア#jo+J劔エbロtネzE a'EMョシマFカハ6>aD`ラ"ワン癢觚W8ケ+M 棹ロッ獰L有驂d・洒$S7ネエ蒙蹙2]L泱I。M2ゥW&6ヒヤG[d ミV匕QPヲ~ ノ"\ミgG5U7+S"2"モ鉅2譴ソヒァLKヲ([ヲAカbアηz:Oヲ t梛#モ ョK$a;z-チアK夥4 モ苓ヒ2、ッネ}フ%lシxネht,eH.ノ!KY駐ア+ケxフzlM}ス姜DシWサ\Rカ,薊nYハ$ャロIサ2)eゴIZカ'ニユ3_:レ淫戊#」凖トュゴUュ4シ^ヨル肭Jホ x\a3o」a]*蹣[轆レOQ班「ЮV5|ェ。セム(xo禰D 。U」渋1ス/OSナ 46ヲイケ。「A$・ミ4ァ*>UxZmfr。]D冉4cハョ蔗"Pわ宸ォ鑼xム囲 _Vミ.d苗シワex86漆ワr悁7マ。ィ]-Ke尺Tレ-s 智垠~ァCテaハきXナQ鎰モヤ、5樋hs>族'V早7「ホF[9コ嚆X痰ニホ*<7睨x6 <ンケt3ア+レ諫 Zマュh キYVw桶8-ア?cワフX0ユ{jfワdニサ」w礬 テロサPF。;d ッ{セヘネ一:S怎0B 吉トiハ,惧YリBモI{ワxZ%F8ム%觝q/エム]餐l」。・et ロGス゚イ悪フメ2ヒ(|hWKEユ欣'縟矛ヌbD@Wキ硬匏Y曄V?ヘユカ7貞Vネ」<ヘ5カ7歎+ @NBsュ硬$ OAs晨墾ヘ y身。ゲFイメ y千。ケチFrカ /Bs」硬 /Cウセf@f酢メホSヨ仞ワォクyシ&ーサlー0ヨgナセタセ MカリニvZアo6ー@{3}ンタzMlh繍ケk}クルkfp・ータ/aェ7憐ムC冉&_meル!4キ;クPK,>ホ =PKma5&antlr/debug/DebuggingInputBuffer.class拘]OG=]ケ]jBネーm&メト!)狛、!間シTk<ク巓kエ^モだ。Hナ瓢ゥ殉闖ェzx-Hy匯s迸sケw赱タ:^ヲ1ロ)L Y$p7 ロr(慣#サ ワKワ々%ア慣'ヨ#アヒナ:' トォ撻#,サ~リ q'|旱fスqD瓣鋩ク未坎v黹ィチ02、t@・US跨LX|/ワ50綵尋ォ、ιハ椡柧啅シtォ zfワZm クオC薔ラ暝6F'[~サモ$馴錺G^」チヘル祐zァCuz牽コ /睿筏Z~缺クN5 ッヤaリ巽eシテセPSjメkWワU3+゚%*qkメゥc フェル:Qハ鶉マ'レnォpンr^Z杳P=、7sセェu)ヤニ%\キ妥シ杭|l獺 N@sトニ\オ)ヨ勁,}aワ諍ヒl\$Oフニ卜8Xオqk6槭lリx!崢カ%ヨ ,s胥<モNo ラッ朸゚p zト#/P・キcカ恍8聹8%=熙8*z i1_>S4`"&擇陜縅テ`cク^讖ト缺ヒ#o!棒; ヨキt骨!~ハk贋:ョ」d青ュ,ゥヌyヒFК、ツTwトラコH疂}1$ヲ鐶ウ#ネw巖Wイ畿「Lォ掬X9t第!フu9%フ、1ヨミ#^@BWgRイMムYJ枡e 半Lムフ的3Igラキタ.c胞ソ、;*]舅pOJ-]|ロ96ョWク豎E_瓦GDタッC。"ニy 1%Gッp褫4ワ」RSュァ!ケモk-eヘモ16゚'丐ーェ兎决札ャ」"カ「ヨL簡」誌鋺l」:{エIカ-z7マGソ盖ァ70セ箇!M狩ヌw嫂「ス。ヌ#Q轍ゥa&ソヒ葩~マ#ユマ鋩コテ僚KマMnPKu‰v、PKma5!antlr/debug/DebuggingParser.class;oラ>=.vvNFヤ帳メ弋ソトワTFQ OM洳トイD愼シt熬「フシtkF>ェ獪ワ「ヤBョ珱メ「腟キフ Gト%5ゥ4=ィ4 アィ8オHd#フb^IN掻 HRM #ツ、ャヤ6FF&`bad`f`a`メャ (テg`PKB zツPKma5antlr/debug/Event.classm析NツPマmK+・( D]闃氛>ニ采т錚ツセRR求牝wrc祕篦。県キDeム匯ァ゚劔{゚゙_^ミエ舛ユ〇棕naヌBCタoo、 佗Aト'オヨ`豁<7「ゥ{鰕8>j省榻'dヒ ノオ/Cマゥヤラゥテ毀xィ隴6GXヒッウムR@楪ネ ケーッ賈b,マΧ・}カ嘆ワKロ:リ@゙ALX/甘;掻2uヌ旌I剪maョ9>奔Y藕ウ」ヘモ!u裁t!Y`エ伏セYAホ'゚・ョ3゚ル{c*ゥ_E哽ヒT峡2>ウHヌw゚}[L%Vzvレ}wァ*ロラ波 k搆o塵RルbュaPKCPKma5antlr/debug/GuessingEvent.classm醒Nツ@mK m)"`「^JIhシ5$D-l囃Zhy*/&*タ2ホ6姶ル叨ofサ殪z8ヤ。。.ツ~*「:ミゥjjhih3艫煩軋0ー>ホニarホPオo盪ュ?皀莇3dP.ヲcホP1ソI|>サHC_8劇'ラ!イン。1ニ|)v&,za憚マ~7Mg#~ヨキ蚌チ网'ニ厰aP4賎チD哩2,'ムフs? ワ_N觝ニ#z喉舉ハQMン)垓;&.QV晟)SPヤ3'u!モWネ壞m-!m g)awehSfbャ q-W3X、E.ヘネロュヒt脚{+}虜ク吟4猶慓+r?ヲbvT%Sヤ鰻i/。PK~A+耽FPKma5$antlr/debug/InputBufferAdapter.class摂Jテ@@gz郷゚-ETヲ>4オ(゚7ヘエnm7e屯Iチ」トI-6%9統恙變ヌ釡;t。父$ヤ hミ4`!s"峰OゥセBル抽ョ儁z )g辞鏃hOェ1ツホ%?ユヲCv06Z 驂Aハ?嶽、Y0宅サハ fワサ舐ュャ 拒BォヌcDホWB?!T#;z面A044yケ{7ミCコ雪トュネ=Gフ}メン厩、!Σキa囎咳 es*ヤリシア'47ヤXメI鮮u チ5イ 旨モ獗.G苻>z|-f;ノフBn・カIニ_-ヒフC!N+1虞Z拑陥フrシヨdV~カス裳bV=j_PKヌクオエ?PKma5"antlr/debug/InputBufferEvent.classuSkSa~^ョ+ュキMシ$j B望 ウpナ"Sヤヲiーネ. 
,ホW}颪ヘホヤL?ユtホBノ ~リs゙シ逵9酩/1{0G恵ケ焼c ニV{ツbユ懇Hx&!ノz喉Uツコ7|.i6^Jリd捍cヒ混8、O,W*オc]+$O+ テ$4-澤キウケュ如ソZ1ヲfZケ。ロ!l$氛c+ケサノg它k7:拆gヒNヨJノ(勠゙P襯;モ稙ヘ(ニキ'。PィZ)錺 S2l4ッラエ|剞i5ヘ1「nェヌZMタ {Lwtナ%ヤケ:B*ヌ*セ諮聞情ャD<ヤ~L2+9ウV2#。pァ*載。セQbレ゙エQm鰐坤#ス:モ 3ニ2f1'.ノヌъアケ(モィ2`Q`エ;w+ ヌn syバeシツ詞],ィ「Aェ&東eD0マ9啼TF l託Nニ i槫Zシナx7mzーコ慌Sゥ%.ュZユ高@4tケ夕ヤハ喇ヘmC兵茱a\逎q\u)Z1Z.}ヤJ^E&ノ I xq南ノ;HT=ML} チc"y寳iツlNd筬オ! 雁nヌ ー#ラム"nv恭奈'ニヤ 、搗oー箚@?I喊WGキ、ナ#~ゥ゚wリサ+&&3、iz;コgノ4pロ英鬘茨戉hツ}ゥXa褐P幌 R!wッ拌20蝓窃悒O4q-メ辻 Ne 61、 キホJラ:7メユノヨ8PKEZ> ヨJPKma5)antlr/debug/InputBufferEventSupport.classVkSSW]<.щB$Qhュ!綣 VナAアリク`4$鯱 >レhg:モ2SテXgェ」3:モ湲Omラセ7$iヒL9{毳ヨルgオマ蜿ソタ漓*ニ=hタD犂IS*ョx")筱8L{hセヲ簽fhq]トg">W/="ル2゙阻トャ茎エa^t "Uワネt3コq[fwト弉ア、"ォタ]ネ9]7q;オ怺fRルナ顏m}ホTミ榻諡貉篦O$メSマ鵲。禺4モ呵4=s}[k|廉ャゥ`g"53Ft^-.F縉ワ」ニニ鉄Wヌ(q-ア\カ`rヒt*Sヤフ4$dヲ(p M写シ‖O_'マ区v*扨岌Boム;ヘュアワ<ク5対ノ簫ャn\Iヘfhァ跚肭ィ`ohウリラ|,琥エ。Kリナ%「9C116衛-。ヨv檪t チSqfb^融r妓bH+給。Q c)緕ッM黔モルyロusyD{ィw」ゥセ`閻[ユヨミZィチrd。テミ羊ヒiLYコ趨dwユ3`ェ佰 悸(F頤!ニ委1 !jリ急>ツ~Vョ愃褥膚モ8」`ラ燃メSDQト2xサ=U*ナ3}12Y隨9|oNマ幃ヌ_u2衡vムF& fSA~^jク宮"3ヨ滷ホVJ礒潴ケラ7セナC 条Xテ|ァ`ヌfシムp 殄ク4|奨 z>隼rヲラ旦モャ{`8」/Y|m*钁1Pワ&峭ヲオ+d゚スiリン-゚イwk肱胴ュニイF^w*湶eゥmg;ススLfr8タFリ|オムフル繭ィョ{=ヨ弱-\s兔Vッ゚Vy$揣ンjセaMコ數!Snhアヲ汀 _ュリヘヌコ慳マル.゙ルサF゚ネeァ~F蹴?X」「~クF?ツO"Ey恂8i甼(癡h倬[9憮セ輛ュXッ J/タ/酷ウVl\9縁ヒ8パX8ÅッpソヌBゥ3ォh|キXJhェN⊂痿BmG'vミ:@鑄9vns:aム,jj躪ォ3ミ%オsnRサD?ノレ ∃ムF8VョセO\ュチ@ク6D3ビnニ2к38Kケチ0.!コ9*ュゥlq[ニ余,裂Q゚LM歉浤~棉3瑟ロ痞y縟wトェ?メm玲v)x鸛YZ!s盒Y蟲Fシ=h、シネユK,L恪2Bェ薯/ `畠」g弖;凡D崢mψロzャ伯pvムJロ「]Vレdヌ;謹メォサッユ'j凋彙軅。5aス$cPKモリモ PKma5%antlr/debug/InputBufferListener.class;oラ>=>vnvv^F。フシぴァメエエヤ"錮シ簫ワTFE 淨シ凋"番、メt}Oライヤシkヘ0F^$>詞 H|゚ト「lFA$蔵ヤフシFョ珱メ「腟キフ5Hd陸讌馥%%22ネ羃ヲ倦A、P?'1/]゚?)+5ケhイ&労ァト禝6FF& f氤早ネ`d`υPDユqI&.PK[」Fヌ2PKma5%antlr/debug/InputBufferReporter.class穀ムNAマ絢クBUィ-ア\PMワミ4ゥヲMLカュ)ヲ 8 ウd俔Xzム&ス。LマnエR" マgマキ3sfワ `u xオラ((纃括 窮レh^ モx]`8(&P%エ=働>募Ndヤゥエ#mz彦(鷺ロ」Zq/8ウイュ>^)帖エニ(v=>率ァ:オ蒼'ヒ02」xタ咐cb〔?K{)ー6ヲ|Sラレp:マェョU」sソナカュ>馘・賭0イNル y%s染ー匹ロj9牆@uハYユワeオu。レn:齣Sfj゙P0(&コヲ_t彁m\モY%ナgdnrt}CョNョラ「y率M>(-エ・カ・侃iiハR、レ褒貝qヤ ノky4MbBチルuワqPヤムtィ、肩韋韋3羣壌韲セoケ5yyI垣セシシsホケ迸蠕Dヤ*VケィtメCN:爐ItミEー勤」G徼ネENJ孟ケ4ニ0ウソホ膕&GX読<チ('<ナ.タ枅虍廏,孵ョ╂Eマ3&゚f&/2.難ア゙匸茲 'スフ枅ル+l鏘スFッ3 貎鬢-貍ベa.ソセヌ゙g#&0惜徐ノO咨庫マ咨Bソソソソソ螂ヌ蔔ナTBヲソモ_]7サ /(ヲ:饑ソ矩?ャ_~_ア <(wQ(EN疸鈎m%ナツナマR& 橡ナb  ハ彙ェS;E勘?WcロユーKD」遭.ネン。X[Pンlヘメh4pD_テHP$6テ。X"、ユクi輻moキ眞6モツァ)quク7ヲM・ニ@ネ7ハナ蘆江]Cメ ニツぬ-ヲV3褒|;テVUラがュf"Cj ェ畚旦堰カYエL k AOgモルxャ杢(メツjObxo 3ロcテニキユ;テムモコレ.ムユ絆5rI ソ I?V縡レ・ナu5フnケ=。キノロW憮Kヨ猖qbィェェロッカヲ。沙:6シワネ1:_シOェ ミ^レ7モJ^Miォ枸碾⌒ニ嗄ニェJkSaュ/X フフ4!g '~wuィQ]ラ)ヲ。:エ俑ヨユリ'Q。ァS椹";4ン`マ葫ャスvlx則タmnBiユIヘャ混k!ヨォマ-Fニ、揉づn=cUBラ#aァ8A hヤ)*b8Qミ:ヒンZ婦A,靹#D\uヒ:[SGZワ[:k瀟竡ニヤ8ミュン;岨ユ]+}ゼ扶懌$E,fgN謫ッ唾h庖0メ2-ム、ュ胝k碧メ圄ェ ンO」 mcユrCユR: ルF)b8(ス踰-ロミ拊ゥ"レナ23Pネ底U2ヘ!39Qdセサカ」Cレテヤ亮ア\ャ`rェ"NUvT?Ktキセ瓮怡∴ョサ1Pツュ カク鷺ラミiテhトシzk Y)オVSョ+]劭ム8ァヨ ノメ?.ォY案繋萃|2GF' ヒ隗Fe*ケz|ョサエヤロ咥 ネキ 苻_Λ(:昆.ッN<'#~ モュ})9ゥォ丗Sリイq,TjyロSwXツタnc、V{:ウ收~係スX>乗bリx.OクDj\雄WLs6XG ‥ホセエrjホ!リィk!|B売"ャヘ弧トクEq3篳エH[/<ユヘ6^O冤D5ハo!トラ。澳畆'Ktx醍悽 fヘオ:タ1kウーZ リXX鯑;J 93/c朔<ヘf!iムィニョ」ヨレレm9ーゥャム_オ◎4 _「迹I珮HフG<<ア舖@>胸uBW#弛ロ2゙,oヘ ニ[;,Lツウレ[;FE゙#蓆参!*"ラE@+HTI4厂ゥ謙cCQモミ2ネ 憮mョ」メcZ哂EーTKユT#-]責池LKオKuメR,ァ・ノヌエ4モbゥ\;LKs$S悔*iNメ4ヤ%ウ袢珥鋳Rf42\i@ヲレ! PHケэラ・< ;d> ュ珥r2ヘ9珥*2ンYネ霞猥wャ0!US.:探ケ Uナレ$UァUHB「賭エKィНチ-Ebカモナイ}コヤtdI* 4c7M賑@j。肚探峺;E-Gオュ@ ワeユJ昊ハJXY+ォ⊇悵HU[。卒弼i・ョ゙エ豆ムQ孱Z Y熾病h1%8。r6抜2モフ匆ムuテニUヘFンニzリ閻叉`cラGウ盡ロチFlljコニエム`サタYオ J♀、餉ラ覽椌ュシx]ハ;ホH#4g?9コH%ワhフp\ホC>.セR),、dャ袍CNL顧V゙垂xLネT3キ蹉Aナb&8チn敗椽ワネ$_gO!タChOテ暴藺レ籘Xヌ3/I柤YG+リn4ロ:~垉ヤaセ。,ョキネ[jG9^e易<ルB"イ攤@uヤ]嚴贏>1ゥ Mfレフt謝ム .杢ャ砒ヘヲ邑嫂vレスl」メ肴エュケワエネロRnホE3ンシn^ ノ蚪 ネッニU戦B羯埠 ェ:屁キ、yラM{エ# 'pzクy$7ツヘ ソ キ@カn○キツヘス67wM駆錯$杳ノ;哢エC舅pnヘ;哢イCd8キM駈須ケ愎yGモノvネ撃s{゙ムエトyチケ#h:ナ9ネApqmBZMネイ敗オン゙゚瞭8豚サ`1vケ(ーG} ワサbe骨jヌI`滌&ヨ崢ヨ7ッ;廠ラァ+怡エ゚トヘ`」ナ ャx,イ9 ク7 ミ,r4ォ}ーモタ睫z jeナェテエzモ~壗s蹟Tt$iュ0擔アワ檠E輯gチA:9*ォ80u?Lgァ"p|祉ヤペ-ンセツΧ檠スス莟%鬯・Ln/゙ ユー}Xス姥?Bヌ韲凱Iリ$'i3ワaキホ1ン:-R)ママ+ミJフヘョ@スK請漉チywWqE} \(゚=゙B{y!y.弟}ネソ\ウフ1R庵q{庚lマ縵ュ$$ヨ86tス|':婁、&n5ァアcm垂e膂XOiエ"cョ杏ネ>焔i族1'ャミコ「4ユム/_宀ミlイ.オサdノSコ互?ツfz・(。ャpーヲj)エ靠オ改嫣FS+モッ睇qjノY鼠BU端:zノュ\鵙 ムrt*j9レ百pトフiァb7ヒョチ<ム圀ェUDキ7cUムV僕PKア6P! 
PKma5antlr/debug/MessageEvent.classuRロNQ]ァキ儂Vハナ"U8 Bム6チb チ翕エ囃:mヲSヤソヘW-&~eワ{アカaホ>gスヨ^g漱ロヒリミ0 I浪Z倔纈F dUャpシツ澡ョケヲ篌|セゥ'o3~Gチ]ォ!Wセu端ヨ装ュYv%サ:UサB9襌Fア/<yÅfアク]井ェvユ]1:Hロ・CYvWHa}_ $カェカ,エ^陸ウk彬}゙ニャ。T、サ96zk)ヌFD*ャZK6FQン驩カケrヘソYミ燧カSo9eィバ淕fモェネヘ#iサヒャ・c tLcFヌ0Fx燐1)jランA漾:フ乂=ウ惡t:'_ク5クッc酲ホヒア凹。卯ヌゥ;0AoJヌ,&t、ク,eルnヘノヒRォ弟$ヒ) yセZトj4、ス/宣7シ靫ェ,メQS' ムッDG@サ(庠bハ 初ヨq:MS]bト\zァ^S_65L#^ユ契ソH87児'劔6ツン朿D#セ勞'斐+゚yq%ゥカ5ロミ>#jRpr鬼H&hi綵キ4Tフa゙W暎0噎アミ1サoヲ220~PK杵ーレ_NPKma5!antlr/debug/MessageListener.classmマ1 ツ@ミ?& ぺkタNUトBミzユ!DツFf79怛Pb,,7|ン顯|тP幕汽r!フ豎6.u甦捉-[ォ捌6nオ8}ヤbR}^ネ7iニ良ゥulX妨]jツマナF5R6蝦攬|vノB#ラレr諭hUモョ゙hyハn拔ラ=zoPK梁ヤォPKma5antlr/debug/NewLineEvent.classuRMOロ@}ロ1&@ !iBn揆ニ類ィzhユJH=P!!トチ!Kdddlァ8Fj・ィェo擽z劔}f賚ホ6ウxaツツj’ME:6L莖i`Kme^x・シュ」。」)浅ス@ =ュxム;%サs^サ趣}輳\曦サ垢r? z萸u亟_teナDハO$ゥ ス/」Nレ k7リBソsホル)。@獎cy%`Dャ(だタ「ンォ8Dwフア槎O柬コー/ソェZッeオロツ<,TQアPト怛eT(nイミL含(燦Y;カーラハエ&'椒&Pqκ撼ニ}g廾絽#献r2ヘスシ尿O`ヌezコ。(ホX翕ワ,7.寝2メsjレOト3 mカ!R3Z3E .セ泉G,Lハoヨd%WLイ?ハ」トzA殪゙#{7EKA・軌?・ゥ、ヲヒォS)~ま$ソL2.fトC\uュGZIO`(Y %賭yロ Uレ:}+ソPKィJ有ヘDPKma5!antlr/debug/NewLineListener.class;oラ>=nvvNv.Fョ故ソヤr淮シTFY 淨シ凋"番、メt}ィクkYj^卸fPqp~iQrェ[fPアTレ'ウク$5/オH/+ア,荘A 0%  E9謁鴾IYゥノ% ネ`*鬼ル鑠FFV求 H21ーPKルdム4ミPKma5&antlr/debug/ParseTreeDebugParser.class昂ヒRW=# ト謐E,Fホテ$1岌ュD<慧ナNツ ]ト4」熊ョ埜WvルdWTェRY逎R9=ヱ親 ゙鰥鴃3#ッソL。チ5フq/w*貲,Y筏%1ヒbV"=設C1I1゚Hワキ扎aUヨツX葎0アル》}ネDミ局籬刈Oトd$;1Y1゚4携rUヌQ冥a8敏卑エmWCwj゚xn$ェョYL、]#w0」aーdWワM府;コ?eXnムIヤンユ「Vオエ」懾ン%蝌マ ラエュエォハ ZRCャi咒怕P,9セナeムホ+ 由ヲ・ヨシcァHマhャ゚I写'ュrユ%5Wヘxキ゚ee-Tww辺$:ハ('zAユKムミo.ЕDエョ:こqVメV2ワワ梟ネ/9U悶J?c彎,ェ忤0K蘆bチtモバ5ieヤネ匝t3vAfctリu懸JZ{フSムー 田Zマ宜ッ/銚ォNNュ來ネ。Sャ%オS-cクノ゙7M]ワ_ク:F。誌ア稈&トLBB>チM1wtLヒ'lAヤ珂!マ豸粋]jー鯤+ン久ゥd['セ瑣侃ス+ヲcヲ枝辛キu覗魏%ンur^鶚エ*崘!ゥヤチマU゚ウjVシ→シ廉tィ2拉!5FフSI鍔0セs鴫PU ハ「ャヤサDuヨァスセウO5'椙゚ ー股=)R'ヌマ_tカgルェh+火ァ3゙Pナ|。ГオB¥ナ゙ヲミ旦ーl窯ダk qy+トU&ツ」朔i荘ョテエ1ルgナオCエナChナc 羔h」・DFミマTQ `p園Aョ#苑ツ蹌C;サ8@トaオ゙ア綵鰯&ト qイ%b郢ァ<スEッ駿ォニオ-m杰m悄メ&pサEhM。モエ|ュ9著WEタN$~ト?錚旌Eッそ :1ヒ$タgワテクモc「ス1cセ cン<>hニX"ニ21Vホ`;uん+サ(Yナラク。Wワ社エユ8キ=。゚0x伍1イ=5 ヤ0ク;コト'怜ybCucフ l Gク4逑チe<ヲキリ,フcゥ+vホ鋏ツ{U^ヌ靜イSムU游Kヨゥョン碇フu腦<ヒ1カマ(ニgエMF褸A椡・籐.ウヌョ們F3ァ梧Nコm函 SL=w漁lfCpホホrタラPK9「タ&Y PKma5antlr/debug/ParserAdapter.class掴n1ヌヌ-ヘ豕Iモ噪Pツ!UU{JKD"8;゙!クlシ泰[チcqB簑P鯵キ畝[a{~キラ棔ソセ#xZM(ヌ秤AzーAMJスHサC#4メネhワz リW ィAH nム又|左鞆ートMフjナ、U?V$ュチカ+替俤ョソXエ鯢ェ、サ~ャFコ酩磔セDヒeoチ熙w癰マュAz猊ゥZミaMメワaサ、]?カGタ常コ菘゚PKkモ}_PKma5"antlr/debug/ParserController.classe廠 ツ@Dg5 66bg帑ュチ結&9.イケq~%^ーーH1, ofリ錏a#11$ノ酷|%ャrB;\湯AIノイyーアヌ黯/ト趁Nォ%セケコニMdYァツQIツロTウォAツX)エf2PШウ靠ニ5keョ徠簇ァヘト.--.ミrjサZチCpキ ゚9Э」PK「ウPKma5$antlr/debug/ParserEventSupport.class校 \班オ?]Xリx** 0篌寄I\ >01クタ'ャツそ錦$Mlb呈m^mc嵌垳6&ム,ア&カ贄4ヘスキ鮟Mレヲiレロヲスm{o巖セ醤o畆P{fホ劔恙9醂兢^hソ、テャ、ムヘ懣 'ル8S,g;)Egr<ナIi:3ユチケNRtf噬ァ;)Sgf88マI9:難'裲フL!Bf9クネIャ)vp遠 ケTネl曾"}8クフI・メァ皮:!q休档=.塒蠶TxセB*,イHネb魎DネR! 
Y&茣4,cケャe[刎ャ鍔ャ黙ラゥr蠑VHオヌ、WュフコNコzジE5シAネFラケネヒWル萵zm!>!坑シルE ワ5rォ-Bカ:x寞Zxサ6U_鬚m|父w:j'`%保・ワ%ト淕Rv:ゥ砂doT!サEアnムウGHテq^'pッ徃櫞B ル'$$Nテ)G<(ク)シ_リk腥ネQヨvラハ ラノ]/イ {ヰq!7 ケYネa゚籵[儚ツΓN)ァ~ソソイラョlリ」vFV1・wユ@8「ユP)W4 Vn{"賛2対SヌSヌュタ。WVサヘスj0Nw嘴dユセヲ接':+ ェラヤ穰uj:yォチモャz0メェR;サ+廏。ーj宛C7ナミヘ陝ラソチヤセNM 深」M,タI\オlS::56J_」1jNィγ袷モj牢ロ>cミ減Aュム7ヒrケ>S嫣v#。゙^5dスワレh;z;j}-[シL\B[8![スΜ Q頷オz巒峭ヤ{EツLi゙uュQASR}オヤlLノ ユュオア」淅エ[E昔卑Pラ垂u柵」}5{媼ロスヘヘ栽"Kイmユヘセ:゚即AoCSウw擴Ntji ロセセコョ^亮2G・-ュユヘュコXaハ至キヤヨzスtィ頤nkッッiKヒ:}゙ヲc祗ャ+4リォョS"=ネユ〜 Rgs舜ノy[呷オ]pリL9#゚`_MワWラ:禿2コ緕kエ。ニV5ィ敬wJヲbキ報掌Gヌ=nワク1レクl胡m&6ャネmaRq」ヲEG5佚 |ノjh\1綣ヘム&チク>レ?ホQc(ト.U(X聢7V榮モ耙イヨj湾Pxッ聚ゥ笆マーササN"ロ=サ冤n息衿肩。襤1アw]6 Mk0Bサnb3sF;#.OCq翼Rエ^c*aォワヲ)オモYュ&サト]kLン);Uz+k5イJ6ンbセヤ a越ヒイィ >コ繋-モワ 碑|ルヒZ}3ータ ツ}インミ Ff哦jiox・ム゙ャ"゙Pィ?$j丐シ:鴫レム膸フォs d?ヤ g綿,5\(サeアワuvタアエョ絖|ス?ミ+ ュ媼"T箱}ーウSUサエゥB億`ャムe=フケKH゚ッニGメ\]8.Xqチpェ.フ1ム,ョmL、*メロ&BナF bヒスヒ疝ム2タqコ鴾埀カ ネチ.図S陋3ホモq 、Rメ[ ンGテ蕷 =@Zナ2'「OJカLa }湊t\J。Oムァ冉_D、ミテtt顧觸H。G鑚 ンBキ*t}j +|」);/u渤jH瞻ル棔(|'゚・ミ掖oャU*|キl+ェBG、ナ.ナ.痣z博)&妝N。ミkm~H?R閔烹゚ァ(t;aサワ#膺!w <Wスュミ/餮 スィア|淞 ?ネ9 ?ュテタ倆1z゙。_(|ツC)壻rUウ按溘G` /-蘢 桴Y"ニ9堕ヌ1ソ 芫Bセト!yセィYeヲイ興~~悄PI!OL膈 欸!マハ: &H>ノァ`レ&ツIw{ォCンクイヱhhw訣ハ5サw鬼ァuサ#ヤbソハマ こG/c<エリ葹ャ敵yマタ隗糀暑1|]ホrJlヲレ?ホ)7トX^Pャ<'D3淀慵- M:瀰Iラ ソャ[リ+メ曖ハDメG ソハ蘖5カチォ 'テ9 ヲフ)0jN=ウフゥ\3絎チオF=ヒ<#ァ餬Aヒャjォラojタxキhル^3カ、契n1"敲ラシスj泱6蜀ユ補ウュイRs祀テhMケー~」9RrgoソワSン訳ロェム?oエU;゙ フ*葫.{heル|。ヌn閾裴鼇オ=チ ェ"ム59;{ヤホス5!ユソハh<$」QKマ9Kヲン鉄潮書/M榊ルュiヘ]ア關`Fモ_g炉hメィ 嘲eヌ エN 2攣鴇 b鍋ァ9ノ1綢F陶b薗+dホ -_アiメC)洵ヒrp"ム4ウミQカ ィAhPa秒鴦mBンュゥ掻]2ヨ `xB8pュ*カ・[nオcXャiaワ$癩(;ーヲNャゥ -x宜 .海\$嫡綾Qvアエ9c∈8qアH"Eル ト> ムす竰8ト%"I8rC@ 」. ト・qKEq蛬 ^トh1ト@\fク4詣擇Kテエフc=ヲヒb8$_,_K ]シR、ニ[タd媒敏竃ロlカaZチワ瀛ス、サeハ'・6L+マQ~'ネケメ>|l・W謠ミjFミ梹ハFトェjdeツi/?ム{<)R糊ZF,オA塑T演金朿VメュヤUョ「Lミ(。v 0Fワむキツ塵テメoGhケ.}'戌.8ヘン0#0カp裼カアDッdJ鞘 >gム}ク糒_シ済G政ニァイユネi傘m杳ャソNx薑h=x ニoチ'襲晰oニ+?7 l険ル#FSGィIC1~ウ1セY]濕タァ傘ォM1~愚k+teキ ウ際タO。ツマ>Cmmgh懌ハ忰i'フ龕vTiラi沚搶嘗>堅y牌!E@zp ツ$ヌ<桐ョテ7シ#Dxヒモ纜EOr1=ナ 駟^Cマp昶>\゚,゚v豐'+ア}クw齎黙sー緕續W遡ヲNリカマS0ヲ}リホ'>zwシu宵Y 草 H垂 ヌクKQw翦ットMソFチnヤZキヨZ。ロh預」ノ碕6ワ・ロpメQrxI栽准ケB:riネ椛フE~Plヲ_モ{ヲミa球爵3%t、$ッ tシ { メ゚ミF5・jュrY&泙3ヤモVp<モ}レ縡ネ`:|;P キワJタノツuクス\81ホ對[*7金ホァwr懶-烟_テOgpト t慇jケ堰~8#愧5゚广ノロァ`‥.リァ「蟹)馮リァ殘p0ュマ[_ソュ|イ~ヨKH歪ゥ盆|3褸 \jクw。ヨッ。ヨ{ワ\些]・覿ョサ61゙4FP「燹「ユ齦燦Oョィ=椪xソ~゙5シ゚嚴モ;N土%'遏5゚Y^ミ_W4コcオニ赦カリアI鑓wlル?FWオリユ猯髻0ルQ}ィ!ルzカ岐Iヨ芋塲L怱_フ)鵲装タhルNモ9價8Eテサ=0蒜p粉メソDsィ%FV6E."\8椪 Iユ-J蚯ハ Hェ擂ャ0菽築揮! Sリ%訖b摘 ]r4仭Eイ-塒0`L%躋.O0":FZタネF>、ミ,I0 ノpネ7靄rJ 青ァニフ$ iマ「サコヤミdェ6l~O訪f =J(幸i マ"( 。 s!ラ チвネニt0^be9Ral5遞エチ"?(カUルチエ械=9。LmH ゙? 
;貶J6.ミ霈・0dd蹕滷リテ"塰叫圏ミ+xゥ)L4O8痳、庠A淳ャZケネ(蘆」トR孜2Xeユ蝌rJr*Z咽灘久cコ,、VリO圈羝*,ワゥ=ネサェ 蹴ヲムxカレn'z嫐WA⇒i*ッユT#`ヤタノラムメ"゙ ャE弔o僻WlコDマイ臑~ン6y+]?&c+ネチ峅クzリ5^詬鏡フヘ8e:NYb廡4g3pG0}78E2cBnr」燭B>.荐アザJノシ愍2yソ暈ク糠'G湃f%鵯.Lャ膓Pr'半J篷.ソ宜,メq%ニァ8畋チC]~UアタYャ,N元8ンタ樒テホgIb愎タNp(痢z,p爺8K gpBキ涜賊?坪PKaq&D'P+PKma5 antlr/debug/ParserListener.class}ホKA狒スハロ`Ks +BツJォ径エ、ァI\ヘツJ虎オィJ%゚_ゥヌv0A猶#ヤ B寅"x6。」P]斟Vヒ4轡。サd享ア#vイ +ルクネ[ルS(xo紲lアル久 」ャ_ノ1YCスエイ 5;I」テサsvz頼AVフ%9歙bRムヒ慣e扞?セシヌラ%Jネ」b2 eシK。碇ェ/PK際眄fPKma5$antlr/debug/ParserMatchAdapter.class析N1ヌァ vソ1jク4&ニ 船篆サ;b tIキ^朖<>拝コ慧O;寶ホ釋顱9p8舊トx)4W 晟撥(DオセT8Xフ|ヤ#甍ノ繙堕。ミアTcヌ收Pfェスナリi瑙併ーEワ9qィ ^エ7ルaK3ェkκ、マ~ェ46 ト5>k)Lナ*セP猾,ホ'モ 粕ぢL*ウ栂(ンク5スャKフ&Xノ,ァメs Bス<撼K*Pナ=ォ\ア鐶叉モ^h螽カヘヲe+/l几f「`7緊-kンセニSフB府|カネ」Yアヒ&5∨|1oO)案室](K9鏈ネュtu'k3fカ@fェヘT(wヤヤレ噎2olエン T8チ?-hッQーレクegl匪 +ゥfョネャ9フ拉セBヒn-bNツオチハaLj・ySケqマ妙Z・uコ/睦ヤd* 箜ヘ.5'ラ nYx衛・T-ッ[7艇ネ痛ヒNフノ=ォh暑Kッ癩%シゥ.ク蓋u\A\ヌシ「罎タセヤ柁「孜゚BJヌmワQミ mヲコアa頁3;3=v梹}|・c7hi;ホ4dヘOL 絳/f@ヌラ俔qqiリメカU隋ヘ詢謚レイク出リロ愎7dュ趣0Kセ仔フ搭 $&ホ局Aウhハ奥墳n&シ!*wy志ゥ^圈廷フン]ォ投オゥェ装fw>゙eIXg粹榴ウ蘊tロァ1BP2ネQPr瞿qラ&\;瞽種声;「kル漆箘ウCh{H'9;Mセ6`;+G ョCロ ?"止ハユ_ニ>Go E}Q$Q#ZサムI罫?= 親・]Cエ罪ムKkKタ!eq3m、>ッt佇nj#綜ル、m、ッ,qョュTJG逶:通'xSg*ニhoキラォ^} レv・zNッRvセvz欷越i~レナ胱ッR/テ辞マートc ?f ヨ4ニ繊]FDュC7「セ:;v*q遅拈)gc彖k ア#Nン夕ユ゚ 堤ュ?ム}=當u7-O{DD>!N4~CGrツ 487>ルソXヌps|Jモラ1穃アY゙p7mト鍍8カug Q_ g5シィ皰ZテヒZ アテkモ8「:_ヘ1ヲイ/[チ[ホ{\PK褒tH>PKma5%antlr/debug/ParserMatchListener.class;oラ>=>vnvv^Fて「籌"゚ト剃 FE 淨シ凋"番、メt很Yj^卸f#ソАfq.トATー"ョ珱メ「腟キフ弋F $マ2規RR偶イヒ舍リSニネ Rィ沒琉沐伏 4\YL・Sbq*##3=ホツツネタツタ d02ーAiv(ヘ「8$PK旧ク+PKma5 antlr/debug/ParserReporter.class・甜OAヌC-Kヒュワ/ハE` HスI逝SA"聽t;瀉;ロフ挺ア4ヲ$奢Pニ3サカタセ恙殱~s賣フッ゚゚Xヌ哩^<霖" ,姨驟 Kマレャhウjb  ャ(xトミス飃ヌ゚bHYホキ"KAュ,ヤ1/WIノヨケレq・ヤh=g丙ォェP裄ャpz醤マB^5,蜚カz」>OH}ヘ萓膜ラo 燦(70ィW下ォ]・\ナ0ss毳痒L4ネFムク虫<ゥP錮H職氈ヌ<チSQュェ\RAセ「Nシ NOハ枷/オルネ^(fー7ヤォ ^チ0ヤ6イカwヨラフャfiDDァdサvナVUュ^vY殀イd;カサ,7rGヨゥ)[ホa訃瓸ンナ萓?[))∪懦ィュqAUャB卻`SWネC蘿/ョI!クモコo矛h~テ+車.x~#yッォ モフ{トー[ルuォカs(0`$/オr.RンJスZTカ6アォ自綻ワョェ綻エ\オ~ェwNラIワト6モcヌ 46% $%R伜sロoXュィェ.{%0ワ&^Kワツ月゙PエトmワムYN-f}搆タ(W3%Uィ鮪' 圭&mR嬖( モュ蜚メニU}ョJv 伶Wo7渤nyS倅ヌ9~形C慘af]フF0z)OpQx/ヲaSh'儁「ヲf_シ泙鉱h7コナ氣zCゥルア&コレ zD~Jォ`弯ミR_磊タ~ZXェソ:タナ損メv|] アm|NW'セソ?J~>砂-唱q5X;゙@陲6FAIC濡Rミ~V =mュ3r<[w>磔MDbムコS ネzbスgq_ラホ縷塞7psロオシOシPKロ姪`ァPKma5+antlr/debug/SemanticPredicateListener.class}マヘ @琶ル・ctユナ}nA揆。ェΞリテuzィh=ミa>`~30マ ツ<タ4タ,@HXユ|瑠ェj8W匐|jeユクカサリヘ*#rN妝$Cハレ7B<二YULX`ャjヒ哺TハV6ッ于雕ィ、.ト%-9ウ襃j/彰f檗ケサノ#xt|宥セPK莉Eh・PKma5+antlr/debug/SyntacticPredicateAdapter.classQヒJ1=imァ/m}b) .緋「 bWE -3ノオヲ見d2?ヒ歩ツ」トd(憾エ亨帑7稼#Vサヌオ庚()JG瞎pA7Sメワ % ノ狂\X%、ワR劉$Coト`N嶼廾Xnャ淑-潦!愛g+オju柄ツ磬W?[21漑 (2/j訌hxOs= 「1 ヒpイワレS%MGネケ_p常セ、鑠!テタュ\eョボタ^ン┌4!ウ撚僮嬾莨篩フc'ア匈 dテヨW・nャ抹ヨWヒ|轟ワPK町權7PKma5)antlr/debug/SyntacticPredicateEvent.classu逮nQニソ‘ル.ロbア・RィァユクョmHm!i蕁カチ・Yホ6ュM|>T纜-枦fvホセ撕スス@ ヌ66Qミf_嫦%,<ラ eU,Tオッ・q佝チ<_ス!コ躙礑エヌ"オ?ッ、ァN=んn2推ケネ肖キセ ?3&6ヲRト8担Bハト9KMコ*aヌュ/¶ルみ戰。'?Zサヤ()゚サ蠡抵odZtーャ69Oアテ擣ハス号C齟淀ツ#Z/^チuPGCGM}uー<。&5ロル醜a桾劉ホx゙){q葉リヲクセ銘pョマシセセy剔Tcュ。钁%Pシ  スカOュタl4遐カルレq6ャク>キィッ!ノoヘメ蔚タ {^Pd ケ l1{皴.ラ'・゚0.踪衫テ3X3l舁使ネッbニオロlリ'/PKムュ$ォPKma5,antlr/debug/SyntacticPredicateListener.class}P; ツP懾ソ`ゥXリ6v7Bタ~}Y$橇イシ嚆P (襟)fvfwマロタス]ツ サZe」菓9テ*kNR モ驢ュヲ.悟ティ$\]ト鹵カ' ヒ.卒モツfν カ捏gd# 縒ニ&ノTャク/L傲゚陬& }亟=ロテI厚Sソg?ハ%gメ *セェ=ユ。:牾ニ@マ7}ッPKbSetセIPKma5antlr/debug/TraceAdapter.class棲Nテ0キ4iメB[` B l。y"$ЯAナ$Gp$ッナトタPsィ(Rw>ル醯ッhc鐫臙ロナ死s「エェNレ罐{″"%、、4]ユO1劔茎ョメBモ4・メ丗8勅ハM漏\g睥ネ.、ォゥm穢B豸カセ.スィ'u =*サ「6 ]*[5ウT>ウi2 ルG実゙ハKH`hア0: ッ9%鼻ーBGェャH特!Z<5ヌ_ミ睚ンY]゙pカzハ ?俔i関簔_「ッ%bV{1侶ソイnセ/hNチ<ヨュヲヌPK吩ネワPKma5antlr/debug/TraceEvent.classuTiOQ=熟モ2XhオエeS-Kqテ幹ャBチメ1fレ、、27~Tエ$奢QニヲeIチ&スンホ]゙y/鉐マ゚ヲ濯!ロ>\テ/ヘ]if%vOb・y狠Cフ)HIH艇Xチ 2^;K 貌新ー"絛Iz*Mニト<偖椛ュソgIキtW:娑ヲ_fF6vs!キ碧ョ クS號ルトヨC=Yムォサノ債朮エ趙ロL[レ/オrU.f^/T.Iネ'<サオlwwトナヌ8w2岌vイ骭ルl_ワョ涼カ^ゥ52c'kヒ2ヒユ]∪X\&:GFェXi=eユュコY4VハrX゙ヤ祈ミィZモ2YナUL`REaメL`MEq1トわm;;)sgエMンャQ砕ツCッUャ#ォbc*「S]ツ9ノK0゙-夕リ 1z1.゙ィ伉エ9ツL 鷆ォb&KF。セソ゚コ RBョ:、橈^ゥZ~ー蜈ヤ柧ムトト1トWョョモlTeン.」ナ!筥カ羽4 ク8ヒシュ涵nレ「」keG驟6煮ァ)n・ソ緋lァ ケp8レマ$架ホv~(=歿?ヒケ襍D8zフ" ヲ xホr58i8゚0:叟CTネgユェ組 9y埜ヌjセ: 
ィ゚ミ革\堊疫7ミ」iヘuッ\qン@潭 」%y#ヘS6t9社9wノL5nY「 =テ.PHi:セオFサD?ハハシPイy.ソ\ラキリVーイ繰ィ*クきBョ%q7$,)娠「 ゥ ゥ+Xニ械74dnIク-CツイMnサ 曼倉l=テ黠ョcルン&yラ,ロrラ「Z!カ=09Cコeルケラosg゚hネ0y@I池逡'%ュeリnマムM゙コセctホ14K跣,7T榠睹,qH阪怕ィFEィネ"'皸顎クッV%ヘゥ#゙qマ 累チiW9mf袢1HCqル「J_hリ4i憊i、逆ZBdKユOタ齷ネWD?ムEェ"6$信$U_艤Dコ.繋P2RHR)T靈cクメケAHqY3゚!ス>タ突>)洌q/q2鱒F哄dクヲQハワトi圷2Kエワyレ熙ンメ)覡タノ、I4cfjm,苓=ねソPKw_FePKma5 antlr/preprocessor/Grammar.class標}`[Uン6i>ヨnlJノロレエ]a ォン@GWカハヨ矛 6PyK^ラ@噪$vPチ/安ヲ"礙E)カルニ腓EAナ゚y%mモラRネケ7ホケ迸{>{キ゚ `ゥ梦ヌノクツ\)萢Bョ!W ケFネオBv ル%茘Bセ戝ラ+Xd ?ェdヨ亠o ケA゚壷キセヌ セ'oア冷クIネヘワ" ケユ!o""ーy0ヌ"ルoFシネハク[6゚#ース~ワ枝>wケS゙蠻ン^ト勝?~{9チ~/馴bラ"s=(ニBfソツ静<菁ッシオ`リ#イィ節ネソ1/ 只O苅O ス鞘稱チ=翆=xVチラ{ 隔g鱸 1=セオ。3投ニキョPvGcF峪ョ・巽jMJユS般 $ノuァbFZa:=梔・ツ厩フg「悦コ!レ寥5エニ#FソルHf"E込Dメ\・ツL稷#゙i。ハ)Cン3Vル ュラ鞏b、r]=QVル扠%ツF:扎5ャ蜥 p?W"Fw4nDヤfr=Fヘ早、Ncネ撥N疉w杷%ツ明yタ譏棣q・4>駄俯g埴3-鍮$b4メ6Gq瀦ムx4sシツヲ 転タ剔k6メヘ]_セ使lu鴟俺'-wUU;9ハZ5オx゙ミ'b侮PY3ウ モqュ侍&シクZク・[採懐8コニ)|<Y?gケ岷&z矮・ワ+トハ9ザy=,キラ)=キ@トイEャ2」ラ桜ゥhF。i:wMキ恢「qjセ苓QユSFツ2Z4ン1.ニy侯メ8ミAュ)JT>Y8>邦"RN託2ムw5ユ3ワ9(ネY/W贖ョ]黹'攴5o&a襖t&RaCホテルNZ*Gラミピ4|gjミlFァCネ!ァ9ァi霹V 孅堋39宕 メ榲1癈陪、lQ|LC i、睹xチ5曜8輕+^瀑 ∩S炊ク@CホUL。:_テォツシツ?。ッサロアトk,+4\&V炒 1z甌゙メoqナロB゙チ/eYー4m$釆3{ 4シK+・釜h讎ムDCk{Kリーkミツー'2A^m╂LヌUユ(jェi8Cワ 5tqヲ\ハュa」フJ班n2A諚ーGO#Q9マモムュq=モ2y=N]^ムGBテ鬚pウ蘢]KYァ _ミ靠偉ヒb ケア」iハッJyhソヲ45KSeェ\SウユW・0"チT「7クユ オ ンェ*:ef醤H無:頚=勁-1」QX2>_-ミ緩I弧ヨェmV6綉ヲ)-"ナ炭5=ツ*4cュヤkXG`%艘@_&kh悔メ*ウ~イ9.0ヘy。)」%ッ・4ホ ハeb_アン`u=.19ソウy%モュ-剔酘;セ メ谷ノ∽vノs蕘 メRリ;sオa 悸ァ粫ム\ユュfア洽]6モ4モ&輻kョtt麼lj9|Yカ:ニ駘#%メ釆{。rラスG Lhゥp2?オ貘%3濫]s<ナ7レ#「9nイヌヘ6t{<テルフヨu{ワb渋アGテルr9ナ%=カ」n詩。ンP。 ) ゥク鵲 ユ沼 クFユ滋t梓 g.ェZヘ}(テ。球ワホqV%。オトヨa9)ノョh[メ恵レカe。}亰i7ハF ァ6脚f塾Xチュ#眞Qヘマュcクツ仕+ャアホ「ク泙s&棋ヘナ蟠コセ$,ア「QJミ6k(泌Z~ テウ・!r贅!ヤw8r荷1縮ウ8h}+Wnォマ竦&Wタオ ;拙。 w」ェノ ク9lン85 zW$ワw^ツmK,h* J]H ハ(ハeィ8<#Fy%ヲテMテ"oq-歳bIE5I5シ逅ケoオT4Q(ノ「ョノcDタ冥 &]j7yMク7究&_ 'Pq、pヤ,ウPYス%ドータ_ヌ メz1ー4's,ア「ネX>竡4驛CPjオjWョョfォxユ'タyスオ儲゙Nツャg莊ハロU_]8檠ユフLZk贊ffテ 3車1タ\ケиr5ケラ3n.O1%礒ミkHェHォネィユリヲレq!ュルョコpセJ米ナj泯u|wレ疱(Gナムヘ36ンウ廿yメ#ヤ[ンAハ'ィT+ЭNz#ヤ_ス株fコ愕/ム?蘖゚~クユュf頸O+ヨユ 1k厰;q v遜ヘセツョ##te!R>ゥmhfI^ッ推虜>z吏ヌ儔RmV膰萍c洸ムVソ溷kヌr|!」d コ倉r+モヘ萬ツ1トリコ M67ソダ W67r%テT]ユV?揮z?~ヨ1z・HK ヤ"K*>LR7冩ェAヒフニフ<{Y2ムフ;Xワap゙k嗹。m゙ア剤愍qT*マィAcaUシソ衍ヲノソ瞹津UンTp掻ルミv粕ネM宝ヨノGノケリフオyヌ惧マ:o,ト?IS艚ホL曵xュ4マ浅gヘ辷棡Kp$!ュ像ホW秬"erセ闌_YWノケ,5EナV4ェk$ワ,區-}ヘ」 {QァZ |X. 
O楳dQW%オン」XVШcヘン胙「封{:ハxサヨケム^カ娩ャAォヘZ=巻悛ケ臭b/h悽ケクJノヤ,Nコ 儷fアNV調~ヘdゥノ巉&髦[)ヤ佑1_ロァ8フナEjンオ籀nニ<メラlx暹M^[lAo」_|窘U┰」ェ脩 蠹ェ8タ゚ナェ\-'p3?ぅjョレA゙浜B _「"\PK >キAハ エPKma5$antlr/preprocessor/GrammarFile.class攻mSW~.l\チZQP `j[ロ JKxゥ@ィDゥリキ%ケタレヘnコルX{/L?Ug$レ:モ鮃ホW龍}nv  遁s=釛郛゙徳gウ惨溝0ッネ浬モー#該uXTdIヌIdoY1ォネ:ョcEr:n爬"ォ>メW)オ゙R:kj雷盡誌奄Tヌg\アMゲ< 芥 ハTNテヲ-リeヒ%ウ( wフサfレ6斛奇Yホ謾ア%ヘ.蛩ヒu4キ、6e¢モm/掫m[ヨ、蟠U,ル騫ァ キe&勸Gリヲg協ヌ;1ケ]2).5∴コ6MЗ9棹ス`9?-0x^イsノ寇ク閊轤衒・Jq]z9sン&G7 ルタータ云、'K椦怜イ・Cq ァ{S:メ3} =愕JkB フv^qkーレ7・?[*践7/ケ讌&ルs(捍ス]/B+3#&9hメ^盆ヲ樣HbMムノフスPヒ枅x*\^マ9叮_p鹵フwbャク//ヌサ 繻2ii Haフタi 憊-I$ 慶ツタ$ヲ ワチ.爐ロ@氏ォ來(癇/墳#今^ョllHby(qQ燿ヘ&:S%桃炳*笋_$4|e`皮_゚jホタチタ醸鴛{u シZfhマサF)pクニイワ|ガ~fゥ$截ニ胞餘kWァ^贇ェコェK罵ナ/Uタ∽ォ{.,ッz/Unツfo琳ノヲ塾yロ-3*0セッニ B5-ァR^6ュ諜rッヒb=碼乢wホ龝[w3+Wホノ6uウラIフラ咯5鰡}プイモァRT」│ヤi*S[fyムdヲ篠Yテ叮ネmソ^゙ヌDXM rツ溪Klク卿T[ルォオ5ョ|[オ扮ォチN得ホモ エ ァ曄、-;hMオ ゚A[「キ曄ロA,5カ妾(p釦iAサ x8ニ]?N8ンヨx34側ョゥヌ攻。k゚υkツA!r゙ツ9榕オUョハウQ^屈な」Tェ 」且2聰89拔t晃qィムキaメトC ソ消1v-{蹐/峻8bミa,Zc%蝌i「レヨィz飼s(Tスニ~ZqマgBヌサォ テ8ツmG種!ャ~Lムirァ^8D<Mcモxmエ)>.モ。C。鰆レcエメ$-ト_{巌タ<゙5ヒ「フ壮勣左ル }匙ラh{●ォDJロQョEレbミヤ^焚/d鑠ヌ父Fサ棧iByキ:1ョィェGヌ'#ス逮0 pjwB澎惧l幄mォbP)> 癖Fネイ黙ラY柮クヘョ゚b d(oチフPKiY0リャ PKma5"antlr/preprocessor/Hierarchy.class標kxW~ンl&塘H XJ 巛@JチJ ネ%6 ) 珞・ルIイー;ウ昊PスムjスエミjユZ憾蓑ィーiュJュwュヨヒ駮゚ofイル,K「マ>マwホホwソ无キレ6瀰゙Εユリ営メ2,vY !ワFNE^C F褻ク褪о1ケ8)サSミx0&濛 鐘t ナc>"tク ヌヌEハ'T|RナV緝Oハz&元楪モr矧OヒL櫺Y渾ミg5トyム廩:ァ窕 ウ躑|Aテf|Qタ ホ xQナKa|Iテ列_ 緻「t綢シ,ロッ蟻゚PMTw'Lgソmャ6ャ|レiマ:fヨアヘ\ホvレ8F&c8 エ>テノMモヨ 8ヲY咳sc#v:ァ`IQ羣ム>唹・ロケ他q$m珍j(6y_jーエンW)'嫡カヲャT~宸h颱Aチ掀メ胼wハ2{G3GLg@萪,#咆控ー*:"ァn|wJラフナ"$.ロbDヨーヲ9s]棉シ$96ャ磋シ憧]PハJz髦ロ、ケcョ孳Oモ覊壽ウ慯。btn?e請゙ホ覺カー=tレtoロS冤コスヒJイ俺ナウktQ彡ウテッ:1hz:T|KAqモI ス)モ1愧続ョワN フ<ムヨテ^\シ ェ狂ホョ!57}・ロ」ホ鳬ゥ+Jロ v鳧ナキuツn{ミヒョアロ/ツ稔ラヤ4モ U(aクィ」 ユqキKョ趣盍+(陂タ+:^輔aシ_ヌ~x *セッx]ナuシ▽睇:~"ヲTィ滔x:ョ祟+邂榊9ル1:4$FWy慈メ シナx・ルhrfヨp ヲSナ/u ソヨ\VミTAム>WげV♯犇イ庇)1乳處Qヨ0}!FyH ミ!ソ{8・zセ茶沍?[Z蛸Gテリァ緡ゥ倡亙1ヲ)未V.:痴モql'"オヒ職スFsュ嚏?xG/トm鞐_"hルy9&c0bルHメ萵0咏ソ潼ケ+*Uャe g2コネQカ[ィ4シスv~キ=j%KLb qD%f菜k峽{ヨム.Yjル貝箱 Z垤ラネーG+N院v馬l)抃柞旛bZッE1狼\」ユGp詑5ケニ浙杪53c/Vエ8c踈スXアsモ1fレフ老楪屹c潘珪ヨhf '・-ゥxチ1:b艘lヌワU燃c'ア<ョナーヤz醜臨6鱧"zル?5゚Uマ2,ヌJt杆"@k+$ャ「!#5-D[ナゥ語弑cケシ呶ユ釐エ6?Vfチu蕾R8_|マレ豎コフE>9ロヒ媾nャ椥吁 ヒyfケQn憮(ホソKヲyJキイ*mニ授<余2yヌフリヌMヒ奩葆ノOe0(ッ!wAyンuッソルrラサ変$W G>ツ>@ム。bア幼Pbハ,固ッ*6臼"゙*ハツr抬sタ?k9ネsMb。6 jKuアヒX,枳タ_Z+w,ィ 箱*(ィKエ[チ=┰q疵クユX:4bV`)ナJlチ*F%鼓ャチQエタチヘ8疫8CムハW=Fi|ツスネ0zユX 「>NンqラツE,i綰ノ昏"\Oリ~/ヌュTコ温|}ア1 斧K8ネ゙溏頑゙゙ZワNフA駲QNヘワXタメ゙ッ「Iチsh臟卍渚PGー禾Y>>マrテ:X-攤[q!w!セT .」WQソネMsO崕慌゚ハ菊vーvv2゙サ=向^゙;ネ隣By竓;コ Df$┷ゥ`算#TホムG}ツス>ヌ9゚fgォクゥaM- 7ソし\ Xラヘヤ-icヤ鶸QtKョhュ・ 經o猶5トJ苺椎ヒc1タZ;オLX7c嗽」a&o〔鄙H糶ヤ$竕Iエ%X? 
=l食ゐVム^タ-豚鷲崑ュスゥ~モ$6'H.Iゥp[Sキネeヒ8ィ昼i;h^ノZic昴偉セソc%垤テシロ]cixNmC=眇XH'"d{$ワd# a;?ニメfXケウネ%4ワ▲ールr't嵋蛋スワ*).`[17ヲ鷯ヨ_h|ワ5具Wァ^ナ暉B$'wy 。3W& リ~h|裹躅クPョhmEE1Wコャ;ニァ゙ ^(,3Pc('9ONアN玳x04(テ8テo滉 g4/J3:/ ヘ\・餅ヤウテモ纖碕オDネ敘藉窕F<1#ワ/@ソPK)゙GPKma5antlr/preprocessor/Option.classR]Oモ`=oキオ[)_ロ@チveィ佑/$~\H1イe僕メv.Md$奢Qニv眺チ并シ=y樒惴ソ∫:Qノチタ>ヌ:瘟<ェ:TTイーd=フ「&k]CCCS ルG zeア孺ロ5ロQ炳ス法* ャ8^キ、゙``・Sロ釧As8テタ:aヘ-ヤcラs」'蹊リ譫aユ抄x秬ミ糘ゥ9g」Aヌ >リ>傳マ運マ叺6ォロモリvァ.R嚼[・サ6И vフ9ヨツヨ゙Tィ疸E6'エ゙GAラy飜l 酖ネス6Pタ早%,X善ツ磨Uヨgァソ]^:4)r樞/ bS`#チd ソン偈ホ辺鮭ヘリァ=:゙@-)ネ5ル゙B卅゙"フメメ6Oi<ョヘ_/監5>C Pエn ャテk(V)ォ>Fy%ョ#C\・2サ鱗_`ソタ<廸レbャ;2_o[ユ箸 l&H漂i恢(Q囹楓翁X"・{犬ネ;#sg^詔 #拙ニN耽叟嶽Wノ箔ゥY}匙:吽[-メOィ殤蟲c臈b名>9ァXニX vDワcUーPKp)lnjPKma5%antlr/preprocessor/Preprocessor.class tTユ゙gL~ツ !$塘 `X$!棚 K@遠'8 カ[[セUュV\c]A`い竓kォオオュK]ェヨレンjmオュ゙L&テ Pホyゾサヒザ幤メ7>3= ツ2S9AタWNツソfo'┸ヲツA46メ Zl ト sネト<ヲpO癰ボNケ T'訛セGヒ#Q羈Q28a"伺d 瞬 yゥ[ニq<,携!クW$絅'批$享P雌避ネァ:畚懈 ヌ0舒`)諸1H9*郎%椴`散,mヲ孤ハ8ヒ 却窒ウ;チqャ00 ,トケ2ホ」虱0/ホg蕊sッ噤。 Yタ"'ワ暑Yタ .aP」潦k\ヲ立3ィ賭ヨa=ヵ V1h`ミネタヌ973+瀟 モッVpラ:[_マ3ラ闊トHudlS カ+リ。`ァぜRpウ!キ(xェBサQ0「`ッぉ)UーOチ nUtキ)xめRロ 棠澪 椚9 椨炸 椡 ^ィ潸 ^ャ烱ョめSソマ嵜範|.gp+\ナ瀚ラ0ク銘u2^/dシチ怨」-践Am0*ニ 荼ナ"ァ=セ`ギ0jCテ籾ヨ`U8リモUEC=]Ckzャm&ツ$帰G$ヨ;b。HO_Uィサ7\オ4」%テラ; 5 = 3 ;n6w6o6Aモc跏F幡ョl く6メゥモDCィGwキム冒{炉莪Q~iY哺「zo$F」(ツ靨Cmツ'%d皐WO-矍ヨ。rヌX組Y公Bホツ-[0vHMレナメM幺閧z&妬'盗zー;5テヘチh lFホ槲X 碁ニ壘ョhーサ;]b嵶NP呈ムH゙ラ鰻ュ鰉`エc 2ツ吁u沍71拭ッ゙ー,#ヒ:S黷イlセT゚モゥoモ;ラミ`$JG粨レ:Mセ rxスヒァ)ワラ゚ォGkYByCーEルBソカFls4rz淹|=a埃h、ソ予"9`.p%チ榻ーN儲}ーxウ楓mX6|l>+ 沱マIテ習テ遖碯メp 2eaGリr_ゥヤリh メミMマル彙l?}dー![ヨモwテ%]。ネ0 漆秡仂キ娯ィ゙鼡鱠ッ*シッェ Dナ巵ィ簣x ?・a|箝ク]ナロミリk*゙ホウ?c「ラ砠*<将xB゚ wェx゙ュ゙」竢^S~|@ニU|EPq'モソソAg楾/イ&ト殴&KホワV畩xOナ]ク[ナ=ャS>LReJ *ツ*<Uワ居 L=`?Gpgセヲ、2宵ク->N。ML(ャ液ロU|Iワ虹矧Si墻キ・L>テ|欖ア欸'I?byッ 3ヒ銖JBヺM抉Gハ!鯖3u!メ齶Bヲ #寢ュТャ站抽/ウャラ巳ノ2ッxa蛬ツ66U>」ァリ険睹T-|ィツモャ3ー髷1椛NJスムミVr<ハツス甕 ア闖痺萓怪SシウワラU9K7鎮ッ ラワ{゚R畉)孱_劇*<ヌェ勺躊琲3゙`4HG)聟*セ膽ホdォ ォs舩c尸 ?マ#ョtR.慄。鑪ZホYヤ藜ンミミヨ糯zロ囹Vユャヨ,」ク-1「クッВ哦シカ壗愈&瘁ユKォシmK嗚W袋トЫb、["トツQ「o矩=<` xノZ啖y}カ@ウキ553!。-5レWM P_輒ッWjC+ム$マ」兒'Rツ:喩ツ輪齪α.ゥァ」$ェヌ」ニ惓 モ゙ 欣クスカゥアア$mZblМI=ゥヲ匿 ゙Fッ循9 ヘ5~ッ衝ウ:yヒレ奩mォシュk廏エ漕レヲィカ@m此 -ヨメd毆mCスマK=jーsレ脆Dュ]Q縊kィokネ-z_ンミ ヒ鷓ュuVァ隹E ソOル;゚Uワ#?13α0+α1 >・L愧ノSウ6I謚Sヘ。4FノムrF/ラ弱ゥBRC.U[|SP扞*キhコYg鋲コヨゥ7jトホ(マI港誘ホq紅ワN茴ツk#n.沾ィタ惓ナnノ#C檀ィ+ク框 v!S .ェ ンd2"冪クZトヤヤシ4軼ェ,S_0立)メュ:問a粃)|;;WミェGk:フc)V;fォィシ6>續蒔d3翰>sァ>ミ;;-uH「軋ユュヲ%悗Qカ3ョ=*W9R・ニ3Uヒ R~nス,9=Sメcチ。#フ#G?ュ?K{%$ャァ、ワGu^hS テwm8%@墫2蕗9"]1%G_{泄ヌ白ラg,JゥモM箕$-;t*jn。"#m讚晁嚢$qオ{xァsn4爪ィ咐 誉悶0桍Fu2メィ/ア&ァロ@ヘク$ョッ9^Gュ<ヨ/!タOヤ喇9J>ニuカ&ラヘ・T%ヤ讌KネT杠 オツ。g ソ 剞ツsヤr ェ」サヘワ災;9sTヘゥュtテハイ50a/UT宗..タヘ・オ・Rヨ>i}浚セO[゚gャウヨ9{タ>o|笈 "チ[ I釖 <艘ハ@゙>ュ4カ;キx劬6ふ@c`ヌr。'η9SAv豬#I膨 V、rメZコリ応8リ@ハ yO (鬚迹顋P Hヤ"゚kソUソ&ソ"ダヤ慟rゥゥC候メ/#ニ^イ レルJ奠ルム)嗤対D粡'EW討kiЭHソdo伽橙 瑣< #6D殪フョ'」m<8宇__XエSャウ=蠍A5D4ロ郁CリdヨAt$L72」ーム鯆6Qネ U0a #1Yу屋寇タH5シID橘Hm冉>ャカ{lqォlァS、N。マ」トチ]^q敏 ゚7勿g8泅ヘp+l}姆ュワCマナ「j}0アユ`ヘ8賑ワ\ュ身HlOLqオヘm;圀>マPチm湘d;Sキ昨池7QL8hpタ俾;'モ姻1d駲(ンIコル)梟ゥ-6Qトhァモフルd孕ホ・ム.「p鈑ミラメアワ[畄ンWテp3ワwテ]pチ0@Yb'$サ(w=P`トノ ツ[F 蹄6ル巫ル命卵ヤ嘴mロeュ瀛クhヲ ロ甄ヒ.5ZXmク ル蟄|シユ 6L%枠オェ8フシ ハm{濆テN胱タャ<キ8x恭`6/8x9'0sl捍8nフ-/゚ s0昭テラヒタv紀x>クbル5vnァイ侠li'ロ膕E{/「ル)筏ハ(kヘ$;ヘ・RO゙B權IQ「hァ ク|fイ゚ス|ンAオ.zb。キ>zO課モャ.シgZW哥ノトOノカ萸X揀ユ2V+鐵Kサ:碾jV;ア:7鯢{ク蠖nヒ{ヒ巽絡ュg]嵐<サuV.gr(g顏 9モ,99万\j%トォコ:7ォ\b`/斟_ィ笶,lン曲ok'トa5ス$5袰_ハv弥゚゙V株衄 朔nヌ ヤ*ンケ{`融i}ス。貎`$tモオ檍&欣p」y*9メ拑6Cハ;*ンd桾C「(モノZg瘟/キR9」&=.コ[ワ貧ルリトェテ%C瓏y-|ュヘヤヨ&)O1。メュイネ捺ェC」!5セ駛'ヒ4 @ムhユv タクCH.Tq珥O8ニT充干f干J竺$b 5コ7>ァシモ?タ_ 、レ+括ッa&チ蔓 ミ腰ヘ閼襪'qリぅ4,モq4怛c滷 裏チ チN サqy寐=&q揄ヤ-SNヲ復HナV穰ー盈 黍C8F乙タ-ホ*q>/.トチ*q1ト5W@昇b*リ&ョsトオiロ8ケ 鰺。ス5ソオ)v耜ェ7゚ケ・サX>ョRヨミ゙ィLj。崑.カテq維チ#Jqフテlフツbア#Mサ゚%エテウI7;ラ |8欧qモ^鐫ヒnN怱ヒ掏ワ諡nオ,<スン挽2ヘgZモ殞MツW イハa上Lk|)エ鮒ヨN- {OJ鯀I{"N゙2H゙イ&杏ィ黝nOチ稱クG<Og)<+~o姐=|$~_?タナ燿糯泙K、ソS>Izヘ~+hコ個N`\Jフ、DeQx、ヲ尺6モ6邦叟杷ハ6Cヒ-mキノlk&ォノヲi嵬FァL贖x<゙Cヤ陀゚桀マ]ホ=ス轜毳f?チt[Aト顎ゥキBナgf慍"淕ホマュ>贍?$エツ/eマWf|mチ7Vエテキfヒ咳|'ソキ NZ」カ熹ァ-?Yウ$rF_$8+ノヤs I $P$P%0I$〆チX%詑ニTィゥュDロI`Wz荒{ BヘヤAヨat箱 ヤルJ猷掩ヒVW3u3Sw+FS34S/3FEX鮮}フヤラ概麋E&養FZ)瓦蚌DH0P 筑ο#!V菻PI?VvヌノYイ5Lカ,4\"#$2Rカ%%A廷K妙 FK7、J4MU猿惰3,)w+{ニノア, 漉 喇。l M湛溺ァオァMニ?サチ=ソ錣ァvクgユハN7,ッ陵ラ4ミルK[杳Z擢ミメー悳エ-フ+ケネ宕ヒ溜)L描モ%(秦3%%チl 譏ゥミL李ゥネLs 
!看ョ<僚p。ァウィワ]ZS\QZ*v裕埜ナ粕-)巧+q撚Mmで6チ笞`MーaMー&リ&リ&リHはgwf7(ゥ、シト截ク,ハケィ闃「抽豫%ユ【建!ラ+eモ)Kuレ門MI拇)ヒpfLフネホ力Iヲケ,5=?kR6X沫泝=カミ剳汨幀$エ)K履ロ[ハイ3ヲ9ウイ3ヲイエヤアイホヒ佛墻(ヲJzケ餞rイ衫Iヘヘネヨ祁ロイャ1途 'dL對;Fイ愬0エL[」sZ璃圀)ELヘヒヒ嫋;.ObyホB/ラ!e}0オ,#/]0&kl&モt」ナ4rヌ6HリヲlqoU」「g樹鼇ヌ痃~xシ>フOテ#然I魚コゥ%ゥクヤpT%Jレ啗Q]Yハ,掻リ)ァメオ、イ「リUUUQゥ9ラ`髣サ?i+ワ.ンmx 習背0ーqp;。k N*ァヤリィ俶ルネ:-ェン%・1繩ェコe`ネIウム|Z@險Kヘホw賁 V%袰エ、zniI1O・編「4nラ*アム"ZフV\ZTナ膺XYアャJvケ及凉Ieノフキ8l\褫x,礦ι5旬+]ハrmゥヤヲv5ю巷%容TnC-麝TAKlリヘ-描%8 チA IマK「/IpXぅlニ-%伽 )Wノ剤TM隴ォ#サbj花Tjゥ;ウ「RZ 」俗+h剄盲 ]ノーメLォlエZ蒙リh-ツuJケョ竓シメKひマ釛ミイ跏Iレ]@滔CィォXヨ$湲nロ ロ*コZnq漉Y汢>モョeyyレu6゙Lラロ)鼾t ウ6アUェfv<-]ボ,コ7ルエ^頂ナFキ2<+5\コGカトHN4y!IFレ`テォrマxd゚mrヌラ$ー\溢#/ヒセmス摶ア3-ヤ{ンAmxTホ}E2fョミマfンI乢ュ穃cb$p0pトリpDRz]N゙5iテxS閣キ oノヨロEX$xW$x_D| チx寅Wァスト躓 8"8黥啓\T^.「マウT3a%ョ*zォrシUケ8ヤリユョp9Kワョハ「R朏3リpU.wコ0殘・゙斛%|颱流消G98ラ/ィ,萼0ツwン、ケ宜ヒ」{「サ勢t1ォ|ヲール惓:疸;赫Aキサ档ハ\29ミァウシ跛%m虹" .+Z2Vルyミoaネ夕樔薤eャ堂ィ錙甥Ov}フxzEiuYケL^メ妖iハルJ'"ュヌニsサェワ 1ノ7S6W5Lル棲4w|h*%^モノヤゥ1G5ヘゥ&5]geF&イG-瀘:6cbヘ柤ヘ]>来>ldUホbkー>クハEソ-ヤィt-ナb髴ニ赤aテ| ー栽Q3モuヘ區>u椄VNキhロdW|sケkYゥf*M゚ ラメjンマ幇K-Vm}.:STzコ銭レUレネ5s|T襾ワ p/Cコネロ.ワヤュFQ?hヤオヌィ6鶉頚娉?ェユaツyテ'区ェ犲皇ィ`@D肌エラヒ ュレ`/~公ヨ卜of*吏唳ヨ廉ユ=ユフイ ケフ\イシ,R[V」/」襞リ較蛋コ VZヲb+ィC嫋hリ恩{ミョ ヤィC=-ーヨ。C-ツヲy:z,C攵C:{ツ.オイ{C]スCコ5 Gコ{u15嘯v壗Mpーv06aト`1: eヌ亥D)識「x|渋d紲}$3リノコg+ィO!H>:2rメD%ナQメ =キb#フe5o洪d,セK-ュ.0鹵淦,涎?_ト>s拇爿ニ聯J蜀f5キM彪ナヨロ鬘ョ0生馗 $太:s馮 檪、ソ滴、⊂&(yV"ムイC"yH" d泥ネMd k$2リ@*5゚エGmFト8L菰bネ&リePcメD$ュ;ヨ鎭ミコC6」{yシC「テv。キン 。@蛛畚FpYトr飫9![オ補鐃共衛#Yヨロユ=]ミEュテo$ェ]ヤォロ{石}マ"m'ニヤ!翅d2^恭uwCホェtS造TFハZテ3測盍NT2eュbァ;1マo?頴Nスz')?R屬p岩ハタ堙Oェb呵q 「2モイミ~イムAイモ1鵯ヌゥ}Mン!レS/ム越kmツシ、j.セサチナノホ.n贄收.゙;YIメスラfリ^ャ#サ敦ャuk]ゥz廉>ホヤエV嶇徇ア=鞅1Hン"-qN_D檄M"R?。ワ鼇カ錯4]iR モ ァXL8膕ェi$亡"VRVS2nワB」・b・*L~}ミuナiウvツWW]徒0・ムム{jアエ擽F{ヤoユ2ユS・館_F;Beイ朗 。ASシ槎祟C 彷傴怪柢「Eト'Z忤irj"DhQs1Q]ナ砕7!ケラ7礙ソShoD|シFトヌ憎dメxrオ札檄チ>県gggァ%ヨ|Z]゚ムフキKノ /イァネ 擔F黹- Zラサh]エョzラ靉 イ熟ュ#伺ウヘa゚+Bウヘc寶,*チ\*テZ喚ゥ { 、ex朷%Z縟5Nミオ|p]GV=R7R<ュァ$コ曇t7ュ{hmヲ5|ーンIモスエ刻カGh{弯'mァ)vミ龍得」g,*エ_Dメ÷ーン=稠a扮マwKZq)ウaサテvイ詒&s4n、(.E\Yチ{+ケdI广`脈ハ#庭~v;ョ{0FヨC=コマ_材ヨニチh-fカ曜キ9靂ヘClキ釐n/"歐Aニ,:"z->7゙F)ス・>ワWャソa}V)ョ。リ@タ=vムキ8ト睛ロ|I'ホ埒゙胄ク。;}゙Gラサ=f3:セcワウ<,エ゚佚巛中.H6ァSャ」モャ」3謂シラAH@ヲP8ッ5」亜9"nホ766イワ>]v夥+サ厮ル圃鐃スY6n=抵ゥQ涎イY}=麼Y騒蝪7ヌミゥ_UtOCムr&跖罠ラ=ユムツ^<レ,5エア^ ;7ホ,ョ栩ナa_ン メ t站フムmQ&コキG冶ハ俸琲チ型Xマ諸困vCe溯D$>ム"&b)\トQ、逢AbMテ)G0~ッS2戌J_テFイa#シ抛hrOw5(j w65pL滿*nヒOュ惰ヘHG1E&簀X$紀H軾W侍見"余7+ /ンッtメン+].イWケM、渠メオxケEカぺ奥bダ!「ナ槐bマaア Y"{.駈bqK0祇%b1;B)nオキトヴmg粟テWdoP.mキハ;ル゙Fェテn焚ナ功NウセwワZw冐゚aッ蠱Uャ啼ヒ%V`ークテa=ZャF-?鞅イ関ア完オソラ8Vnト聹fヤ?xノAッ流1T5チWUリKzキV# ?'ンハ"l`ng゙チaス鯛Nf.wーvィeヨB端逡ァウv/ウvウv?ウカ漂{Y{榛c/ー\ヒャ]ニャ l5ヨeヨcヨ枦ヨカ3kO1k5フレNfmWkマキフZサ葷9ァuヨv3kO3k{匏}フレ~fウv榛;タレ -ウヨンxーエk/0k/2kッ0k匏W匏?3kG亰ラX{アeヨz$|ー&ウウウ.ウウ>ウウak/5ーヲT隸zq/k=)Q{ユマ董コユbソ^]゙3愴6錯゙ロ/醢]P球(メE |Q・奪xC*ミセ1h募,彭、踉y4Rtァ造;咲辺HユヌゥケモHヘ戳*~O]f萩i|ハユ?gdJ{Tメヌハ サ2叛Pb+%∪UFムI%円RRDG%M8 1YノハxQLウ祢「Pノ.e街ィフユハ,アR-6)秧褪シ稙o(ョユO E ナ琅ト(フxM^ゥ隸SツJ庵ヌ樵Fqリ駆Wヨ「ヤ!{ェ}ヨ邁j゚-ホェロ、ラ昼チCg3.ークZ(卅R゚wイQ嘱5蓋gw{~qCgpリリGj}嫖`;x9クヒ.9゙!彡「!ハ<エU脱YJv・:(ォィォイ營(ィッrE*ラムo縛ナZ{e」クQル"nSw(閲昇Oy\x'ト6・F<ョヤ荷庇笘エ8ョ<#ホ*{Eル岷xsニ+}olj謨~{・溽袰埀休GキN^陵ェ,幤 幤y)ッキrYハォ豫シ )Gア^y聞ーKy背咄.セQ>ツI8゚ム SN蟲梧ーラT+ S5)ョmr繽>diウ_}tヘ犀弦lq<ニリ~.ーbヘ2ゥu)コYチfは:+_」サ +? 
N9ノヲハ),UNcカ(?シ。愬15ヌU3セV-A ヲjuRmェ7ーケo|}。蠏Fb#5爺ユ帙「ソム4zゥz怨 Pロ!Dオ」ュレa麁vA産j間>(Wツュテ5ユィケuヌ紙ワッハン丞ョハ星`eケ躱、スX=ン]ツサキハu+j +*マニ開タLuョVG&5封陪ヤt<ィ蔀sj&^Vヌ(5ot二テモv_EQ;ォ皖ユR1泡'ミ?セi\ソpjヤアFgヤケ‖F拜ヤテ鉱欄ミ?狒マeョ釦ケシト・慷V.表聹"・ゥf\/縒ヤa/覧%ユ9瓢 ンュ5tリ輕砿Z6}曼ク/^ュアN遑褝w0U鞣{hセシ}ル冠コg ルろヨネュXユ`ェF/>イヤ;9レ;オヤサ%誦(ヒカ徠フヲマ{眄B\_ニ;%`湎ゥ鋒テォ+タロレa駸黜視 ル9リォ趁椙ヤ騙U-タu6゙V鈞]u.ェaコ'ヤB-ナキjセW冷エZ押ユ+慰e、ェ+ノェョ「6鷸槇ョ翕:遙^O呟 4Aス岡ヤ嬖・コ6ィ鏖u#R「ヤュゥQ=「pィ初tuァネRw瑛オV茫ubココ[フV劉ト クN=$nRロヤト縄qzト{ッ3カ伽q杲カハg V秣V稾隍AuDォG1V}Nュ訖U・フ9G綣ハj2ィソクー"ラ忰セメメ墮,レPシ:beJ?_kFハ>Sk"@Q6゙サlシc8」~ト|慘麋ヤN=A。齬ョ~EンユQ・ォ')KィァィT=設m9cッ幃Mトg&3[ク:{sケ セj/0?Oソ$箋[+經fウ彗ウヤ#フ、「キノムヲ 5YPa ニ2Sヨ嬪リhj犠ヲvリgj粧L。ニ雌ヲョDヲnd6uァ~ヲ樔03Uク)ニ_f12サ累イ70T螽I]Ω「撻kェsョ!ヤリ/埜タヤ&S?傅」))セ螂8&ウD7&Xタ門ィチ9&.A\フチ9|ィimp入蟾[潴ナ5ハ|ナ2_qL/-P\yA3ナLq$SLd奇x)&程4 ォマ#k コ僥3掫スtイ ホ{9[薙`JrL-ラI~3・3c仭 エ7e「ッi,モ~ッゥッセ ゥ'0U'S敲Rg3ナ[璋Z際゚]キケL7城3キS俚ゥLX エッセレ3偬Lヲ=喫マfレs偬邏ヨ垠n<招豐オ鎌ホ遉sムYタtr 3蛉nルサqKnンル!NケMC&ネ5d且ワョ!モt 劭#5d試ワゥ!E:イIC跛ネ]簫遜5、、$<mL9>!メ売xS*RMYネ6藾ヲi:.GオノォL休ユG2酩PK<滷クヘ APKma5/antlr/preprocessor/PreprocessorTokenTypes.classmヤWsレ@;c゙Kワk奈(奥┳轟51倹遶! *ノ%匸鮃キコ]cl殉Iイテ$フYBリYホクチル゙lエ#ソウ 4ホルクU6Mマ稗xヲmエシミ2j)チル進クBjヲ5ソ左革b!、ァ魄a[*リSNヒイ)シエi谺_J)ラサU~;゙ャウ#.ヨヒルrp阪8。承NG ワマYソk蛉NI*4M*§ォIW9モヌネY槊;*2Н、^匁。メ0g」ラ・Za3 ェQpャFHューvGFノZgGZUEヌミYZウr點\U92 イUDg砒&匝{HStネ'a{7ヘY溶禦ツ 7クィュィヨlィY:R?ェ棹ホ[3紜フsヲ( +ヲ`4)ユLnt[マ某AVl淤 ヌSサlラstMfユツ2鹿Ov%,Q1 K(ョがォ殻\ヌHd鯛ュネgz^奈ユ葎リ&篋メーr4コ・nイ漲6謫1rFシテセ萵Fサェf,ャルZ=瀝セ8號ミn7CW]cG+h曉>g;-ヤテT倶、g#齦Tンo|MルGァク5ク黴ヘイ┝ヨ '閲濘>ク毋$g烏濂1x忸$桙ァ曳澪9h栞^$セ//テ+トォ:シAシ ooテ;トサ]窖}=8E~H~L~J ~N~I ~M~K~Oヨ意ーN怐qホ軋#\ 6"アロト%アb.ォ?vV押PK_ツチPKma5antlr/preprocessor/Rule.class攻kSg~僕Hヒナ jo 新[mEQQT*机&,ークルトMすェュヨ[スフエモi3?ヤm;モ':モ傷隸hシノツイv尽=9=sホ謠~@ミゃ lテヌBqJ意3粱ャ辟8ヌ>チEtム習~ィ".トィiB 1ヌzォ0ヌ、ミ羅06瘍飄メ週W` ネ%ヲpUk>\~ワ甦nI信5。IMェSjヤPヘ鎹ニメヘN ゙糟_&Fオニモ*--瞳フユネ2"杉ー淡モC)-ホn齷捲4%)チ域>ユフV44 -MGDハ因團レ5mtニ、ナSzZ城椹.。V3紮2ヘ珊j"。ZD)KKYノク鋒'ュィeクcン、ウハRコ$-K丈bYVキ{?%%5}コゥ d1ヘVc-セq-s O゙ハ坊7;_シァ・la=撰H5ソN^EテH MASオリヘ*56エクユz2kh"}U垰qSebQ_コX_ス ]禀1_ ラu\,餔」カ・U1ョキdH\遉オey}岡モN^h.僞ア.・ト櫑ホテM炭+ョヨEo*+簟 v皖アSA+ツ "Bシヘ#mbル.トv9bゥlフミ>ワVp1除aレ(JY爆ムテ截b゙SI:DHX$シ;;6ヲ閃發・|悉 N引ス澑Kk" 'ュ#ュ・TK%1>ワSウ5?P楯xャ潭t)xぉB.}*Al碁(ャ~ヒ n.ヤ<帑刳<楊Wイェ癧ェB@~ヒユTJ3IW巒リ-1磯\シラtFKU箏8n%S%壥シャ|フエe9ヌVh孟フ マ*゙瀝F7「=f6。掃F2[スォ/ツ夭OZZマ|褒Sサ務緑Wチ&ネチJ存キア&>- @耆/YLp^キルコンヨカ尺咾槞ロ5ァ4ッyィWpJwハケDシヤ冴ラ青杠賠#ウロf!ロg ハ_A nハ Qョcコ*ミjャGュkミHルDッ掟=5o入゚)ハニヘ莫hルKt>ラャgヨ蛬Wィ゚ 抗pV8。;(サqミ゙%geヤン 膠9載引P^タ+ソシフ;ェ且j:ヲKユyラテ>ィ、ワ&ヤ燈ユIヒ1z龕x串4-}鑿テ{曁)箟メ2燼゚ト゚ &-逼m糀ミrワ_眛゚%-'鑑,ス∞」 i"X゚';チнチェ3ッア:ク&晴S蟆カ_欠 会}ヲih7Xソ[カ"ー G:a「'クセ整 ヌxィトSj/"ロ 歯6悄゙9ワ* nxェ。妍Ehw{C゙涓Iツ4ツ%aツクYツoリ津ヨメー,ベシ4kン6..サホZ瀕kd眞錚 盛湃ヌ懽'S|夙x鋩 _痂|-ユ)P・N:司i /伜$耆0PKEmkzシ} PKma5antlr/preprocessor/Tool.class晃itTWンd貯 ッ$5タ 栂$Y-a) 。糠AァZ^fn&g゙゙シトiォヨ・ZPサ`mDQ[$ 6「ヨ「ユコ~サzN症sZスノレ=sホ]サセyスォZ0v縣*q$フ甦A|:qッレUΓチ*$ BアCRsJ テa8ヲマ(;2ハ├6vル0籾クレ8a@n\蒄p"投7トh >オサ_ ィ眞0ツテaヤ・ィ>ァhァ毘ヌBx<フ侵ホィJノ朿/ナ*| OeEJ_UッL_Wヒoィ畉ナ<ト9ワ7ユュセトs;,1・c8蚊Qユ]螯捶ヨ塑ヌNネ\ホv簽6齡cd2ウラLヒ##"]ヌF)者ユー渣pベAテ8マ疲sW~hH:ぐ;xIテwrcセ セッ|Aタrヒヨ貴メ」寨ハ9dハ、(ム鞘cu7テ 瑤サミョ瘍Q?ヨ滑U\bI襾ヲ'ク ーハ」孥ワテ夬オVイc$!ウ я 「[カォ)ョヒハク ォ ョhx W5\テO5ナンAァラUv度Y`。2メ囀Yテ12 ?テUtハug,・\掖LW靡$kXI劈[ ラs ソタ/鰄zZZ)wX7 ラヘ覇ォ,[[[5シ。ュhI・ヘチ ~・甌UqqDテ Z炯升ン&・替O垓ユoミmK7}ロ|N Gレ ゙(kア6fフ\仕ンホサル<ヘ指曦}[-カn{ヘミ[Uタ゚ゥ疥椌ク鮹Tンユ?祕[#苫ケヤモFホ1ッロシs憺ゆ」]x去ム+3d0棆ョゥ&」U厘詮FンF6+ュ、@sc3 ィrm"ークビmウP1ヒ!サ誘;u祠_A勝&~ェ\ミヌアyェ軆SJ.$モイx#,漑毆ヘtシテb揶Nユ#コ)ノチa#ラm;イcレハK失ナ戛ォ廬$椹モルメ1Fフオロlクtノ_ソィ畷エ|オ`JコjKノコ虜fケe$ォo.!ーツ窖ウラv:osテユlyヌホJヌUマ薙」 ;ヒuエホ、NユJ酒 マ71xメp,移+ヲ) 剏m、woVcァメ叶蕪}セЛマPゥ^エ7マカB(堺J$]6憚ョクン沽ト-ケ\ノ@=ソpvウャ鶴%|ク ィwニ孳A{3ho賤簣|9W@ィヌ阪>6」 蝨ォ」ムリ8ハ「ヘ($¥s訓Xハqェー ア'ミW4p囿 ホスラ陝@Eラ5Tヌh-リン\@ ェ橙ュ∠@ヒ艙ャソ5,ケ・-オ` ィ.覽@舷WJ。6ミー !ャf0k莞マーzメラ2zfy+YOiセヨ~Pb!1(ア4zXネゥ,イィナケャ}ムkXメッネX:仔鎧タョリkX!pロクX)ユu瞳栗ャ{o=Ja*5憲}*゚イキッ< ァ"(`フr]wウソクオァナ_ャ?\m因_Fテ@琲8」o 陟qォ鐇R@ z1UK?ェユo フ雫eクネヲキ=,騏シヘム\,カDjeチ巍RZq籌レHスュヤワLm。mネ「 #リ収A v vワ臥(}□シホスA(セテヨシK =bzE-Q$ヤラUI{リイ2ホ:coxu]*Xケレ^AォEze6'浤ミツ。)l階エHp吃エ\ユ46Iォ4Rセ」.6y^,苛{sトセど リ8ル蔟dラ%l四+Iュp[ルE鬲橈ロQオ9ォ ンTモvGゥ2ツイ2sサァロ=ッ蔬Q>6bムヤlテ」ヤ+!ゥ$シ8ロニp/lッiyヒKネ斬G樋8" ホ7PC撰隸 
antlr/actions/java/ActionLexerTokenTypes.classPK#ma5Pォ鬟+~&Yantlr/actions/python/ActionLexer.classPK#ma5皮Nー,0P;antlr/actions/python/ActionLexerTokenTypes.classPK#ma5V'x` $ォ=antlr/actions/python/CodeLexer.classPK#ma5ャn。N。.]Iantlr/actions/python/CodeLexerTokenTypes.classPK#ma5Tァm7%サJantlr/collections/impl/ASTArray.classPK#ma5SコB*ELantlr/collections/impl/ASTEnumerator.classPK#ma5ネsゥ∂ #゚Nantlr/collections/impl/BitSet.classPK#ma5;宕* \antlr/collections/impl/IndexedVector.classPK#ma5zj,%^antlr/collections/impl/IntRange.classPK#ma5ルZ晢U#ィ`antlr/collections/impl/LLCell.classPK#ma5\ム懊A*鐶antlr/collections/impl/LLEnumeration.classPK#ma53ツ#"#dantlr/collections/impl/LList.classPK#ma5ァ泙y#5hantlr/collections/impl/Vector.classPK#ma5Ws.$mantlr/collections/impl/VectorEnumeration.classPK#ma5ヌ*q-oantlr/collections/impl/VectorEnumerator.classPK#ma5熙9幅y!鑷antlr/debug/misc/ASTFrame$1.classPK#ma5Jオカ ア 7ヤsantlr/debug/misc/ASTFrame$MyTreeSelectionListener.classPK#ma5スIBソI黐antlr/debug/misc/ASTFrame.classPK#ma5v敗 4ム$zantlr/debug/misc/JTreeASTModel.classPK#ma5$マD$antlr/debug/misc/JTreeASTPanel.classPK2D怐antlr-2.7.7/aclocal.m40000644000175000017500000004771010522211616014452 0ustar twernertwernerdnl --*- sh -*-- ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## ## This file is part of ANTLR. See LICENSE.txt for licence ## ## details. Written by W. Haefelinger - initial version by ## ## R. Laren. ## ## ...............Copyright (C) Wolfgang Haefelinger, 2004 ## ## ## ##xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx## dnl dnl =============================================================== dnl A couple of Macros have been copied from the GNU Autoconf Macro dnl Archive: dnl http://www.gnu.org/software/ac-archive dnl =============================================================== dnl AC_DEFUN( [AC_CHECK_CLASSPATH], [ test "x$CLASSPATH" = x && AC_MSG_ERROR( [CLASSPATH not set. Please set it to include the directory containing configure.]) if test "x$CLASSPATH" = x; then : else echo "CLASSPATH set to:" echo "$CLASSPATH" echo "IMPORTANT: make sure the current directory containing configure" echo "is in the CLASSPATH" fi ] ) #dnl /** #dnl * Test.java: used to test dynamicaly if a class exists. 
#dnl */ #dnl public class Test #dnl { #dnl #dnl public static void #dnl main( String[] argv ) #dnl { #dnl Class lib; #dnl if (argv.length < 1) #dnl { #dnl System.err.println ("Missing argument"); #dnl System.exit (77); #dnl } #dnl try #dnl { #dnl lib = Class.forName (argv[0]); #dnl } #dnl catch (ClassNotFoundException e) #dnl { #dnl System.exit (1); #dnl } #dnl lib = null; #dnl System.exit (0); #dnl } #dnl #dnl } AC_DEFUN( [AC_CHECK_CLASS],[ AC_REQUIRE([AC_PROG_JAVA]) ac_var_name=`echo $1 | sed 's/\./_/g'` #dnl Normaly I'd use a AC_CACHE_CHECK here but since the variable name is #dnl dynamic I need an extra level of extraction AC_MSG_CHECKING([for $1 class]) AC_CACHE_VAL(ac_cv_class_$ac_var_name,[ if test x$ac_cv_prog_uudecode_base64 = xyes; then cat << \EOF > Test.uue begin-base64 644 Test.class yv66vgADAC0AKQcAAgEABFRlc3QHAAQBABBqYXZhL2xhbmcvT2JqZWN0AQAE bWFpbgEAFihbTGphdmEvbGFuZy9TdHJpbmc7KVYBAARDb2RlAQAPTGluZU51 bWJlclRhYmxlDAAKAAsBAANlcnIBABVMamF2YS9pby9QcmludFN0cmVhbTsJ AA0ACQcADgEAEGphdmEvbGFuZy9TeXN0ZW0IABABABBNaXNzaW5nIGFyZ3Vt ZW50DAASABMBAAdwcmludGxuAQAVKExqYXZhL2xhbmcvU3RyaW5nOylWCgAV ABEHABYBABNqYXZhL2lvL1ByaW50U3RyZWFtDAAYABkBAARleGl0AQAEKEkp VgoADQAXDAAcAB0BAAdmb3JOYW1lAQAlKExqYXZhL2xhbmcvU3RyaW5nOylM amF2YS9sYW5nL0NsYXNzOwoAHwAbBwAgAQAPamF2YS9sYW5nL0NsYXNzBwAi AQAgamF2YS9sYW5nL0NsYXNzTm90Rm91bmRFeGNlcHRpb24BAAY8aW5pdD4B AAMoKVYMACMAJAoAAwAlAQAKU291cmNlRmlsZQEACVRlc3QuamF2YQAhAAEA AwAAAAAAAgAJAAUABgABAAcAAABtAAMAAwAAACkqvgSiABCyAAwSD7YAFBBN uAAaKgMyuAAeTKcACE0EuAAaAUwDuAAasQABABMAGgAdACEAAQAIAAAAKgAK AAAACgAAAAsABgANAA4ADgATABAAEwASAB4AFgAiABgAJAAZACgAGgABACMA JAABAAcAAAAhAAEAAQAAAAUqtwAmsQAAAAEACAAAAAoAAgAAAAQABAAEAAEA JwAAAAIAKA== ==== EOF if uudecode$EXEEXT Test.uue; then : else echo "configure: __oline__: uudecode had trouble decoding base 64 file 'Test.uue'" >&AC_FD_CC echo "configure: failed file was:" >&AC_FD_CC cat Test.uue >&AC_FD_CC ac_cv_prog_uudecode_base64=no fi rm -f Test.uue if AC_TRY_COMMAND($JAVA $JAVAFLAGS Test $1) >/dev/null 2>&1; then eval "ac_cv_class_$ac_var_name=yes" else eval "ac_cv_class_$ac_var_name=no" fi rm -f Test.class else AC_TRY_COMPILE_JAVA([$1], , [eval "ac_cv_class_$ac_var_name=yes"], [eval "ac_cv_class_$ac_var_name=no"]) fi eval "ac_var_val=$`eval echo ac_cv_class_$ac_var_name`" eval "HAVE_$ac_var_name=$`echo ac_cv_class_$ac_var_val`" HAVE_LAST_CLASS=$ac_var_val if test x$ac_var_val = xyes; then ifelse([$2], , :, [$2]) else ifelse([$3], , :, [$3]) fi ] ) #dnl for some reason the above statment didn't fall though here? #dnl do scripts have variable scoping? 
eval "ac_var_val=$`eval echo ac_cv_class_$ac_var_name`" AC_MSG_RESULT($ac_var_val) ] ) AC_DEFUN([AC_CHECK_JAVA_HOME],[ AC_REQUIRE([AC_EXEEXT])dnl TRY_JAVA_HOME=`ls -dr /usr/java/* 2> /dev/null | head -n 1` if test x$TRY_JAVA_HOME != x; then PATH=$PATH:$TRY_JAVA_HOME/bin fi AC_PATH_PROG(JAVA_PATH_NAME, java$EXEEXT) if test x$JAVA_PATH_NAME != x; then JAVA_HOME=`echo $JAVA_PATH_NAME | sed "s/\(.*\)[[/]]bin[[/]]java$EXEEXT$/\1/"` fi;dnl ] ) AC_DEFUN([AC_PROG_JAR], [ AC_REQUIRE([AC_EXEEXT])dnl if test "x$JAVAPREFIX" = x; then test "x$JAR" = x && AC_CHECK_PROGS(JAR, jar$EXEEXT) else test "x$JAR" = x && AC_CHECK_PROGS(JAR, jar, $JAVAPREFIX) fi test "x$JAR" = x && AC_MSG_ERROR([no acceptable jar program found in \$PATH]) AC_PROVIDE([$0])dnl ] ) AC_DEFUN([AC_PROG_JAVA],[ AC_REQUIRE([AC_EXEEXT])dnl if test x$JAVAPREFIX = x; then test x$JAVA = x && AC_CHECK_PROGS(JAVA, kaffe$EXEEXT java$EXEEXT) else test x$JAVA = x && AC_CHECK_PROGS(JAVA, kaffe$EXEEXT java$EXEEXT, $JAVAPREFIX) fi test x$JAVA = x && AC_MSG_ERROR([no acceptable Java virtual machine found in \$PATH]) AC_PROG_JAVA_WORKS AC_PROVIDE([$0])dnl ] ) #dnl /** #dnl * Test.java: used to test if java compiler works. #dnl */ #dnl public class Test #dnl { #dnl #dnl public static void #dnl main( String[] argv ) #dnl { #dnl System.exit (0); #dnl } #dnl #dnl } AC_DEFUN([AC_PROG_JAVA_WORKS], [ AC_CHECK_PROG(uudecode, uudecode$EXEEXT, yes) if test x$uudecode = xyes; then AC_CACHE_CHECK([if uudecode can decode base 64 file], ac_cv_prog_uudecode_base64, [ cat << \EOF > Test.uue begin-base64 644 Test.class yv66vgADAC0AFQcAAgEABFRlc3QHAAQBABBqYXZhL2xhbmcvT2JqZWN0AQAE bWFpbgEAFihbTGphdmEvbGFuZy9TdHJpbmc7KVYBAARDb2RlAQAPTGluZU51 bWJlclRhYmxlDAAKAAsBAARleGl0AQAEKEkpVgoADQAJBwAOAQAQamF2YS9s YW5nL1N5c3RlbQEABjxpbml0PgEAAygpVgwADwAQCgADABEBAApTb3VyY2VG aWxlAQAJVGVzdC5qYXZhACEAAQADAAAAAAACAAkABQAGAAEABwAAACEAAQAB AAAABQO4AAyxAAAAAQAIAAAACgACAAAACgAEAAsAAQAPABAAAQAHAAAAIQAB AAEAAAAFKrcAErEAAAABAAgAAAAKAAIAAAAEAAQABAABABMAAAACABQ= ==== EOF if uudecode$EXEEXT Test.uue; then ac_cv_prog_uudecode_base64=yes else echo "configure: __oline__: uudecode had trouble decoding base 64 file 'Test.uue'" >&AC_FD_CC echo "configure: failed file was:" >&AC_FD_CC cat Test.uue >&AC_FD_CC ac_cv_prog_uudecode_base64=no fi rm -f Test.uue]) fi if test x$ac_cv_prog_uudecode_base64 != xyes; then rm -f Test.class AC_MSG_WARN([I have to compile Test.class from scratch]) if test x$ac_cv_prog_javac_works = xno; then AC_MSG_ERROR([Cannot compile java source. 
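dnl NOTE: purely illustrative usage sketch, not part of the original macro
dnl archive. A configure.in built on the macros above would typically invoke
dnl them along these lines (the class name checked here is only an example):
dnl
dnl   AC_CHECK_CLASSPATH
dnl   AC_PROG_JAVAC
dnl   AC_PROG_JAVA
dnl   AC_PROG_JAR
dnl   AC_CHECK_CLASS(antlr.Tool, [have_antlr=yes], [have_antlr=no])
dnl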
$JAVAC does not work properly]) fi if test x$ac_cv_prog_javac_works = x; then AC_PROG_JAVAC fi fi AC_CACHE_CHECK(if $JAVA works, ac_cv_prog_java_works, [ JAVA_TEST=Test.java CLASS_TEST=Test.class TEST=Test changequote(, )dnl cat << \EOF > $JAVA_TEST /* [#]line __oline__ "configure" */ public class Test { public static void main (String args[]) { System.exit (0); } } EOF changequote([, ])dnl if test x$ac_cv_prog_uudecode_base64 != xyes; then if AC_TRY_COMMAND($JAVAC $JAVACFLAGS $JAVA_TEST) && test -s $CLASS_TEST; then : else echo "configure: failed program was:" >&AC_FD_CC cat $JAVA_TEST >&AC_FD_CC AC_MSG_ERROR(The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)) fi fi if AC_TRY_COMMAND($JAVA $JAVAFLAGS $TEST) >/dev/null 2>&1; then ac_cv_prog_java_works=yes else echo "configure: failed program was:" >&AC_FD_CC cat $JAVA_TEST >&AC_FD_CC AC_MSG_ERROR(The Java VM $JAVA failed (see config.log, check the CLASSPATH?)) fi rm -fr $JAVA_TEST $CLASS_TEST Test.uue ]) AC_PROVIDE([$0])dnl ] ) AC_DEFUN([AC_PROG_JAVAC], [ AC_REQUIRE([AC_EXEEXT])dnl if test "x$JAVAPREFIX" = x; then test "x$JAVAC" = x && AC_CHECK_PROGS(JAVAC, javac$EXEEXT "gcj$EXEEXT -C" guavac$EXEEXT jikes$EXEEXT) else test "x$JAVAC" = x && AC_CHECK_PROGS(JAVAC, javac$EXEEXT "gcj$EXEEXT -C" guavac$EXEEXT jikes$EXEEXT, $JAVAPREFIX) fi test "x$JAVAC" = x && AC_MSG_ERROR([no acceptable Java compiler found in \$PATH]) AC_PROG_JAVAC_WORKS AC_PROVIDE([$0])dnl ] ) AC_DEFUN([AC_PROG_JAVAC_WORKS],[ AC_CACHE_CHECK([if $JAVAC works], ac_cv_prog_javac_works, [ JAVA_TEST=Test.java CLASS_TEST=Test.class cat << \EOF > $JAVA_TEST /* [#]line __oline__ "configure" */ public class Test { } EOF if AC_TRY_COMMAND($JAVAC $JAVACFLAGS $JAVA_TEST) >/dev/null 2>&1; then ac_cv_prog_javac_works=yes else AC_MSG_ERROR([The Java compiler $JAVAC failed (see config.log, check the CLASSPATH?)]) echo "configure: failed program was:" >&AC_FD_CC cat $JAVA_TEST >&AC_FD_CC fi rm -f $JAVA_TEST $CLASS_TEST ]) AC_PROVIDE([$0])dnl ]) AC_DEFUN([AC_TRY_COMPILE_JAVA],[ AC_REQUIRE([AC_PROG_JAVAC])dnl cat << \EOF > Test.java /* [#]line __oline__ "configure" */ ifelse([$1], , , [import $1;]) public class Test { [$2] } EOF if AC_TRY_COMMAND($JAVAC $JAVACFLAGS Test.java) && test -s Test.class ; then #dnl Don't remove the temporary files here, so they can be examined. ifelse([$3], , :, [$3]) else echo "configure: failed program was:" >&AC_FD_CC cat Test.java >&AC_FD_CC ifelse([$4], , , [ rm -fr Test* $4 ])dnl fi rm -fr Test* ] ) AC_DEFUN([AC_TRY_RUN_JAVA],[ AC_REQUIRE([AC_PROG_JAVAC])dnl AC_REQUIRE([AC_PROG_JAVA])dnl cat << \EOF > Test.java /* [#]line __oline__ "configure" */ ifelse([$1], , , [include $1;]) public class Test { [$2] } EOF if AC_TRY_COMMAND($JAVAC $JAVACFLAGS Test.java) && test -s Test.class && ($JAVA $JAVAFLAGS Test; exit) 2>/dev/null then #dnl Don't remove the temporary files here, so they can be examined. ifelse([$3], , :, [$3]) else echo "configure: failed program was:" >&AC_FD_CC cat Test.java >&AC_FD_CC ifelse([$4], , , [ rm -fr Test* $4 ])dnl fi rm -fr Test*]) #dnl#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx #dnl# AX_TRY_COMPILE_JAVA #dnl#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx #dnl $1 => import section #dnl $2 => class body section #dnl $3 => if_good_action #dnl $4 => if_fails_action [implicit action: candidate is removed from #dnl list]. This cannot be overridden by providing a action. 
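#dnl Example (illustrative only, not taken from the original file): probing
#dnl whether the selected Java compiler can compile a class that uses
#dnl java.util.Vector:
#dnl
#dnl   AX_TRY_COMPILE_JAVA([import java.util.Vector;],
#dnl                       [Vector v = new Vector();],
#dnl                       [AC_MSG_RESULT([java.util.Vector is usable])])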
AC_DEFUN([AX_TRY_COMPILE_JAVA], [ ## make sure that we do not use an existing file i=0;cn="Test\${i}"; eval "fj=${cn}.java" while test -f "${fj}" do i=`expr $i + 1` eval "fj=${cn}.java" done eval "fc=${cn}.class" eval "cn=${cn}" cat << [_ACEOF] > ${fj} [$1] public class ${cn} { [$2] } [_ACEOF] ## wh: how do I check that a file has a non-zero size (test -s) ## wh: is not portable. if AC_TRY_COMMAND($JAVAC $JAVACFLAGS ${fj}) && test -f "${fc}" then $3 else ifelse([$4], ,[ echo "" echo "@configure:__oline__: failed to compile java input ...." echo "=======================================================" cat ${fj} echo "=======================================================" echo "exec $JAVAC $JAVACFLAGS ${fj}" echo "" rm -rf "${fc}" "${fj}" ],[$4]) fi rm -rf "${fc}" "${fj}" ## eof [AX_TRY_COMPILE_JAVA] ])dnl #dnl AX_GNU_MAKE #dnl $1->var that contains list of suitable candidates [not empty] #dnl $2->action_if_not_found || empty #dnl $3->action_if_found || empty #dnl => $MAKE AC_DEFUN( [AX_GNU_MAKE], [ #Search all the common names for GNU make ax_gnu_make_list="${[$1]}" [$1]= for a in . ${ax_gnu_make_list} ; do if test "$a" == "." ; then continue fi AC_MSG_CHECKING([whether ${a} is GNU make]) if (/bin/sh -c "$a --version" 2> /dev/null | grep GNU 2>&1 > /dev/null ); then [$1]="$a" AC_MSG_RESULT(yes) break else AC_MSG_RESULT(no) fi done ## handle search result if test "x${[$1]}" == "x" ; then : $2 else : $3 fi ] )dnl ###dnl Like AC_PATH_PROGS. However, each argument in $2 will be checked. ###dnl The result will be added to $1. There's no caching etc. ###dnl AC_DEFUN( [AX_TYPE_DASHA], [ for ac_prog in [$2] ; do set dummy $ac_prog; ac_word=${2} ## if argument is absolute we check whether such a file exists, ## otherwise we lookup PATH. Each hit will be added to main ## variable. case $ac_word in @<:@\\/@:>@* | ?:@<:@\\/@:>@*) AC_MSG_CHECKING([for $ac_word]) if test -f $ac_word ; then [$1]="${[$1]} ${ac_word}" AC_MSG_RESULT(yes) else AC_MSG_RESULT(no) fi ;; *) as_found= as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then [$1]="${[$1]} $as_dir/$ac_word$ac_exec_ext" AC_MSG_CHECKING([for $ac_word]) AC_MSG_RESULT([$as_dir/$ac_word$ac_exec_ext]) as_found=1 fi done done test "x$as_found" == "x" && { AC_MSG_CHECKING([for $ac_word]) AC_MSG_RESULT([no]) } ;; esac done ] )dnl ###dnl Like AC_PATH_PROGS but if is given, then it's argument ###dnl is taken unconditionally(?). AC_DEFUN( [AX_PATH_PROGS], [ ax_arg_list="[$2]" if test "x${[$1]}" != "x" ; then ax_arg_list="${[$1]}" fi [$1]="" AX_TYPE_DASHA([$1],[${ax_arg_list}]) if test "x${[$1]}" != "x" ; then ifelse([$3], ,[:],$3) else ifelse([$4], ,[ AC_MSG_ERROR([no suitable value has been found for [$1]]) ],$4) fi ] ) AC_DEFUN([AX_JAVAC], [ ## make sure that we do not use an existing file i=0;cn="Test\${i}"; eval "fj=${cn}.java" while test -f "${fj}" do i=`expr $i + 1` eval "fj=${cn}.java" done eval "fc=${cn}.class" eval "cn=${cn}" cat << [_ACEOF] > ${fj} [$1] public class ${cn} { [$2] } [_ACEOF] ## wh: how do I check that a file has a non-zero size (test -s) ## wh: is not portable. if AC_TRY_COMMAND($JAVAC $JAVACFLAGS ${fj}) && test -f "${fc}" then $3 else ifelse([$4], ,[ echo "" echo "@configure:__oline__: failed to compile java input ...." 
echo "=======================================================" cat ${fj} echo "=======================================================" echo "exec $JAVAC $JAVACFLAGS ${fj}" echo "" rm -rf "${fc}" "${fj}" ],[$4]) fi rm -rf "${fc}" "${fj}" ## eof [AX_TRY_COMPILE_JAVA] ])dnl
AC_DEFUN([AX_WHICH_JAVAC],[ AC_SUBST([$1]) if (/bin/sh -c "$JAVAC --version" 2>&1 | grep -i 'GCC' 2>&1 > /dev/null ) ; then [$1]=gcj elif (/bin/sh -c "$JAVAC --version" 2>&1 | grep -i 'jikes' 2>&1 > /dev/null ) ; then [$1]=jikes else [$1]=javac fi ] )
AC_DEFUN([AX_VAR_HEAD],[ set x ${[$1]} [$1]="${2}" ] )
AC_DEFUN([AX_VAR_ADD],[ ifelse([$3], ,,[$1=$3]) $1="${[$1]} $2" ] )
AC_DEFUN([AX_JAVA_PROGS],[ case $LANG_JAVA in 1) AX_PATH_PROGS([$1],[$2],[$3],[ LANG_JAVA=0 cat <> "$[$1]" <
/antlr.jar $ java antlr.Tool mygrammar.g
c. write a driver program using the source code generated by ANTLR, i.e. Main.java, main.cpp, Main.cs or main.py
d. link the generated code, your driver code, ANTLR's core library and any additional library you are using together to get an executable
f. run the executable on arbitrary input to be parsed

For a set of standard examples have a look into the "examples" directory and its subdirectories. You may want to run make like

   $ make verbose=1

to see which compiler is used, which flags are passed, etc. You may also follow a simple mini-tutorial at http://www.antlr.org/article/cutpaste/index.html if you are absolutely new to ANTLR.

______________________________________________________________________
BUILD AND INSTALL ON WINDOWS NT/95/98/2000/XP?

There is no difference in building and installing on Windows. However, you need to have either Cygwin or MSYS installed. We haven't tried MKS yet.

If you run configure with Cygwin (or MSYS) then gcj will usually be chosen for compiling Java and gcc for compiling C++. In most cases, however, C# will be automatically disabled as no compiler can be found.

Configure does not look up the registry to check for installed software. Instead, configure will just check the $PATH for known compiler names and also check some well-known locations like "/usr/local/bin" etc. In order to make configure aware of a certain compiler or tool you need, make sure that your compiler or tool can be found by looking up $PATH. For example, Microsoft Visual C++ comes with a batch file named vcvars32.bat. Just run this batch file prior to running bash to have "cl" in your $PATH.

Configure knows about these compiler names:

   bcc32 cl g++      for C++
   jikes javac gcj   for Java
   csc mcs cscc      for C#
   python            for Python

The order in which the names appear is also the search order. That means that the whole $PATH gets searched for bcc32, then for cl and finally for g++. In other words, it is sufficient to have "cl" in $PATH to have it selected by configure, regardless of whether g++ is available or not. Similarly, if you also have bcc32 in $PATH, then the Borland C++ Compiler will be chosen.

If you have more than one compiler in your $PATH and the "wrong" compiler is selected, do you have to give up? Not at all. In case you have more than one compiler/tool in your $PATH, you can tell configure which one you would like to have. For example:

   --with-cxx=g++

This will favor g++ regardless of whether there's a cl or bcc in PATH. You can achieve the very same by

   CXX=g++ ${srcdir}/configure

that is, by using an environment variable. Try --help for the full list of --with-xxx options and environment variables.

______________________________________________________________________
DETAILS ON CONFIGURE?
This section presents some further details on how you can tell configure to behave the way you want:

1. Choose Language
==================

ANTLR is implemented in Java and has code generator plugins for Java, C++, C# and Python. The default behaviour of configure is to check whether a suitable compiler or tool for a particular language is available. If not, then configure will show a warning message and will automatically disable support for this language. In an extreme case it is therefore possible to end up with a configuration with no language support at all. Please study configure's output to make sure you end up with the system you want. Conversely, configure will enable, as mentioned, every target language with sufficient compiler/tool support.

To speed up the build process you may also disable languages (and configure them again later). This can be done by command line options:

   --disable-cxx    | --enable-cxx=no    --> disable C++
   --disable-java   | --enable-java=no   --> disable Java
   --disable-csharp | --enable-csharp=no --> disable C#
   --disable-python | --enable-python=no --> disable Python

Be warned that when disabling Java you can't even build ANTLR itself.

2. Be Verbose
=============

A typical 'make' run reports all kinds of actions exactly the way they get carried out. This makes the output of a make run hard to read, and it's difficult to keep track of "where" make is right now and what's going on. ANTLR's make run has been designed to be readable. By default make will report what's going on in a kind of logical way. For example, when compiling ANTLR itself you will see a message like

   *** compiling 209 Java file(s)

This information is usually enough to keep track of what's going on. If compilation fails, then the exact command line causing the problem will be shown and you can study the arguments to see whether additional flags are required etc. However, you can tell configure to be verbose as usual by

   --enable-verbose

Having enabled verbosity, the command to compile ANTLR will be shown similar to this (using javac):

   CLASSPATH= /opt/jdk32/142_03/bin/javac \
     -d . \
     -sourcepath /home/geronimo/src/antlrmain \
     -classpath /home/geronimo/obj/antlr-linux/lib/antlr.jar \
     Version.java \
     ANTLRParser.java \
     ... [[skipped 206 files]] ...
     /home/geronimo/src/antlrmain/antlr/debug/misc/JTreeASTPanel.java

You have used --enable-verbose and the output is too much. Do you need to configure again? No. Just run make like

   $ make verbose=0

to turn off verbosity for this make run. You can also do this

   $ verbose=0
   $ export verbose
   $ make

to turn verbosity off without using arguments to make. Of course, you can also turn verbosity on (verbose=1) in the very same way, regardless of what you have configured earlier.

3. Debug Version
================

A typical open source configuration sets compiler options to contain debug information. ANTLR's approach is a bit different. We believe that you want to u s e ANTLR and not to d e b u g it. In other words, we believe you are voting for speed. Therefore configure will set compiler flags that go for speed and omit all flags introducing debug information. If you need debug information, turn those flags on by using

   --enable-debug

Similar to verbose discussed in the previous section, you can override this configuration setting by using

   $ make debug=0   -- no debug information
   $ make debug=1   -- turn debugging on

without the need to reconfigure. But be aware that --enable-debug just changes the flags given to your compiler. It will not change any names. For example, the name of ANTLR's core library is libantlr.a (using g++) regardless of whether debug is on or off.
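To see how the options from sections 1-3 work together, here is one purely illustrative sequence; none of these switches are required:

   $ ${srcdir}/configure --disable-csharp --disable-python --enable-debug
   $ make verbose=1
   $ make debug=0

The first line configures a build without C# and Python support and with debugging flags on by default, the second turns full command echoing on for that particular make run, and the third runs make with the debugging flags switched off again, all without reconfiguring.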
4. Examples
===========

You may leave out the examples just by

--disable-examples

Note that you can't undo this without reconfiguration. Nevertheless,
we recommend configuring with examples, testing them, and studying
them to get a full understanding of how ANTLR works and - last but not
least - of all its capabilities.

5. Bootstrap
============

ANTLR's parser engine (i.e. the parser that accepts a grammar file) is
written in ANTLR itself. From a logical point of view you would need
ANTLR to build ANTLR. This chicken-and-egg problem is solved simply by
having generated source files enclosed in the package. However, if you
are doing some surgery on ANTLR's internals you need to have an
existing ANTLR version around. You can tell configure about this
external ANTLR version by

--with-antlr-jar=${some-install-dir}/lib/antlr.jar

or by

--with-antlr-cmd=${some-tool-running-antlr}

The former option will add the given jar file to the $CLASSPATH when
compiling grammar files (*.g). Very similarly, you can use
--with-antlr-cmd to provide a program or shell script to compile
grammar files. The name of the program does not matter, but it needs
to be executable and it should support all flags that can be given to
ANTLR (check java antlr.Tool -h).

NOTE: Bootstrapping ANTLR with an external jar file or program will
only be taken into account if there is no lib/antlr.jar available in
the build directory. In other words, if you bootstrapped once and you
later make changes to ANTLR's internals, then the previously built jar
file antlr.jar will be used - regardless of your configuration options.

6. Which Make?
==============

You need a GNU make to run the build. This is especially true when
building ANTLR on Windows. Nevertheless, some effort has been made to
lower the dependency on GNU make for portability reasons. But we are
not done yet.

If you have GNU make installed on your system but it's not in your
PATH, or you want to use a special version of make, you may tell
configure by either

--with-make=${list-of-candidates}

or by using environment variable $MAKE like

MAKE=${list-of-candidates} ${srcdir}/configure

Which variant you use is a matter of personal taste. But be aware that
the command line argument overrides the environment variable. Consider
this example:

MAKE=x-make ${srcdir}/configure --with-make="y-make z-make"

Here configure will check your $PATH for y-make and z-make but x-make
is ignored.

Note that the whitespace-separated ${list-of-candidates} may also
contain absolute path names. In that case $PATH is not consulted but
the file is directly checked for existence. Here's an example:

MAKE='make /usr/bin/make' ${srcdir}/configure

Here your $PATH is consulted to see whether a "make" exists. Then
/usr/bin/make is checked for existence. The first make found which
turns out to be a GNU make is the one chosen to build ANTLR.

For consistency reasons you may also use --with-makeflags or variable
$MAKEFLAGS to pass specific flags to make. However, this information
is not yet used. Right now the flags used are the flags provided when
calling make.
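As a sketch (the path /usr/local/bin/gmake is only an example), the
candidate mechanism can be used to prefer a locally installed GNU make
over whatever "make" happens to be first in $PATH:

   $ ${srcdir}/configure --with-make="/usr/local/bin/gmake gmake make"

The absolute path is checked for existence directly; the remaining
names are looked up in $PATH, and the first candidate that turns out
to be a GNU make wins.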
7. Which Java?
==============

ANTLR has been tested with SUN Java 1.4.x using either SUN's javac
Java compiler, IBM's jikes or GNU's gcj. Other systems have not been
tested and are not expected to work.

The default search order is

jikes javac gcj

It is therefore sufficient to have "jikes" in your $PATH to get it
chosen by configure - regardless of whether it precedes a javac in
$PATH or not. You may change this default search by providing a list
of candidates by either

--with-javac=${candidate-list}

or by

JAVAC=${candidate-list}

The candidates should be separated by whitespace and may be relative
or absolute path names. Only in the former case does a lookup in $PATH
take place.

By default, the options passed to a Java compiler are those sufficient
to compile ANTLR. You may pass other flags by either

--with-javacflags=${flags}
JAVACFLAGS=${flags}

In most cases you want to leave the default flags intact and rather
add your additional flags. To support this, ANTLR's build management
interprets ${flags} like this:

${flags}       | STANDARD FLAGS | RESULT
---------------+----------------+----------------------
+ f1 f2 .. fn  | F1 .. FN       | F1 .. FN f1 f2 .. fn
- f1 f2 .. fn  | F1 .. FN       | f1 f2 .. fn F1 .. FN
= f1 f2 .. fn  | F1 .. FN       | f1 f2 .. fn
  f1 f2 .. fn  | F1 .. FN       | f1 f2 .. fn
---------------+----------------+----------------------

In other words, you can either tell configure to append (+), to
prepend (-) or to substitute (=) your flags. Note that this is also
possible when running make. For example,

$ make JAVAC=jikes JAVACFLAGS="+ -verbose"

would use jikes to compile regardless of what has been configured
before. Additionally the flag "-verbose" is used.

So far we talked about compiling Java. Let's now have a look at how to
execute class files. By default configure searches for

java gij

to run Java. As discussed before, it is sufficient to have a java in
$PATH to get it selected. It should also be noted that gij is still at
an experimental stage and not fully supported yet. As before, you may
provide additional flags to Java by using either

--with-javaflags=${flags}

or by using environment variable

JAVAFLAGS=${flags}

Again, ${flags} are interpreted according to the table shown above.

8. Which C++ Compiler?
======================

The algorithm for how the C++ compiler is chosen and how additional
flags are passed is very much the same as discussed before for the
Java compiler. The default candidate list for choosing a C++ compiler
is

bcc32 cl g++                  on Windows

and

aCC CC xlC xlC_r g++ cxx c++  on UNIX

To change this list use

--with-cxx=${candidates}

and use

--with-cxxflags=${flags}

to pass additional flags on top of the standard compiler flags. You
can also use $CXX and $CXXFLAGS if you like. This will then also work
when running make.

9. Which C# Compiler?
=====================

The only candidate for compiling C# source code at the time of writing
is csc. To change this use

--with-csharpc=${candidates}

and use

--with-csharpcflags=${flags}

for providing a list of additional options. Note that ${flags} are
interpreted as discussed in the previous sections. Instead of command
line arguments to configure you may also use $CSHARPC and
$CSHARPCFLAGS. These variables also work when running make.

10. Which Python?
=================

Configure searches for "python" in your $PATH and in some well-known
locations. If no "python" can be found, language support for Python is
disabled. You may use

--with-python="${candidates}"

to provide a list of Python candidates, and you may further use

--with-pythonflags="${flags}"

to provide a list of (additional) flags to be passed to Python on
execution. Note that ${flags} are interpreted as discussed in the
previous sections. You may also use $PYTHON and $PYTHONFLAGS instead.
These variables also work when running make. Note that these variables
may override what has been configured before without any warning.
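To wrap up the flag handling described in sections 7 to 10, here is a
short sketch of the append/prepend/substitute mechanism (the concrete
flags are only examples):

   $ ${srcdir}/configure --with-javacflags="+ -deprecation"
     ## keep the standard flags and append -deprecation

   $ make JAVACFLAGS="= -g -verbose"
     ## substitute: use only -g -verbose for this make run

The same interpretation applies to the other *FLAGS variables
(CXXFLAGS, CSHARPCFLAGS, JAVAFLAGS, PYTHONFLAGS) as described above.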
______________________________________________________________________
FURTHER DETAILS ON ANTLR'S BUILD SETUP?

In almost all cases it is assumed to be sufficient to change compiler
settings by using either --with-${lang}flags or by using ${LANG}FLAGS.
This is most likely not sufficient if you want to support a new
compiler, a new tool or a new operating system.

The general idea is to have a configured shell script for each tool or
compiler being used rather than doing the scripting within the
Makefile itself. The reason for this is that scripting in Makefiles is
rather hard to read and a pain to debug (for example, there's no line
information). The configured shell scripts are to be found in
subdirectory "scripts" within the build directory. Their originals are
located in ${srcdir}/scripts. These scripts are known at the time of
writing:

antlr.sh.in   - runs "java antlr.Tool"
cxx.sh.in     - runs the C++ compiler
cpp.sh.in     - runs the C/C++ preprocessor (for dependencies)
jar.sh.in     - how to make a static library (aka jar file) (Java)
javac.sh.in   - runs the Java compiler (also when using jikes etc.)
lib.sh.in     - how to make a static library (C++)
link.sh.in    - how to make an executable (C++)
python.sh.in  - how to run Python

A Makefile contains nothing more than variables and rules, and in
almost all cases (simple exceptions aside) the command behind a rule
just executes a script. Note that these scripts are not intended to be
used to compile C++, Java etc. in general. Instead, these scripts are
specialized for ANTLR. For example, the script "lib.sh" has a very
simple interface. All you can provide is a list of object files. There
is no option to specify the name of the library to be built. Instead
the library name is set by configure and is available as @ANTLR_LIB@
in all configured files.

Unfortunately, all scripts are rather complex, and limiting ourselves
to not using shell functions (for portability) does not make changes
easier. In general you should only edit the upper part of any script,
up to a line like "**NO CHANGE NECESSARY BELOW THIS LINE - EXPERTS
ONLY**".

Let's now have a closer look at cxx.sh.in. The other scripts are very
similar. I'm going to discuss here only the relevant parts - this is
open source after all and you may try to understand it by reading the
source code :-)

ARGV="$*"

In general, all arguments given to the script file are collected
within variable $ARGV. In some scripts the very first argument has a
special meaning - usually it tells about the target to be created. In
such a case TARGET would hold the first argument given and ARGV would
hold all others.

if test -z "${CXX}" ; then
  CXX="@CXX@"
  cxx="@cxx@"
else
  cxx="`basename $CXX`"
  cxx="`echo $cxx|sed 's,\..*$,,'`"
fi

This script snippet determines the compiler to be used for compiling
C++ source. Note that we have two variables, CXX and cxx. The former
usually holds the absolute path of the compiler as configured. The
latter, cxx, contains the logical compiler name. The logical compiler
name is "gcc" for GNU, "cl" for Microsoft C++, etc. The logical
compiler name is usually configured and available as @cxx@. However,
if a user overrides the configuration by using environment variable
CXX, the logical compiler name is computed by taking the basename and
removing any extension.
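For example (the path is purely hypothetical), overriding the
configured compiler for a single make run:

   $ CXX=/opt/msvc/bin/cl.exe make

cxx.sh would then derive the logical name "cl" (basename with the
extension stripped) and select the matching flags, as shown in the
snippet below.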
In a further section (similar in the other scripts) we set specific
flags depending on the l o g i c a l compiler name. As you can see, it
is rather important to get the logical name right.

case "${cxx}" in
  gcc)
    cxxflags="-felide-constructors -pipe"
    case "${DEBUG}" in
      0) cxxflags="-O2 -DNDEBUG ${cxxflags}" ;;
      1) cxxflags="-g ${cxxflags} -W -Wall" ;;
    esac
    ;;
  cl)
    cxxflags="/nologo -GX -GR"
    [[skipped]]
esac

In the snippet shown you can see the handling of "gcc" and "cl". Note
that the compiler flags are saved in $cxxflags and not in $CXXFLAGS.
The reason for this is that handling of the environment variable
$CXXFLAGS is rather lengthy due to ANTLR's special interpretation
mechanism (as discussed in an earlier section).

In some cases flags may depend on the platform in use. In that case
you may do something like:

case ${cxx} in
  gcc)
    case @build_os@ in
      cygwin)
        ## cygwin specific flags
        ..
        ;;
      sparc)
        ## sparc specific
        ..
        ;;
      *)
        ## all others
        ;;
    esac
    ..
esac

Of course you can utilize the full power of shell scripting here to
set the flags required to get the compilation job done.
______________________________________________________________________
FAQ?

1. How to set up the jikes boot classpath?
==========================================

Jikes (http://www-124.ibm.com/developerworks/oss/jikes) is just a Java
compiler that needs to know about Java's core classes, i.e. rt.jar,
for a successful build of ANTLR. By default configure tries to locate
rt.jar by searching $PATH for a java executable and then, relative to
where it was found, for ../jre/lib/rt.jar. If this search fails, or if
you want to pass a different Java core or further core libraries, you
may use either option

--with-bootclasspath="${args}"

or environment variable

BOOTCLASSPATH="${args}"

The list of arguments, ${args}, is supposed to be a list of
whitespace-separated files or directories. Configure will validate
that each argument exists and will fail otherwise. Beyond this, no
further validation is done - you are supposed to know what you are
doing. Note also that configure will not perform any search in case a
bootclasspath is given. The arguments given are concatenated using the
platform-specific argument separator (i.e. ";" or ":") and passed to
option --bootclasspath.

2. Can I just say "make" after having unpacked ANTLR?
=====================================================

No - as explained previously you need to run "configure" before you
can issue a "make".

3. How do I clean up afterwards?
================================

You do a "make clean" to remove all object files. If you do a "make
distclean", then all files generated by ANTLR are also removed. In
this case you need an external antlr.jar somewhere to rebuild. If you
are using an external build directory you may just remove the whole
directory without any harm.

4. Is it safe to edit a Makefile?
=================================

In general, do not edit files named Makefile. Instead edit files named
Makefile.in. Configure or config.status will overwrite changes you
made in a Makefile on any run.

5. I changed Makefile.in - what's next?
=======================================

Just run make again. Each Makefile contains a rule that automatically
remakes itself if the corresponding Makefile.in has changed. This
automatic rule works fine in general. However, if you change
Makefile/Makefile.in in such a way that make rejects its execution,
then you need to remake the Makefile yourself. This can be done by

$ cd ${builddir}
$ ./config.status ${your-makefile}

Here's an example. Assume that "antlr/Makefile" is corrupted.
Then do this:

$ ./config.status antlr/Makefile

You can also issue just a

$ ./config.status

In this case all configured files are regenerated.

6. My configure has changed - what's next?
==========================================

You need to reconfigure each build directory to get the changes. So
far no automatic rule has been implemented. The easiest way of staying
up-to-date is:

$ cd ${builddir} && ./config.status --recheck && make

7. Where do I find antlr.jar?
=============================

Have a look at "lib". It should be there - but this depends on whether
Java has been enabled or not. Note that Java gets automatically
disabled if no sufficient Java compiler is found (a warning message
would be dumped in that case).

8. How can I make "make" noisy?
===============================

By default make just tells you what's going on in a very brief way. To
change this just do:

$ make verbose=1

You can also tell configure to turn verbosity on by default by using
option --enable-verbose. Run "configure --help" for a list of the
options available.

9. Am I able to run "make" in a subdirectory?
=============================================

Sure.

10. Is it safe to remove a configured subdirectory?
===================================================

In general "yes", but you want to do this only within the build tree.
For example, let's assume that you are tired of having the standard
examples tested. Then just go ahead and remove subdirectory "examples".
______________________________________________________________________
LIST OF CONFIGURED VARIABLES?

ANTLRFLAGS ANTLR_ACTION_FILES ANTLR_ANTLR_FILES ANTLR_COMPILE_CMD
ANTLR_CYGWIN ANTLR_JAR ANTLR_LIB ANTLR_MINGW ANTLR_NET ANTLR_PY
ANTLR_TOKDEF_FILES ANTLR_WIN32 ANTLR_WITH_ANTLR_CMD ANTLR_WITH_ANTLR_JAR
AR ARFLAGS AS BOOTCLASSPATH CSHARPC CSHARPCFLAGS CSHARP_COMPILE_CMD
CXX_COMPILE_CMD CXX_LIB_CMD CXX_LINK_CMD CYGPATH DEBUG EXEEXT JAR
JARFLAGS JAR_CMD JAVA JAVAC JAVACFLAGS JAVAFLAGS JAVA_CMD
JAVA_COMPILE_CMD LIBEXT MAKE OBJEXT PATCHLEVEL PYTHON PYTHONFLAGS
SUBVERSION VERBOSE VERSION WITH_EXAMPLES abs_this_builddir cxx jar
java javac
______________________________________________________________________
PLATFORMS?

ANTLR has been developed and tested on the platforms listed below. A
platform is considered supported and tested if basically all standard
examples are working.

Development for ANTLR 2.7.5 took place on:

i686-linux2.6.3-7mk
 - Java 1.4.2, gcj 3.3.4, jikes 1.16, 1.17, 1.18, 1.19, 1.20, 1.21, 1.22
 - gcc 3.3.2, gcc 3.3.4
 - Python 2.3, 2.4
 - DotGNU 0.6.0

i686-cygwin_nt-5.1
 - Java 1.4.2, gcj 3.3.3, jikes 1.22
 - gcc 3.3.3, bcc32 5.6 (Borland C++), cl 13.10.3077 (Microsoft C++)
 - csc 7.10.6001.4 (Visual C# .NET Compiler)
 - Python 2.2, 2.3, 2.4
 - Mono 1.0.5

ANTLR has been tested on:

MinGW-3.1.0.1 (needs manual install of cygpath!)
 - MSys 1.0.10
 - Java 1.5.0-b64
 - gcc 3.4.1

PowerPC or RS/6000:
 * powerpc-apple-darwin6.8 (MacOS 10, "Jaguar")
   - Java 1.4.1, jikes 1.15
   - gcc 3.1
   - Python 2.2
   - Mono (?) also reported to work.

SPARC:
 * sparc-sun-solaris2.8
   - Java 1.4.1
   - SUN CC 5.6

Known n o t to work:

- jikes 1.13 and older due to missing classpath and bootclasspath
  options. Jikes 1.14 and 1.15 are not tested due to compilation
  problems compiling jikes itself (on Mandrake 10).
- Python < 2.2 will not work.
- gcc 2.* will not work.
- You need to have GNU make (for building).

======================================================================
INSTALL.txt - last update January 11th, 2005